Merge "Upgrade to tuner aidl V3 interface" into main
diff --git a/Android.bp b/Android.bp
index 7a2bb9b..0c7ed6e 100644
--- a/Android.bp
+++ b/Android.bp
@@ -34,6 +34,24 @@
     ],
 }
 
+aidl_interface_defaults {
+    name: "audio-aidl-defaults",
+    unstable: true,
+    host_supported: true,
+    backend: {
+        cpp: {
+            enabled: true,
+        },
+        java: {
+            enabled: true,
+        },
+        rust: {
+            enabled: true,
+        },
+    },
+
+}
+
 aidl_interface {
     name: "av-types-aidl",
     unstable: true,
@@ -71,6 +89,18 @@
     },
 }
 
+aidl_interface {
+    name: "audio-permission-aidl",
+    // TODO remove
+    vendor_available: true,
+    double_loadable: true,
+    defaults: ["audio-aidl-defaults"],
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/com/android/media/permission/*",
+    ],
+}
+
 cc_library_headers {
     name: "av-headers",
     export_include_dirs: ["include"],
@@ -105,7 +135,6 @@
 
 aidl_interface {
     name: "av-audio-types-aidl",
-    unstable: true,
     host_supported: true,
     vendor_available: true,
     double_loadable: true,
@@ -125,4 +154,28 @@
             sdk_version: "module_current",
         },
     },
+    versions_with_info: [
+        {
+            version: "1",
+            imports: ["android.hardware.audio.core-V2"],
+        },
+    ],
+    frozen: false,
+
+}
+
+latest_av_audio_types_aidl = "av-audio-types-aidl-V2"
+
+cc_defaults {
+    name: "latest_av_audio_types_aidl_ndk_shared",
+    shared_libs: [
+        latest_av_audio_types_aidl + "-ndk",
+    ],
+}
+
+cc_defaults {
+    name: "latest_av_audio_types_aidl_ndk_static",
+    static_libs: [
+        latest_av_audio_types_aidl + "-ndk",
+    ],
 }
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 1cf63b0..e9b757b 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -4,9 +4,30 @@
 hidden_api_txt_checksorted_hook = ${REPO_ROOT}/tools/platform-compat/hiddenapi/checksorted_sha.sh ${PREUPLOAD_COMMIT} ${REPO_ROOT}
 
 [Builtin Hooks]
+bpfmt = true
 clang_format = true
 
 [Builtin Hooks Options]
+# Only turn on bpfmt check (with sorting) for the following subfolders.
+bpfmt = -s
+    media/audio/
+    media/audioserver/
+    media/libaaudio/
+    media/libaudioclient/
+    media/libaudiofoundation/
+    media/libaudiohal/
+    media/libaudioprocessing/
+    media/libaudiousecasevalidation/
+    media/libeffects/
+    media/libmediametrics/
+    media/libnbaio/
+    media/libnblog/
+    services/audioflinger/
+    services/audioparameterparser/
+    services/audiopolicy/
+    services/medialog/
+    services/oboeservice/
+
 # Only turn on clang-format check for the following subfolders.
 clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
                media/libaudioclient/tests/
diff --git a/aidl/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
index b7a7678..48fb291 100644
--- a/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
+++ b/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
@@ -23,6 +23,8 @@
  * is optional. Vendors may provide an implementation on the system_ext
  * partition. The default instance of this interface, if provided, must be
  * registered prior to the moment when the audio server connects to HAL modules.
+ * Vendors need to set the system property `ro.audio.ihaladaptervendorextension_enabled`
+ * to `true` for the framework to bind to this service.
  *
  * {@hide}
  */
diff --git a/aidl/com/android/media/permission/INativePermissionController.aidl b/aidl/com/android/media/permission/INativePermissionController.aidl
new file mode 100644
index 0000000..a14092d
--- /dev/null
+++ b/aidl/com/android/media/permission/INativePermissionController.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.permission;
+
+import com.android.media.permission.PermissionEnum;
+import com.android.media.permission.UidPackageState;
+
+/**
+ * This interface is used by system_server to communicate permission information
+ * down to native services.
+ * {@hide}
+ */
+interface INativePermissionController {
+    /**
+     * Initialize app-ids and their corresponding packages, to be used for package validation.
+     */
+    void populatePackagesForUids(in List<UidPackageState> initialPackageStates);
+    /**
+     * Replace or populate the list of packages associated with a given uid.
+     * If the list is empty, the uid no longer has any associated packages.
+     */
+    void updatePackagesForUid(in UidPackageState newPackageState);
+    /**
+     * Populate or replace the list of uids which hold a particular permission.
+     * Runtime permissions will need additional checks, and should not use the cache as-is.
+     * Not virtual device aware.
+     * It is possible for updates to the permission state to be delayed during high traffic.
+     * @param perm - Enum representing the permission for which holders are being supplied
+     * @param uids - Uids (not app-ids) which hold the permission. Should be sorted.
+     */
+    void populatePermissionState(in PermissionEnum perm, in int[] uids);
+}
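Reviewer note: the contract above (full-replacement updates, sorted uid lists, extra checks required for runtime permissions) maps naturally onto a small read-mostly cache in the consuming native service. The sketch below is illustrative only: the class name, locking choice, and hard-coded permission count are assumptions, and a real implementation would sit behind the generated BnNativePermissionController binder class rather than a standalone type.

```cpp
// Hypothetical permission cache for a native service consuming
// INativePermissionController updates. Names are illustrative only.
#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <mutex>
#include <vector>

class PermissionCache {
  public:
    // Mirrors PermissionEnum::ENUM_SIZE; kept as a plain constant in this sketch.
    static constexpr size_t kNumPermissions = 14;

    // Full replacement of the holder list for one permission, as delivered by
    // populatePermissionState(). The uids are expected to arrive sorted.
    void setHolders(size_t perm, std::vector<int32_t> uids) {
        std::lock_guard<std::mutex> lock(mLock);
        mHolders[perm] = std::move(uids);
    }

    // Fast membership check; per the interface doc this is only sufficient for
    // non-runtime permissions, since runtime permissions need AppOps checks too.
    bool uidHoldsPermission(size_t perm, int32_t uid) const {
        std::lock_guard<std::mutex> lock(mLock);
        const auto& holders = mHolders[perm];
        return std::binary_search(holders.begin(), holders.end(), uid);
    }

  private:
    mutable std::mutex mLock;
    std::array<std::vector<int32_t>, kNumPermissions> mHolders;
};
```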
diff --git a/aidl/com/android/media/permission/PermissionEnum.aidl b/aidl/com/android/media/permission/PermissionEnum.aidl
new file mode 100644
index 0000000..b08db44
--- /dev/null
+++ b/aidl/com/android/media/permission/PermissionEnum.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.permission;
+
+/**
+ * Enumerates permissions which are tracked/pushed by NativePermissionController
+ * {@hide}
+ */
+enum PermissionEnum {
+    // This is a runtime + WIU permission, which means data delivery should be protected by AppOps
+    // We query the controller only for early fails/hard errors
+    RECORD_AUDIO = 0,
+    MODIFY_AUDIO_ROUTING = 1,
+    MODIFY_AUDIO_SETTINGS = 2,
+    MODIFY_PHONE_STATE = 3,
+    MODIFY_DEFAULT_AUDIO_EFFECTS = 4,
+    WRITE_SECURE_SETTINGS = 5,
+    CALL_AUDIO_INTERCEPTION = 6,
+    ACCESS_ULTRASOUND = 7,
+    CAPTURE_AUDIO_OUTPUT = 8,
+    CAPTURE_MEDIA_OUTPUT = 9,
+    CAPTURE_AUDIO_HOTWORD = 10,
+    CAPTURE_TUNER_AUDIO_INPUT = 11,
+    CAPTURE_VOICE_COMMUNICATION_OUTPUT = 12,
+    BLUETOOTH_CONNECT = 13,
+    ENUM_SIZE = 14, // Not for actual usage, used by Java
+}
diff --git a/services/camera/libcameraservice/utils/CameraThreadState.h b/aidl/com/android/media/permission/UidPackageState.aidl
similarity index 63%
rename from services/camera/libcameraservice/utils/CameraThreadState.h
rename to aidl/com/android/media/permission/UidPackageState.aidl
index e1a70de..747a7ef 100644
--- a/services/camera/libcameraservice/utils/CameraThreadState.h
+++ b/aidl/com/android/media/permission/UidPackageState.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright (C) 2024 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,18 +14,14 @@
  * limitations under the License.
  */
 
-#include <stdint.h>
+package com.android.media.permission;
 
-namespace android {
-class CameraThreadState {
-public:
-  static int64_t clearCallingIdentity();
-
-  static void restoreCallingIdentity(int64_t token);
-
-  static int getCallingUid();
-
-  static int getCallingPid();
-};
-
-} // android
+/**
+ * Entity representing the package names associated with a particular uid/app-id
+ * {@hide}
+ */
+@JavaDerive(equals = true, toString = true)
+parcelable UidPackageState {
+    int uid;
+    @utf8InCpp List<String> packageNames;
+}
diff --git a/aidl_api/av-audio-types-aidl/1/.hash b/aidl_api/av-audio-types-aidl/1/.hash
new file mode 100644
index 0000000..0002682
--- /dev/null
+++ b/aidl_api/av-audio-types-aidl/1/.hash
@@ -0,0 +1 @@
+ef1bc5ed9db445fbfc116cdec6e6ad081458ee40
diff --git a/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
new file mode 100644
index 0000000..a9aa2c1
--- /dev/null
+++ b/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media.audio;
+/* @hide */
+interface IHalAdapterVendorExtension {
+  @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
+  void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
+  android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
+  android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
+  @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
+  enum ParameterScope {
+    MODULE = 0,
+    STREAM = 1,
+  }
+}
diff --git a/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
new file mode 100644
index 0000000..a9aa2c1
--- /dev/null
+++ b/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media.audio;
+/* @hide */
+interface IHalAdapterVendorExtension {
+  @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
+  void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
+  android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
+  android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
+  @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
+  enum ParameterScope {
+    MODULE = 0,
+    STREAM = 1,
+  }
+}
diff --git a/camera/Android.bp b/camera/Android.bp
index 22f1633..d91fcb2 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_camera_framework",
     default_applicable_licenses: ["frameworks_av_camera_license"],
 }
 
@@ -46,6 +47,7 @@
 aconfig_declarations {
     name: "camera_platform_flags",
     package: "com.android.internal.camera.flags",
+    container: "system",
     srcs: ["camera_platform.aconfig"],
 }
 
@@ -64,6 +66,7 @@
     name: "camera_headers",
     export_include_dirs: ["include"],
 }
+
 cc_library {
     name: "libcamera_client",
 
@@ -72,6 +75,7 @@
         local_include_dirs: ["aidl"],
         include_dirs: [
             "frameworks/native/aidl/gui",
+            "frameworks/native/libs/permission/aidl",
         ],
     },
 
@@ -83,50 +87,57 @@
 
         // Source for camera interface parcelables, and manually-written interfaces
         "Camera.cpp",
+        "CameraBase.cpp",
         "CameraMetadata.cpp",
         "CameraParameters.cpp",
-        "CaptureResult.cpp",
         "CameraParameters2.cpp",
         "CameraSessionStats.cpp",
+        "CameraUtils.cpp",
+        "CaptureResult.cpp",
         "ICamera.cpp",
         "ICameraClient.cpp",
         "ICameraRecordingProxy.cpp",
+        "VendorTagDescriptor.cpp",
         "camera2/CaptureRequest.cpp",
         "camera2/ConcurrentCamera.cpp",
         "camera2/OutputConfiguration.cpp",
         "camera2/SessionConfiguration.cpp",
         "camera2/SubmitInfo.cpp",
-        "CameraBase.cpp",
-        "CameraUtils.cpp",
-        "VendorTagDescriptor.cpp",
     ],
 
     shared_libs: [
         "camera_platform_flags_c_lib",
-        "libbase",
-        "libcutils",
-        "libutils",
-        "liblog",
-        "libbinder",
-        "libgui",
-        "libcamera_metadata",
-        "libnativewindow",
+        "framework-permission-aidl-cpp",
         "lib-platform-compat-native-api",
+        "libbase",
+        "libbinder",
+        "libcamera_metadata",
+        "libcutils",
+        "libgui",
+        "liblog",
+        "libnativewindow",
+        "libpermission",
+        "libutils",
     ],
 
     include_dirs: [
-        "system/media/private/camera/include",
         "frameworks/native/include/media/openmax",
+        "system/media/private/camera/include",
     ],
     export_include_dirs: [
-         "include",
-         "include/camera"
+        "include",
+        "include/camera",
     ],
-    export_shared_lib_headers: ["libcamera_metadata", "libnativewindow", "libgui"],
+    export_shared_lib_headers: [
+        "framework-permission-aidl-cpp",
+        "libcamera_metadata",
+        "libgui",
+        "libnativewindow",
+    ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
         "-Wextra",
     ],
 
@@ -146,13 +157,13 @@
     ],
 
     include_dirs: [
-        "system/media/private/camera/include",
         "frameworks/native/include/media/openmax",
+        "system/media/private/camera/include",
     ],
 
     export_include_dirs: [
         "include",
-        "include/camera"
+        "include/camera",
     ],
 }
 
@@ -161,8 +172,8 @@
     name: "libcamera_client_aidl",
     srcs: [
         "aidl/android/hardware/CameraExtensionSessionStats.aidl",
+        "aidl/android/hardware/CameraFeatureCombinationStats.aidl",
         "aidl/android/hardware/ICameraService.aidl",
-        "aidl/android/hardware/CameraIdRemapping.aidl",
         "aidl/android/hardware/ICameraServiceListener.aidl",
         "aidl/android/hardware/ICameraServiceProxy.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 6b040ab..d90f7c9 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -69,12 +69,12 @@
     // deadlock if we call any method of ICamera here.
 }
 
-sp<Camera> Camera::connect(int cameraId, const std::string& clientPackageName,
-        int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait,
-        bool forceSlowJpegMode)
+sp<Camera> Camera::connect(int cameraId, int targetSdkVersion, int rotationOverride,
+        bool forceSlowJpegMode, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy)
 {
-    return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
-            clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode);
+    return CameraBaseT::connect(cameraId, targetSdkVersion, rotationOverride,
+            forceSlowJpegMode, clientAttribution, devicePolicy);
 }
 
 status_t Camera::reconnect()
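Reviewer note: with the signature change above, legacy-API callers now pass an AttributionSourceState plus a device policy instead of discrete package/uid/pid arguments. A minimal caller sketch follows; the function name, package name, SDK level, and devicePolicy value are hypothetical, and the header paths and AttributionSourceState field names are assumptions based on the usual AIDL cpp/framework layout.

```cpp
// Illustrative only: how a legacy-API client might call the new
// Camera::connect() overload introduced in this change.
#include <android/content/AttributionSourceState.h>
#include <android/hardware/ICameraService.h>
#include <camera/Camera.h>

using android::Camera;
using android::content::AttributionSourceState;
using android::hardware::ICameraService;

android::sp<Camera> openBackCamera() {
    AttributionSourceState clientAttribution;
    // USE_CALLING_UID/PID tell cameraserver to resolve the caller's own identity.
    clientAttribution.uid = ICameraService::USE_CALLING_UID;
    clientAttribution.pid = ICameraService::USE_CALLING_PID;
    clientAttribution.packageName = "com.example.camera";  // hypothetical package

    return Camera::connect(/*cameraId*/ 0,
                           /*targetSdkVersion*/ 35,  // illustrative value
                           ICameraService::ROTATION_OVERRIDE_NONE,
                           /*forceSlowJpegMode*/ false,
                           clientAttribution,
                           /*devicePolicy*/ 0);  // assumed default device policy
}
```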
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 6759f3b..774db25 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -73,6 +73,9 @@
     if (res != OK) return res;
 
     res = parcel->writeString16(toString16(clientPackage));
+    if (res != OK) return res;
+
+    res = parcel->writeInt32(deviceId);
     return res;
 }
 
@@ -97,6 +100,7 @@
     if (res != OK) return res;
     clientPackage = toStdString(tempClientPackage);
 
+    res = parcel->readInt32(&deviceId);
     return res;
 }
 
@@ -123,7 +127,7 @@
     };
 
     sp<DeathNotifier>         gDeathNotifier;
-}; // namespace anonymous
+} // namespace anonymous
 
 ///////////////////////////////////////////////////////////
 // CameraBase definition
@@ -157,9 +161,10 @@
 
 template <typename TCam, typename TCamTraits>
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
-                                               const std::string& clientPackageName,
-                                               int clientUid, int clientPid, int targetSdkVersion,
-                                               bool overrideToPortrait, bool forceSlowJpegMode)
+                                               int targetSdkVersion, int rotationOverride,
+                                               bool forceSlowJpegMode,
+                                               const AttributionSourceState& clientAttribution,
+                                               int32_t devicePolicy)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
@@ -169,11 +174,11 @@
     binder::Status ret;
     if (cs != nullptr) {
         TCamConnectService fnConnectService = TCamTraits::fnConnectService;
-        ALOGI("Connect camera (legacy API) - overrideToPortrait %d, forceSlowJpegMode %d",
-                overrideToPortrait, forceSlowJpegMode);
-        ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode,
-                 /*out*/ &c->mCamera);
+        ALOGI("Connect camera (legacy API) - rotationOverride %d, forceSlowJpegMode %d",
+                rotationOverride, forceSlowJpegMode);
+        ret = (cs.get()->*fnConnectService)(cl, cameraId, targetSdkVersion,
+                rotationOverride, forceSlowJpegMode, clientAttribution, devicePolicy,
+                /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
         IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -252,7 +257,8 @@
 }
 
 template <typename TCam, typename TCamTraits>
-int CameraBase<TCam, TCamTraits>::getNumberOfCameras() {
+int CameraBase<TCam, TCamTraits>::getNumberOfCameras(
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
 
     if (!cs.get()) {
@@ -261,8 +267,8 @@
     }
     int32_t count;
     binder::Status res = cs->getNumberOfCameras(
-            ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
-            &count);
+            ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, clientAttribution,
+            devicePolicy, &count);
     if (!res.isOk()) {
         ALOGE("Error reading number of cameras: %s",
                 res.toString8().c_str());
@@ -274,11 +280,12 @@
 // this can be in BaseCamera but it should be an instance method
 template <typename TCam, typename TCamTraits>
 status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
-        bool overrideToPortrait,
+        int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         struct hardware::CameraInfo* cameraInfo) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
     if (cs == 0) return UNKNOWN_ERROR;
-    binder::Status res = cs->getCameraInfo(cameraId, overrideToPortrait, cameraInfo);
+    binder::Status res = cs->getCameraInfo(cameraId, rotationOverride, clientAttribution,
+            devicePolicy, cameraInfo);
     return res.isOk() ? OK : res.serviceSpecificErrorCode();
 }
 
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 057ec99..450bdd8 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -439,6 +439,16 @@
         return err;
     }
 
+    auto mostRequestedFpsRange = std::make_pair(0, 0);
+    if ((err = parcel->readInt32(&mostRequestedFpsRange.first)) != OK) {
+        ALOGE("%s: Failed to read frame rate range min info!", __FUNCTION__);
+        return err;
+    }
+    if ((err = parcel->readInt32(&mostRequestedFpsRange.second)) != OK) {
+        ALOGE("%s: Failed to read frame rate range max info!", __FUNCTION__);
+        return err;
+    }
+
     mCameraId = toStdString(id);
     mFacing = facing;
     mNewCameraState = newCameraState;
@@ -460,6 +470,7 @@
     mUsedZoomOverride = usedZoomOverride;
     mSessionIndex = sessionIdx;
     mCameraExtensionSessionStats = extStats;
+    mMostRequestedFpsRange = mostRequestedFpsRange;
 
     return OK;
 }
@@ -577,6 +588,16 @@
         return err;
     }
 
+    if ((err = parcel->writeInt32(mMostRequestedFpsRange.first)) != OK) {
+        ALOGE("%s: Failed to write frame rate range min info!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mMostRequestedFpsRange.second)) != OK) {
+        ALOGE("%s: Failed to write frame rate range max info!", __FUNCTION__);
+        return err;
+    }
+
     return OK;
 }
 
diff --git a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
index 1c81831..a3c0e69 100644
--- a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
+++ b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
@@ -66,4 +66,9 @@
      * true if advanced extensions are being used, false otherwise
      */
     boolean isAdvanced = false;
+
+    /**
+     * Format of image capture request
+     */
+    int captureFormat;
 }
\ No newline at end of file
diff --git a/camera/aidl/android/hardware/CameraFeatureCombinationStats.aidl b/camera/aidl/android/hardware/CameraFeatureCombinationStats.aidl
new file mode 100644
index 0000000..f4a11b1
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraFeatureCombinationStats.aidl
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/**
+ * {@hide}
+ */
+parcelable CameraFeatureCombinationStats {
+    /**
+     * Values for feature combination queries
+     */
+    const long CAMERA_FEATURE_UNKNOWN = 0;
+    const long CAMERA_FEATURE_60_FPS = 1 << 0;
+    const long CAMERA_FEATURE_STABILIZATION = 1 << 1;
+    const long CAMERA_FEATURE_HLG10 = 1 << 2;
+    const long CAMERA_FEATURE_JPEG = 1 << 3;
+    const long CAMERA_FEATURE_JPEG_R = 1 << 4;
+    const long CAMERA_FEATURE_4K = 1 << 5;
+
+    /**
+     * Values for notifyFeatureCombinationStats type
+     */
+    enum QueryType {
+        QUERY_FEATURE_COMBINATION = 0,
+        QUERY_SESSION_CHARACTERISTICS = 1,
+    }
+
+    @utf8InCpp String mCameraId;
+    int mUid;
+    long mFeatureCombination;
+    int mQueryType;
+    int mStatus;
+}
diff --git a/camera/aidl/android/hardware/CameraIdRemapping.aidl b/camera/aidl/android/hardware/CameraIdRemapping.aidl
deleted file mode 100644
index 453f696..0000000
--- a/camera/aidl/android/hardware/CameraIdRemapping.aidl
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware;
-
-/**
- * Specifies a remapping of Camera Ids.
- *
- * Example: For a given package, a remapping of camera id0 to id1 specifies
- * that any operation to perform on id0 should instead be performed on id1.
- *
- * @hide
- */
-parcelable CameraIdRemapping {
-    /**
-     * Specifies remapping of Camera Ids per package.
-     */
-    parcelable PackageIdRemapping {
-        /** Package Name (e.g. com.android.xyz). */
-        @utf8InCpp String packageName;
-        /**
-         * Ordered list of Camera Ids to replace. Only Camera Ids present in this list will be
-         * affected.
-         */
-        @utf8InCpp List<String> cameraIdsToReplace;
-        /**
-         *  Ordered list of updated Camera Ids, where updatedCameraIds[i] corresponds to
-         *  the updated camera id for cameraIdsToReplace[i].
-         */
-        @utf8InCpp List<String> updatedCameraIds;
-    }
-
-    /**
-     * List of Camera Id remappings to perform.
-     */
-    List<PackageIdRemapping> packageIdRemappings;
-}
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 0eeeb7f..ce6c2d3 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -16,6 +16,7 @@
 
 package android.hardware;
 
+import android.content.AttributionSourceState;
 import android.hardware.ICamera;
 import android.hardware.ICameraClient;
 import android.hardware.camera2.ICameraDeviceUser;
@@ -30,7 +31,6 @@
 import android.hardware.camera2.impl.CameraMetadataNative;
 import android.hardware.ICameraServiceListener;
 import android.hardware.CameraInfo;
-import android.hardware.CameraIdRemapping;
 import android.hardware.CameraStatus;
 import android.hardware.CameraExtensionSessionStats;
 
@@ -63,44 +63,108 @@
     const int CAMERA_TYPE_ALL = 1;
 
     /**
-     * Return the number of camera devices available in the system
+     * Return the number of camera devices available in the system.
+     *
+     * @param type The type of the camera; either CAMERA_TYPE_BACKWARD_COMPATIBLE
+     *        or CAMERA_TYPE_ALL.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
      */
-    int getNumberOfCameras(int type);
+    int getNumberOfCameras(int type, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
-     * Fetch basic camera information for a camera device
+     * If changed, reflect in
+     * frameworks/base/core/java/android/hardware/camera2/CameraManager.java.
+     * We use an enum here because the decision to override to portrait mode / fetch the
+     * rotationOverride as it exists in CameraManager right now is based on a static system
+     * property and not on something that changes dynamically, say on fold state. As a result,
+     * we can't use just a boolean to differentiate between the case where cameraserver should
+     * override to portrait (sensor orientation is 0, 180) and the case where it should just
+     * rotate the sensor feed (sensor orientation is 90, 270).
      */
-    CameraInfo getCameraInfo(int cameraId, boolean overrideToPortrait);
+    const int ROTATION_OVERRIDE_NONE = 0;
+    const int ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT = 1;
+    const int ROTATION_OVERRIDE_ROTATION_ONLY = 2;
 
     /**
-     * Default UID/PID values for non-privileged callers of
-     * connect() and connectDevice()
+     * Fetch basic camera information for a camera.
+     *
+     * @param cameraId The ID of the camera to fetch information for.
+     * @param rotationOverride Whether to override the sensor orientation information to
+     *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+     *        will override the sensor orientation and rotate and crop, while {@link
+     *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+     *        without changing the sensor orientation.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
+     * @return CameraInfo for the camera.
+     */
+    CameraInfo getCameraInfo(int cameraId, int rotationOverride,
+            in AttributionSourceState clientAttribution, int devicePolicy);
+
+    /**
+     * Default UID/PID values for non-privileged callers of connect() and connectDevice(). Can be
+     * used to set the pid/uid fields of AttributionSourceState to indicate the calling uid/pid
+     * should be used.
      */
     const int USE_CALLING_UID = -1;
     const int USE_CALLING_PID = -1;
 
     /**
-     * Open a camera device through the old camera API
+     * Open a camera device through the old camera API.
+     *
+     * @param cameraId The ID of the camera to open.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param rotationOverride Whether to override the sensor orientation information to
+     *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+     *        will override the sensor orientation and rotate and crop, while {@link
+     *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+     *        without changing the sensor orientation.
+     * @param forceSlowJpegMode Whether to force slow jpeg mode.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
      */
     ICamera connect(ICameraClient client,
             int cameraId,
-            @utf8InCpp String opPackageName,
-            int clientUid, int clientPid,
             int targetSdkVersion,
-            boolean overrideToPortrait,
-            boolean forceSlowJpegMode);
+            int rotationOverride,
+            boolean forceSlowJpegMode,
+            in AttributionSourceState clientAttribution,
+            int devicePolicy);
 
     /**
-     * Open a camera device through the new camera API
-     * Only supported for device HAL versions >= 3.2
+     * Open a camera device through the new camera API.
+     * Only supported for device HAL versions >= 3.2.
+     *
+     * @param cameraId The ID of the camera to open.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param rotationOverride Whether to override the sensor orientation information to
+     *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+     *        will override the sensor orientation and rotate and crop, while {@link
+     *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+     *        without changing the sensor orientation.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
      */
     ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
             @utf8InCpp String cameraId,
-            @utf8InCpp String opPackageName,
-            @nullable @utf8InCpp String featureId,
-            int clientUid, int oomScoreOffset,
+            int oomScoreOffset,
             int targetSdkVersion,
-            boolean overrideToPortrait);
+            int rotationOverride,
+            in AttributionSourceState clientAttribution,
+            int devicePolicy);
 
     /**
      * Add listener for changes to camera device and flashlight state.
@@ -118,35 +182,24 @@
     ConcurrentCameraIdCombination[] getConcurrentCameraIds();
 
     /**
-      * Check whether a particular set of session configurations are concurrently supported by the
-      * corresponding camera ids.
-      *
-      * @param sessions the set of camera id and session configuration pairs to be queried.
-      * @param targetSdkVersion the target sdk level of the application calling this function.
-      * @return true  - the set of concurrent camera id and stream combinations is supported.
-      *         false - the set of concurrent camera id and stream combinations is not supported
-      *                 OR the method was called with a set of camera ids not returned by
-      *                 getConcurrentCameraIds().
-      */
+     * Check whether a particular set of session configurations are concurrently supported by the
+     * corresponding camera ids.
+     *
+     * @param sessions the set of camera id and session configuration pairs to be queried.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
+     * @return true  - the set of concurrent camera id and stream combinations is supported.
+     *         false - the set of concurrent camera id and stream combinations is not supported
+     *                 OR the method was called with a set of camera ids not returned by
+     *                 getConcurrentCameraIds().
+     */
     boolean isConcurrentSessionConfigurationSupported(
             in CameraIdAndSessionConfiguration[] sessions,
-            int targetSdkVersion);
-
-    /**
-     * Remap Camera Ids in the CameraService.
-     *
-     * Once this is in effect, all binder calls in the ICameraService that
-     * use logicalCameraId should consult remapping state to arrive at the
-     * correct cameraId to perform the operation on.
-     *
-     * Note: Before the new cameraIdRemapping state is applied, the previous
-     * state is cleared.
-     *
-     * @param cameraIdRemapping the camera ids to remap. Sending an unpopulated
-     *        cameraIdRemapping object will result in clearing of any previous
-     *        cameraIdRemapping state in the camera service.
-     */
-    void remapCameraIds(in CameraIdRemapping cameraIdRemapping);
+            int targetSdkVersion, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Inject Session Params into an existing camera session.
@@ -168,9 +221,23 @@
     /**
      * Read the static camera metadata for a camera device.
      * Only supported for device HAL versions >= 3.2
+     *
+     * @param cameraId The ID of the camera to fetch metadata for.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param rotationOverride Whether to override the sensor orientation information to
+     *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+     *        will override the sensor orientation and rotate and crop, while {@link
+     *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+     *        without changing the sensor orientation.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
+     * @return Characteristics for the given camera.
      */
     CameraMetadataNative getCameraCharacteristics(@utf8InCpp String cameraId, int targetSdkVersion,
-            boolean overrideToPortrait);
+            int rotationOverride, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Read in the vendor tag descriptors from the camera module HAL.
@@ -206,14 +273,47 @@
     ICameraInjectionSession injectCamera(@utf8InCpp String packageName, @utf8InCpp String internalCamId,
             @utf8InCpp String externalCamId, in ICameraInjectionCallback CameraInjectionCallback);
 
-    void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder);
+    /**
+     * Set the torch mode for a camera device.
+     *
+     * @param cameraId The ID of the camera to set torch mode for.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
+     */
+    void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder,
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
-    // Change the brightness level of the flash unit associated with cameraId to strengthLevel.
-    // If the torch is in OFF state and strengthLevel > 0 then the torch will also be turned ON.
-    void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel, IBinder clientBinder);
+    /**
+     * Change the brightness level of the flash unit associated with cameraId to strengthLevel.
+     * If the torch is in OFF state and strengthLevel > 0 then the torch will also be turned ON.
+     *
+     * @param cameraId The ID of the camera.
+     * @param strengthLevel The torch strength level to set for the camera.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
+     */
+    void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel,
+            IBinder clientBinder, in AttributionSourceState clientAttribution, int devicePolicy);
 
-    // Get the brightness level of the flash unit associated with cameraId.
-    int getTorchStrengthLevel(@utf8InCpp String cameraId);
+    /**
+     * Get the brightness level of the flash unit associated with cameraId.
+     *
+     * @param cameraId The ID of the camera.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
+     * @return Torch strength level for the camera.
+     */
+    int getTorchStrengthLevel(@utf8InCpp String cameraId,
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Notify the camera service of a system event.  Should only be called from system_server.
@@ -274,18 +374,64 @@
     const int DEVICE_STATE_FOLDED = 4;
     const int DEVICE_STATE_LAST_FRAMEWORK_BIT = 0x80000000; // 1 << 31;
 
-    // Create a CaptureRequest metadata based on template id
-    CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId);
+    /**
+     * Create a CaptureRequest metadata based on template id
+     *
+     * @param cameraId The camera id to create the CaptureRequest for.
+     * @param templateId The template id to create the CaptureRequest for.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
+     * @return Metadata representing the CaptureRequest.
+     */
+    CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId,
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
-      * Check whether a particular session configuration with optional session parameters
-      * has camera device support.
-      *
-      * @param cameraId The camera id to query session configuration on
-      * @param sessionConfiguration Specific session configuration to be verified.
-      * @return true  - in case the stream combination is supported.
-      *         false - in case there is no device support.
-      */
+     * Check whether a particular session configuration with optional session parameters
+     * has camera device support.
+     *
+     * @param cameraId The camera id to query session configuration for
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param sessionConfiguration Specific session configuration to be verified.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
+     * @return true  - in case the stream combination is supported.
+     *         false - in case there is no device support.
+     */
     boolean isSessionConfigurationWithParametersSupported(@utf8InCpp String cameraId,
-            in SessionConfiguration sessionConfiguration);
+            int targetSdkVersion, in SessionConfiguration sessionConfiguration,
+            in AttributionSourceState clientAttribution, int devicePolicy);
+
+    /**
+     * Get the camera characteristics for a particular session configuration for
+     * the given camera device.
+     *
+     * @param cameraId ID of the device for which the session characteristics must be fetched.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param rotationOverride Whether to override the sensor orientation information to
+     *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+     *        will override the sensor orientation and rotate and crop, while {@link
+     *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+     *        without changing the sensor orientation.
+     * @param sessionConfiguration Session configuration for which the characteristics
+     *                             must be fetched.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are
+     *                     exposed for a custom policy, and only real cameras are exposed for the
+     *                     default policy.
+     * @return Characteristics associated with the given session.
+     */
+    CameraMetadataNative getSessionCharacteristics(@utf8InCpp String cameraId,
+            int targetSdkVersion,
+            int rotationOverride,
+            in SessionConfiguration sessionConfiguration,
+            in AttributionSourceState clientAttribution,
+            int devicePolicy);
 }
diff --git a/camera/aidl/android/hardware/ICameraServiceListener.aidl b/camera/aidl/android/hardware/ICameraServiceListener.aidl
index 23a87d3..9c8c88a 100644
--- a/camera/aidl/android/hardware/ICameraServiceListener.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceListener.aidl
@@ -51,14 +51,20 @@
     // Use to initialize variables only
     const int STATUS_UNKNOWN          = -1;
 
-    oneway void onStatusChanged(int status, @utf8InCpp String cameraId);
+    // We pass the deviceId associated with a camera in the callbacks: the id of the virtual
+    // device owning the camera (for virtual cameras), or kDefaultDeviceId (for real
+    // cameras). The deviceId is passed so that the API layer (CameraManagerGlobal) can filter
+    // out the cameras that don't correspond to the context associated with the caller who
+    // registers a callback.
+
+    oneway void onStatusChanged(int status, @utf8InCpp String cameraId, int deviceId);
 
     /**
      * Notify registered client about status changes for a physical camera backing
      * a logical camera.
      */
     oneway void onPhysicalCameraStatusChanged(int status, @utf8InCpp String cameraId,
-            @utf8InCpp String physicalCameraId);
+            @utf8InCpp String physicalCameraId, int deviceId);
 
     /**
      * The torch mode status of a camera.
@@ -82,9 +88,9 @@
     // Use to initialize variables only
     const int TORCH_STATUS_UNKNOWN = -1;
 
-    oneway void onTorchStatusChanged(int status, @utf8InCpp String cameraId);
+    oneway void onTorchStatusChanged(int status, @utf8InCpp String cameraId, int deviceId);
 
-    oneway void onTorchStrengthLevelChanged(@utf8InCpp String cameraId, int newTorchStrength);
+    oneway void onTorchStrengthLevelChanged(@utf8InCpp String cameraId, int newTorchStrength, int deviceId);
 
     /**
      * Notify registered clients about camera access priority changes.
@@ -98,6 +104,6 @@
      * Only clients with android.permission.CAMERA_OPEN_CLOSE_LISTENER permission
      * will receive such callbacks.
      */
-    oneway void onCameraOpened(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId);
-    oneway void onCameraClosed(@utf8InCpp String cameraId);
+    oneway void onCameraOpened(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId, int deviceId);
+    oneway void onCameraClosed(@utf8InCpp String cameraId, int deviceId);
 }
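Reviewer note: every listener callback now carries the owning deviceId, so clients tied to a single device context can drop foreign callbacks up front. A minimal sketch follows, assuming the cpp backend's BnCameraServiceListener (with std::string for @utf8InCpp String) and id 0 for the default (real) device; the subclass and its name are hypothetical, and the remaining callbacks would be filtered the same way.

```cpp
// Hypothetical listener that ignores callbacks for cameras owned by other
// (virtual) devices, keyed off the new deviceId argument.
#include <android/hardware/BnCameraServiceListener.h>
#include <string>

class DefaultDeviceListener : public android::hardware::BnCameraServiceListener {
  public:
    explicit DefaultDeviceListener(int32_t contextDeviceId) : mDeviceId(contextDeviceId) {}

    android::binder::Status onStatusChanged(int32_t status, const std::string& cameraId,
                                            int32_t deviceId) override {
        if (deviceId != mDeviceId) {
            return android::binder::Status::ok();  // not for our device context
        }
        // ... handle the availability change for cameraId/status here ...
        (void)status; (void)cameraId;
        return android::binder::Status::ok();
    }

    // The other ICameraServiceListener callbacks would apply the same deviceId filter.

  private:
    const int32_t mDeviceId;  // assumed 0 for the default (real) device
};
```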
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index dcd69b0..887a68b 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -16,6 +16,7 @@
 
 package android.hardware;
 
+import android.hardware.CameraFeatureCombinationStats;
 import android.hardware.CameraSessionStats;
 import android.hardware.CameraExtensionSessionStats;
 
@@ -38,6 +39,12 @@
     oneway void notifyCameraState(in CameraSessionStats cameraSessionStats);
 
     /**
+     * Notify feature combination query for a camera device.
+     */
+    oneway void notifyFeatureCombinationStats(
+            in CameraFeatureCombinationStats cameraFeatureCombinationStats);
+
+    /**
      * Returns the necessary rotate and crop override for the top activity which
      * will be one of ({@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_NONE},
      * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_90},
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 843e0d4..8e1fcc0 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -99,15 +99,6 @@
       */
     boolean isSessionConfigurationSupported(in SessionConfiguration sessionConfiguration);
 
-    /**
-     * Get the camera characteristics for a particular session configuration
-     *
-     * @param sessionConfiguration Specific session configuration for which the characteristics
-     * are fetched.
-     * @return - characteristics associated with the given session.
-     */
-    CameraMetadataNative getSessionCharacteristics(in SessionConfiguration sessionConfiguration);
-
     void deleteStream(int streamId);
 
     /**
diff --git a/camera/camera2/ConcurrentCamera.cpp b/camera/camera2/ConcurrentCamera.cpp
index 67aa876..ac442ed 100644
--- a/camera/camera2/ConcurrentCamera.cpp
+++ b/camera/camera2/ConcurrentCamera.cpp
@@ -32,7 +32,8 @@
 ConcurrentCameraIdCombination::ConcurrentCameraIdCombination() = default;
 
 ConcurrentCameraIdCombination::ConcurrentCameraIdCombination(
-        std::vector<std::string> &&combination) : mConcurrentCameraIds(std::move(combination)) { }
+        std::vector<std::pair<std::string, int32_t>> &&combination)
+            : mConcurrentCameraIdDeviceIdPairs(std::move(combination)) { }
 
 ConcurrentCameraIdCombination::~ConcurrentCameraIdCombination() = default;
 
@@ -42,25 +43,29 @@
         return BAD_VALUE;
     }
     status_t err = OK;
-    mConcurrentCameraIds.clear();
-    int32_t cameraIdCount = 0;
-    if ((err = parcel->readInt32(&cameraIdCount)) != OK) {
-        ALOGE("%s: Failed to read the camera id count from parcel: %d", __FUNCTION__, err);
+    mConcurrentCameraIdDeviceIdPairs.clear();
+    int32_t cameraCount = 0;
+    if ((err = parcel->readInt32(&cameraCount)) != OK) {
+        ALOGE("%s: Failed to read the camera count from parcel: %d", __FUNCTION__, err);
         return err;
     }
-    for (int32_t i = 0; i < cameraIdCount; i++) {
-        String16 id;
-        if ((err = parcel->readString16(&id)) != OK) {
+    for (int32_t i = 0; i < cameraCount; i++) {
+        String16 cameraId;
+        if ((err = parcel->readString16(&cameraId)) != OK) {
             ALOGE("%s: Failed to read camera id!", __FUNCTION__);
             return err;
         }
-        mConcurrentCameraIds.push_back(toStdString(id));
+        int32_t deviceId;
+        if ((err = parcel->readInt32(&deviceId)) != OK) {
+            ALOGE("%s: Failed to read device id!", __FUNCTION__);
+            return err;
+        }
+        mConcurrentCameraIdDeviceIdPairs.push_back({toStdString(cameraId), deviceId});
     }
     return OK;
 }
 
 status_t ConcurrentCameraIdCombination::writeToParcel(android::Parcel* parcel) const {
-
     if (parcel == nullptr) {
         ALOGE("%s: Null parcel", __FUNCTION__);
         return BAD_VALUE;
@@ -68,16 +73,20 @@
 
     status_t err = OK;
 
-    if ((err = parcel->writeInt32(mConcurrentCameraIds.size())) != OK) {
+    if ((err = parcel->writeInt32(mConcurrentCameraIdDeviceIdPairs.size())) != OK) {
         ALOGE("%s: Failed to write the camera id count to parcel: %d", __FUNCTION__, err);
         return err;
     }
 
-    for (const auto &it : mConcurrentCameraIds) {
-        if ((err = parcel->writeString16(toString16(it))) != OK) {
+    for (const auto &it : mConcurrentCameraIdDeviceIdPairs) {
+        if ((err = parcel->writeString16(toString16(it.first))) != OK) {
             ALOGE("%s: Failed to write the camera id string to parcel: %d", __FUNCTION__, err);
             return err;
         }
+        if ((err = parcel->writeInt32(it.second)) != OK) {
+            ALOGE("%s: Failed to write the device id integer to parcel: %d", __FUNCTION__, err);
+            return err;
+        }
     }
     return OK;
 }
@@ -105,7 +114,6 @@
 }
 
 status_t CameraIdAndSessionConfiguration::writeToParcel(android::Parcel* parcel) const {
-
     if (parcel == nullptr) {
         ALOGE("%s: Null parcel", __FUNCTION__);
         return BAD_VALUE;
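
For reference, the combination is now parcelled as a count followed by interleaved (camera id, device id) entries. Below is a minimal standalone sketch of that layout; it is hypothetical illustration only, assuming the android::Parcel API and the toString16() helper used in the hunk above.

    #include <binder/Parcel.h>
    #include <camera/StringUtils.h>  // toString16(), assumed available as in the hunk above
    #include <stdint.h>

    #include <string>
    #include <utility>
    #include <vector>

    namespace android {

    // Hypothetical helper mirroring ConcurrentCameraIdCombination::writeToParcel():
    // the layout is [int32 count] followed by [string16 cameraId][int32 deviceId] per entry.
    status_t writeCameraIdDeviceIdPairs(
            const std::vector<std::pair<std::string, int32_t>>& pairs, Parcel* parcel) {
        status_t err = parcel->writeInt32(pairs.size());
        if (err != OK) return err;
        for (const auto& [cameraId, deviceId] : pairs) {
            if ((err = parcel->writeString16(toString16(cameraId))) != OK) return err;
            if ((err = parcel->writeInt32(deviceId)) != OK) return err;
        }
        return OK;
    }

    }  // namespace android
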
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 73b153c..2d1af32 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -25,6 +25,7 @@
 #include <binder/Parcel.h>
 #include <gui/view/Surface.h>
 #include <system/camera_metadata.h>
+#include <system/graphics.h>
 #include <utils/String8.h>
 
 
@@ -102,6 +103,25 @@
     return mUseReadoutTimestamp;
 }
 
+int OutputConfiguration::getFormat() const {
+    return mFormat;
+}
+
+int OutputConfiguration::getDataspace() const {
+    return mDataspace;
+}
+
+int64_t OutputConfiguration::getUsage() const {
+    return mUsage;
+}
+
+bool OutputConfiguration::isComplete() const {
+    return !((mSurfaceType == SURFACE_TYPE_MEDIA_RECORDER ||
+             mSurfaceType == SURFACE_TYPE_MEDIA_CODEC ||
+             mSurfaceType == SURFACE_TYPE_IMAGE_READER) &&
+             mGbps.empty());
+}
+
 OutputConfiguration::OutputConfiguration() :
         mRotation(INVALID_ROTATION),
         mSurfaceSetID(INVALID_SET_ID),
@@ -116,7 +136,10 @@
         mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
         mTimestampBase(TIMESTAMP_BASE_DEFAULT),
         mMirrorMode(MIRROR_MODE_AUTO),
-        mUseReadoutTimestamp(false) {
+        mUseReadoutTimestamp(false),
+        mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
+        mDataspace(0),
+        mUsage(0) {
 }
 
 OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -234,6 +257,24 @@
         return err;
     }
 
+    int format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    if ((err = parcel->readInt32(&format)) != OK) {
+        ALOGE("%s: Failed to read format from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int dataspace = 0;
+    if ((err = parcel->readInt32(&dataspace)) != OK) {
+        ALOGE("%s: Failed to read dataspace from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int64_t usage = 0;
+    if ((err = parcel->readInt64(&usage)) != OK) {
+        ALOGE("%s: Failed to read usage flag from parcel", __FUNCTION__);
+        return err;
+    }
+
     mRotation = rotation;
     mSurfaceSetID = setID;
     mSurfaceType = surfaceType;
@@ -256,13 +297,17 @@
     mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
     mDynamicRangeProfile = dynamicProfile;
     mColorSpace = colorSpace;
+    mFormat = format;
+    mDataspace = dataspace;
+    mUsage = usage;
 
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
           " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
-          ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d",
+          ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d, format = %d, "
+          "dataspace = %d, usage = %" PRId64,
           __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
           mPhysicalCameraId.c_str(), mIsMultiResolution, mStreamUseCase, timestampBase,
-          mMirrorMode, mUseReadoutTimestamp);
+          mMirrorMode, mUseReadoutTimestamp, mFormat, mDataspace, mUsage);
 
     return err;
 }
@@ -283,6 +328,9 @@
     mTimestampBase = TIMESTAMP_BASE_DEFAULT;
     mMirrorMode = MIRROR_MODE_AUTO;
     mUseReadoutTimestamp = false;
+    mFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    mDataspace = 0;
+    mUsage = 0;
 }
 
 OutputConfiguration::OutputConfiguration(
@@ -296,7 +344,9 @@
     mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
     mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
     mTimestampBase(TIMESTAMP_BASE_DEFAULT),
-    mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false) { }
+    mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false),
+    mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), mDataspace(0),
+    mUsage(0) { }
 
 status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
 
@@ -362,6 +412,15 @@
     err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mFormat);
+    if (err != OK) return err;
+
+    err = parcel->writeInt32(mDataspace);
+    if (err != OK) return err;
+
+    err = parcel->writeInt64(mUsage);
+    if (err != OK) return err;
+
     return OK;
 }
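
The three new fields are appended at the tail of the parcel in the same order on the write and read paths (int32 format, int32 dataspace, int64 usage), so both sides must agree on that order. A small standalone round-trip sketch of just that tail (hypothetical, not the framework code):

    #include <binder/Parcel.h>
    #include <stdint.h>
    #include <system/graphics.h>  // HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED

    int main() {
        android::Parcel p;
        // Write the tail in the order used by OutputConfiguration::writeToParcel().
        p.writeInt32(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);  // format
        p.writeInt32(0);                                        // dataspace (default)
        p.writeInt64(0);                                        // usage (default)

        // Read it back in the same order, as readFromParcel() does.
        p.setDataPosition(0);
        int32_t format = 0, dataspace = 0;
        int64_t usage = 0;
        p.readInt32(&format);
        p.readInt32(&dataspace);
        p.readInt64(&usage);
        return format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ? 0 : 1;
    }
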
 
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 076394d..e916985 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -1,59 +1,71 @@
 package: "com.android.internal.camera.flags"
+container: "system"
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_hsum_permission"
-     description: "Camera access by headless system user"
-     bug: "273539631"
+    namespace: "camera_platform"
+    name: "camera_hsum_permission"
+    is_exported: true
+    description: "Camera access by headless system user"
+    bug: "273539631"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "concert_mode"
-     description: "Introduces a new concert mode camera extension type"
-     bug: "297083874"
+    namespace: "camera_platform"
+    name: "concert_mode"
+    is_exported: true
+    description: "Introduces a new concert mode camera extension type"
+    bug: "297083874"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "feature_combination_query"
-     description: "Query feature combination support and session specific characteristics"
-     bug: "309627704"
+    namespace: "camera_platform"
+    name: "feature_combination_query"
+    is_exported: true
+    description: "Query feature combination support and session specific characteristics"
+    bug: "309627704"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "log_ultrawide_usage"
-     description: "Enable measuring how much usage there is for ultrawide-angle cameras"
-     bug: "300515796"
+    namespace: "camera_platform"
+    name: "watch_foreground_changes"
+    description: "Request AppOps to notify changes in the foreground status of the client"
+    bug: "290086710"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_manual_flash_strength_control"
-     description: "Flash brightness level control in manual flash mode"
-     bug: "238348881"
+    namespace: "camera_platform"
+    name: "log_ultrawide_usage"
+    description: "Enable measuring how much usage there is for ultrawide-angle cameras"
+    bug: "300515796"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "lazy_aidl_wait_for_service"
-     description: "Use waitForService instead of getService with lazy AIDL HALs"
-     bug: "285546208"
+    namespace: "camera_platform"
+    name: "camera_manual_flash_strength_control"
+    is_exported: true
+    description: "Flash brightness level control in manual flash mode"
+    bug: "238348881"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "log_zoom_override_usage"
-     description: "Enable measuring how much usage there is for zoom settings overrde"
-     bug: "307409002"
+    namespace: "camera_platform"
+    name: "lazy_aidl_wait_for_service"
+    description: "Use waitForService instead of getService with lazy AIDL HALs"
+    bug: "285546208"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "session_hal_buf_manager"
-     description: "Enable or disable HAL buffer manager as requested by the camera HAL"
-     bug: "311263114"
+    namespace: "camera_platform"
+    name: "log_zoom_override_usage"
+    description: "Enable measuring how much usage there is for zoom settings overrde"
+    bug: "307409002"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "session_hal_buf_manager"
+    description: "Enable or disable HAL buffer manager as requested by the camera HAL"
+    bug: "311263114"
 }
 
 flag {
@@ -64,22 +76,204 @@
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_ae_mode_low_light_boost"
-     description: "An AE mode that enables increased brightening in low light scenes"
-     bug: "312803148"
+    namespace: "camera_platform"
+    name: "camera_ae_mode_low_light_boost"
+    is_exported: true
+    description: "An AE mode that enables increased brightening in low light scenes"
+    bug: "312803148"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "multiresolution_imagereader_usage_config"
-     description: "Enable creating MultiResolutionImageReader with usage flag configuration"
-     bug: "301588215"
+    namespace: "camera_platform"
+    name: "multiresolution_imagereader_usage_config"
+    description: "Enable creating MultiResolutionImageReader with usage flag configuration"
+    bug: "301588215"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "use_ro_board_api_level_for_vndk_version"
-     description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
-     bug: "312315580"
+    namespace: "camera_platform"
+    name: "use_ro_board_api_level_for_vndk_version"
+    description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
+    bug: "312315580"
 }
+
+flag {
+    namespace: "camera_platform"
+    name: "camera_extensions_characteristics_get"
+    is_exported: true
+    description: "Enable get extension specific camera characteristics API"
+    bug: "280649914"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "delay_lazy_hal_instantiation"
+    description: "Only trigger lazy HAL instantiation when the HAL is needed for an operation."
+    bug: "319735068"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "return_buffers_outside_locks"
+    description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
+    bug: "315526878"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "camera_device_setup"
+    is_exported: true
+    description: "Create an intermediate Camera Device class for limited CameraDevice access."
+    bug: "320741775"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "camera_privacy_allowlist"
+    is_exported: true
+    description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
+    bug: "282814430"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "surface_ipc"
+    description: "Optimize Surface binder IPC"
+    bug: "323292530"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "extension_10_bit"
+    is_exported: true
+    description: "Enables 10-bit support in the camera extensions."
+    bug: "316375635"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "single_thread_executor"
+    description: "Ensure device logic is run within one thread."
+    bug: "305857746"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "surface_leak_fix"
+    description: "Address Surface release leaks in CaptureRequest"
+    bug: "324071855"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "concert_mode_api"
+    description: "Covers the eyes free videography public facing API"
+    bug: "297083874"
+}
+
+
+flag {
+    namespace: "camera_platform"
+    name: "cache_permission_services"
+    description: "Cache IPermissionController and IPermissionChecker in CameraService to reduce query latency."
+    bug: "326139956"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "check_session_support_before_session_char"
+    description: "Validate that a SessionConfiguration is supported before fetching SessionCharacteristics."
+    bug: "327008530"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "calculate_perf_override_during_session_support"
+    description: "Dynamically calulate whether perf class override should be set in isSessionConfigurationWithParametersSupported."
+    bug: "332975108"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "analytics_24q3"
+    description: "Miscellaneous camera platform metrics for 24Q3"
+    bug: "332557570"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "realtime_priority_bump"
+    description: "Bump the scheduling priority of performance critical code paths"
+    bug: "336628522"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "use_system_api_for_vndk_version"
+    description: "ro.board.api_level isn't reliable. Use system api to replace ro.vndk.version"
+    bug: "312315580"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "multi_res_raw_reprocessing"
+    description: "Allow multi-resolution raw reprocessing without reprocessing capability"
+    bug: "336922859"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "api1_release_binderlock_before_cameraservice_disconnect"
+    description: "Drop mSerializationLock in Camera1 client when calling into CameraService"
+    bug: "351778072"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "bump_preview_frame_space_priority"
+    description: "Increase the PreviewFrameSpacer thread priority"
+    bug: "355665306"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "dumpsys_request_stream_ids"
+    description: "Add stream id information to last request dumpsys"
+    bug: "357913929"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
\ No newline at end of file
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 13b705c..6862cb1 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_camera_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_camera_license"
diff --git a/camera/cameraserver/cameraserver.rc b/camera/cameraserver/cameraserver.rc
index e307653..63fa2b0 100644
--- a/camera/cameraserver/cameraserver.rc
+++ b/camera/cameraserver/cameraserver.rc
@@ -5,5 +5,6 @@
     ioprio rt 4
     task_profiles CameraServiceCapacity MaxPerformance
     rlimit rtprio 10 10
+    capabilities SYS_NICE
     onrestart class_restart cameraWatchdog
     interface aidl android.frameworks.cameraservice.service.ICameraService/default
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 6655f82..646b139 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -27,6 +27,7 @@
 #include <camera/android/hardware/ICamera.h>
 #include <camera/android/hardware/ICameraClient.h>
 #include <camera/CameraBase.h>
+#include <camera/CameraUtils.h>
 
 namespace android {
 
@@ -58,7 +59,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status (::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const std::string&, int, int, int, bool, bool,
+        int, int, int, bool, const AttributionSourceState&, int32_t,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -80,9 +81,9 @@
             // construct a camera client from an existing remote
     static  sp<Camera>  create(const sp<::android::hardware::ICamera>& camera);
     static  sp<Camera>  connect(int cameraId,
-                                const std::string& clientPackageName,
-                                int clientUid, int clientPid, int targetSdkVersion,
-                                bool overrideToPortrait, bool forceSlowJpegMode);
+                                int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+                                const AttributionSourceState& clientAttribution,
+                                int32_t devicePolicy = 0);
 
             virtual     ~Camera();
 
@@ -197,6 +198,6 @@
     friend class        CameraBase;
 };
 
-}; // namespace android
+} // namespace android
 
 #endif
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 6af7f2a..d98abe4 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_HARDWARE_CAMERA_BASE_H
 #define ANDROID_HARDWARE_CAMERA_BASE_H
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/ICameraServiceListener.h>
 
 #include <utils/Mutex.h>
@@ -62,16 +63,15 @@
 
     virtual status_t writeToParcel(android::Parcel* parcel) const;
     virtual status_t readFromParcel(const android::Parcel* parcel);
-
 };
 
 /**
- * Basic status information about a camera device - its name and its current
+ * Basic status information about a camera device - its id and its current
  * state.
  */
 struct CameraStatus : public android::Parcelable {
     /**
-     * The name of the camera device
+     * The app-visible id of the camera device
      */
     std::string cameraId;
 
@@ -90,20 +90,27 @@
      */
     std::string clientPackage;
 
+    /**
+     * The id of the device owning the camera. For virtual cameras, this is the id of the virtual
+     * device owning the camera. For real cameras, this is the default device id, i.e.,
+     * kDefaultDeviceId.
+     */
+    int32_t deviceId;
+
     virtual status_t writeToParcel(android::Parcel* parcel) const;
     virtual status_t readFromParcel(const android::Parcel* parcel);
 
     CameraStatus(std::string id, int32_t s, const std::vector<std::string>& unavailSubIds,
-            const std::string& clientPkg) : cameraId(id), status(s),
-            unavailablePhysicalIds(unavailSubIds), clientPackage(clientPkg) {}
+            const std::string& clientPkg, int32_t devId) : cameraId(id), status(s),
+            unavailablePhysicalIds(unavailSubIds), clientPackage(clientPkg), deviceId(devId) {}
     CameraStatus() : status(ICameraServiceListener::STATUS_PRESENT) {}
 };
 
 } // namespace hardware
 
+using content::AttributionSourceState;
 using hardware::CameraInfo;
 
-
 template <typename TCam>
 struct CameraTraits {
 };
@@ -118,17 +125,20 @@
     typedef typename TCamTraits::TCamConnectService TCamConnectService;
 
     static sp<TCam>      connect(int cameraId,
-                                 const std::string& clientPackageName,
-                                 int clientUid, int clientPid, int targetSdkVersion,
-                                 bool overrideToPortrait, bool forceSlowJpegMode);
+                                 int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+                                 const AttributionSourceState &clientAttribution,
+                                 int32_t devicePolicy);
     virtual void         disconnect();
 
     void                 setListener(const sp<TCamListener>& listener);
 
-    static int           getNumberOfCameras();
+    static int           getNumberOfCameras(const AttributionSourceState& clientAttribution,
+                                            int32_t devicePolicy);
 
     static status_t      getCameraInfo(int cameraId,
-                                       bool overrideToPortrait,
+                                       int rotationOverride,
+                                       const AttributionSourceState& clientAttribution,
+                                       int32_t devicePolicy,
                                        /*out*/
                                        struct hardware::CameraInfo* cameraInfo);
 
@@ -167,6 +177,6 @@
     typedef CameraBase<TCam>         CameraBaseT;
 };
 
-}; // namespace android
+} // namespace android
 
 #endif
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 06c154d..34ee882 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -167,6 +167,8 @@
 
     CameraExtensionSessionStats mCameraExtensionSessionStats;
 
+    std::pair<int32_t, int32_t> mMostRequestedFpsRange;
+
     // Constructors
     CameraSessionStats();
     CameraSessionStats(const std::string& cameraId, int facing, int newCameraState,
diff --git a/camera/include/camera/CameraUtils.h b/camera/include/camera/CameraUtils.h
index 31d25e7..d358407 100644
--- a/camera/include/camera/CameraUtils.h
+++ b/camera/include/camera/CameraUtils.h
@@ -26,6 +26,9 @@
 
 namespace android {
 
+// Device id of a context associated with the default device.
+constexpr int32_t kDefaultDeviceId = 0;
+
 /**
  * CameraUtils contains utility methods that are shared between the native
  * camera client, and the camera service.
diff --git a/camera/include/camera/camera2/ConcurrentCamera.h b/camera/include/camera/camera2/ConcurrentCamera.h
index ac99fd5..2a65da8 100644
--- a/camera/include/camera/camera2/ConcurrentCamera.h
+++ b/camera/include/camera/camera2/ConcurrentCamera.h
@@ -28,9 +28,9 @@
 namespace utils {
 
 struct ConcurrentCameraIdCombination : public Parcelable {
-    std::vector<std::string> mConcurrentCameraIds;
+    std::vector<std::pair<std::string, int32_t>> mConcurrentCameraIdDeviceIdPairs;
     ConcurrentCameraIdCombination();
-    ConcurrentCameraIdCombination(std::vector<std::string> &&combination);
+    ConcurrentCameraIdCombination(std::vector<std::pair<std::string, int32_t>> &&combination);
     virtual ~ConcurrentCameraIdCombination();
 
     virtual status_t writeToParcel(android::Parcel *parcel) const override;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 3f74b4a..83ce39d 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -35,10 +35,13 @@
 
     static const int INVALID_ROTATION;
     static const int INVALID_SET_ID;
-    enum SurfaceType{
+    enum SurfaceType {
         SURFACE_TYPE_UNKNOWN = -1,
         SURFACE_TYPE_SURFACE_VIEW = 0,
-        SURFACE_TYPE_SURFACE_TEXTURE = 1
+        SURFACE_TYPE_SURFACE_TEXTURE = 1,
+        SURFACE_TYPE_MEDIA_RECORDER = 2,
+        SURFACE_TYPE_MEDIA_CODEC = 3,
+        SURFACE_TYPE_IMAGE_READER = 4
     };
     enum TimestampBaseType {
         TIMESTAMP_BASE_DEFAULT = 0,
@@ -71,6 +74,10 @@
     int                        getTimestampBase() const;
     int                        getMirrorMode() const;
     bool                       useReadoutTimestamp() const;
+    int                        getFormat() const;
+    int                        getDataspace() const;
+    int64_t                    getUsage() const;
+    bool                       isComplete() const;
 
     // set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
     const std::vector<int32_t>&            getSensorPixelModesUsed() const;
@@ -98,7 +105,7 @@
     OutputConfiguration(const std::vector<sp<IGraphicBufferProducer>>& gbps,
                         int rotation, const std::string& physicalCameraId,
                         int surfaceSetID = INVALID_SET_ID,
-                        int surfaceType = OutputConfiguration::SURFACE_TYPE_UNKNOWN, int width = 0,
+                        int surfaceType = SURFACE_TYPE_UNKNOWN, int width = 0,
                         int height = 0, bool isShared = false);
 
     bool operator == (const OutputConfiguration& other) const {
@@ -118,7 +125,10 @@
                 mStreamUseCase == other.mStreamUseCase &&
                 mTimestampBase == other.mTimestampBase &&
                 mMirrorMode == other.mMirrorMode &&
-                mUseReadoutTimestamp == other.mUseReadoutTimestamp);
+                mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
+                mFormat == other.mFormat &&
+                mDataspace == other.mDataspace &&
+                mUsage == other.mUsage);
     }
     bool operator != (const OutputConfiguration& other) const {
         return !(*this == other);
@@ -173,6 +183,15 @@
         if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
             return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
         }
+        if (mFormat != other.mFormat) {
+            return mFormat < other.mFormat;
+        }
+        if (mDataspace != other.mDataspace) {
+            return mDataspace < other.mDataspace;
+        }
+        if (mUsage != other.mUsage) {
+            return mUsage < other.mUsage;
+        }
         return gbpsLessThan(other);
     }
 
@@ -203,6 +222,9 @@
     int                        mTimestampBase;
     int                        mMirrorMode;
     bool                       mUseReadoutTimestamp;
+    int                        mFormat;
+    int                        mDataspace;
+    int64_t                    mUsage;
 };
 } // namespace params
 } // namespace camera2
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 165395a..379c0b5 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -17,6 +17,7 @@
 // frameworks/av/include.
 
 package {
+    default_team: "trendy_team_camera_framework",
     default_applicable_licenses: ["frameworks_av_camera_ndk_license"],
 }
 
@@ -65,38 +66,41 @@
 cc_library_shared {
     name: "libcamera2ndk",
     srcs: [
+        "NdkCameraCaptureSession.cpp",
+        "NdkCameraDevice.cpp",
         "NdkCameraManager.cpp",
         "NdkCameraMetadata.cpp",
-        "NdkCameraDevice.cpp",
         "NdkCaptureRequest.cpp",
-        "NdkCameraCaptureSession.cpp",
+        "impl/ACameraCaptureSession.cpp",
+        "impl/ACameraDevice.cpp",
         "impl/ACameraManager.cpp",
         "impl/ACameraMetadata.cpp",
-        "impl/ACameraDevice.cpp",
-        "impl/ACameraCaptureSession.cpp",
     ],
     shared_libs: [
-        "libbinder",
-        "liblog",
-        "libgui",
-        "libutils",
+        "android.companion.virtual.virtualdevice_aidl-cpp",
+        "android.companion.virtualdevice.flags-aconfig-cc",
+        "framework-permission-aidl-cpp",
         "libandroid_runtime",
+        "libbinder",
         "libcamera_client",
-        "libstagefright_foundation",
-        "libcutils",
         "libcamera_metadata",
+        "libcutils",
+        "libgui",
+        "liblog",
         "libmediandk",
         "libnativewindow",
+        "libstagefright_foundation",
+        "libutils",
     ],
     header_libs: [
         "jni_headers",
     ],
     cflags: [
-        "-fvisibility=hidden",
         "-DEXPORT=__attribute__((visibility(\"default\")))",
         "-Wall",
-        "-Wextra",
         "-Werror",
+        "-Wextra",
+        "-fvisibility=hidden",
     ],
     // TODO: jchowdhary@, use header_libs instead b/131165718
     include_dirs: [
@@ -114,16 +118,16 @@
     cpp_std: "gnu++17",
     vendor: true,
     srcs: [
+        "NdkCameraCaptureSession.cpp",
+        "NdkCameraDevice.cpp",
+        "NdkCameraManager.cpp",
+        "NdkCameraMetadata.cpp",
+        "NdkCaptureRequest.cpp",
+        "impl/ACameraCaptureSession.cpp",
+        "impl/ACameraMetadata.cpp",
         "ndk_vendor/impl/ACameraDevice.cpp",
         "ndk_vendor/impl/ACameraManager.cpp",
         "ndk_vendor/impl/utils.cpp",
-        "impl/ACameraMetadata.cpp",
-        "impl/ACameraCaptureSession.cpp",
-        "NdkCameraMetadata.cpp",
-        "NdkCameraCaptureSession.cpp",
-        "NdkCameraManager.cpp",
-        "NdkCameraDevice.cpp",
-        "NdkCaptureRequest.cpp",
     ],
 
     export_include_dirs: ["include"],
@@ -132,30 +136,30 @@
     ],
     local_include_dirs: [
         ".",
-        "include",
         "impl",
+        "include",
     ],
     cflags: [
-        "-fvisibility=hidden",
         "-DEXPORT=__attribute__((visibility(\"default\")))",
         "-D__ANDROID_VNDK__",
+        "-fvisibility=hidden",
     ],
 
     shared_libs: [
-        "libbinder_ndk",
-        "libfmq",
-        "libhidlbase",
-        "libhardware",
-        "libnativewindow",
-        "liblog",
-        "libutils",
-        "libstagefright_foundation",
-        "libcutils",
-        "libcamera_metadata",
-        "libmediandk",
         "android.frameworks.cameraservice.common-V1-ndk",
         "android.frameworks.cameraservice.device-V2-ndk",
         "android.frameworks.cameraservice.service-V2-ndk",
+        "libbinder_ndk",
+        "libcamera_metadata",
+        "libcutils",
+        "libfmq",
+        "libhardware",
+        "libhidlbase",
+        "liblog",
+        "libmediandk",
+        "libnativewindow",
+        "libstagefright_foundation",
+        "libutils",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
@@ -172,19 +176,19 @@
     name: "ACameraNdkVendorTest",
     vendor: true,
     srcs: [
-        "ndk_vendor/tests/AImageReaderVendorTest.cpp",
         "ndk_vendor/tests/ACameraManagerTest.cpp",
+        "ndk_vendor/tests/AImageReaderVendorTest.cpp",
     ],
     shared_libs: [
         "libcamera2ndk_vendor",
         "libcamera_metadata",
+        "libcutils",
         "libhidlbase",
+        "liblog",
         "libmediandk",
         "libnativewindow",
-        "libutils",
         "libui",
-        "libcutils",
-        "liblog",
+        "libutils",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
diff --git a/camera/ndk/NdkCameraManager.cpp b/camera/ndk/NdkCameraManager.cpp
index 2de4a50..1b3343e 100644
--- a/camera/ndk/NdkCameraManager.cpp
+++ b/camera/ndk/NdkCameraManager.cpp
@@ -68,7 +68,7 @@
 
 EXPORT
 camera_status_t ACameraManager_registerAvailabilityCallback(
-        ACameraManager*, const ACameraManager_AvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -81,13 +81,13 @@
                callback->onCameraAvailable, callback->onCameraUnavailable);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance()->registerAvailabilityCallback(callback);
+    manager->registerAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
 EXPORT
 camera_status_t ACameraManager_unregisterAvailabilityCallback(
-        ACameraManager*, const ACameraManager_AvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -100,13 +100,13 @@
                callback->onCameraAvailable, callback->onCameraUnavailable);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance()->unregisterAvailabilityCallback(callback);
+    manager->unregisterAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
 EXPORT
 camera_status_t ACameraManager_registerExtendedAvailabilityCallback(
-        ACameraManager* /*manager*/, const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -131,13 +131,13 @@
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
     }
-    CameraManagerGlobal::getInstance()->registerExtendedAvailabilityCallback(callback);
+    manager->registerExtendedAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
 EXPORT
 camera_status_t ACameraManager_unregisterExtendedAvailabilityCallback(
-        ACameraManager* /*manager*/, const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -154,7 +154,7 @@
                callback->onCameraAccessPrioritiesChanged);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance()->unregisterExtendedAvailabilityCallback(callback);
+    manager->unregisterExtendedAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
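
With this change the NDK entry points dispatch through the ACameraManager instance (and therefore its device context) rather than the process-wide CameraManagerGlobal singleton. The public NDK call pattern is unchanged; a typical registration looks roughly like the sketch below (illustrative only).

    #include <camera/NdkCameraManager.h>
    #include <stdio.h>

    static void onAvailable(void* /*context*/, const char* id)   { printf("available: %s\n", id); }
    static void onUnavailable(void* /*context*/, const char* id) { printf("unavailable: %s\n", id); }

    int main() {
        ACameraManager* manager = ACameraManager_create();
        ACameraManager_AvailabilityCallbacks callbacks = {
                /*context*/ nullptr, onAvailable, onUnavailable};
        // Registration is now tied to this manager and its device context,
        // not to the process-wide singleton.
        ACameraManager_registerAvailabilityCallback(manager, &callbacks);
        // ... run the app ...
        ACameraManager_unregisterAvailabilityCallback(manager, &callbacks);
        ACameraManager_delete(manager);
        return 0;
    }
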
 
diff --git a/camera/ndk/NdkCameraMetadata.cpp b/camera/ndk/NdkCameraMetadata.cpp
index 7d3a53e..a2dfaee 100644
--- a/camera/ndk/NdkCameraMetadata.cpp
+++ b/camera/ndk/NdkCameraMetadata.cpp
@@ -121,6 +121,18 @@
 }
 
 EXPORT
+camera_status_t ACameraMetadata_getTagFromName(
+        const ACameraMetadata* acm, const char* name, uint32_t* tag) {
+    ATRACE_CALL();
+    if (acm == nullptr || name == nullptr || tag == nullptr) {
+        ALOGE("%s: invalid argument! metadata %p, name %p, tag %p",
+               __FUNCTION__, acm, name, tag);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return acm->getTagFromName(name, tag);
+}
+
+EXPORT
 ACameraMetadata* ACameraMetadata_copy(const ACameraMetadata* src) {
     ATRACE_CALL();
     if (src == nullptr) {
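
The new ACameraMetadata_getTagFromName() wrapper (with the signature shown above) lets NDK clients resolve a metadata tag, including vendor tags, by its string name. A hypothetical usage sketch, assuming the function is exported from NdkCameraMetadata.h; the camera id "0" and the tag name are illustrative only:

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <stdint.h>
    #include <stdio.h>

    int main() {
        ACameraManager* manager = ACameraManager_create();
        ACameraMetadata* chars = nullptr;
        if (ACameraManager_getCameraCharacteristics(manager, "0", &chars) == ACAMERA_OK) {
            uint32_t tag = 0;
            if (ACameraMetadata_getTagFromName(chars, "android.control.aeMode", &tag) == ACAMERA_OK) {
                printf("resolved tag = 0x%x\n", tag);
            }
            ACameraMetadata_free(chars);
        }
        ACameraManager_delete(manager);
        return 0;
    }
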
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 5d3b65b..6d29ef5 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -17,21 +17,109 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ACameraManager"
 
-#include <memory>
 #include "ACameraManager.h"
-#include "ACameraMetadata.h"
-#include "ACameraDevice.h"
-#include <utils/Vector.h>
-#include <cutils/properties.h>
-#include <stdlib.h>
+#include <android_companion_virtualdevice_flags.h>
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 #include <camera/VendorTagDescriptor.h>
+#include <cutils/properties.h>
+#include <stdlib.h>
+#include <utils/Vector.h>
+#include <memory>
+#include "ACameraDevice.h"
+#include "ACameraMetadata.h"
 
 using namespace android::acam;
+namespace vd_flags = android::companion::virtualdevice::flags;
 
 namespace android {
 namespace acam {
+namespace {
+
+using ::android::binder::Status;
+using ::android::companion::virtualnative::IVirtualDeviceManagerNative;
+
+// Returns binder connection to VirtualDeviceManager.
+//
+// Subsequent calls return the same cached instance.
+sp<IVirtualDeviceManagerNative> getVirtualDeviceManager() {
+    auto connectToVirtualDeviceManagerNative = []() {
+        sp<IBinder> binder =
+                defaultServiceManager()->checkService(String16("virtualdevice_native"));
+        if (binder == nullptr) {
+            ALOGW("%s: Cannot get virtualdevice_native service", __func__);
+            return interface_cast<IVirtualDeviceManagerNative>(nullptr);
+        }
+        return interface_cast<IVirtualDeviceManagerNative>(binder);
+    };
+
+    static sp<IVirtualDeviceManagerNative> vdm = connectToVirtualDeviceManagerNative();
+    return vdm;
+}
+
+// Returns the device id that the calling process is running on.
+// If the process cannot be attributed to a single virtual device id, returns the default device id.
+int getCurrentDeviceId() {
+    if (!vd_flags::camera_device_awareness()) {
+        return kDefaultDeviceId;
+    }
+
+    auto vdm = getVirtualDeviceManager();
+    if (vdm == nullptr) {
+        return kDefaultDeviceId;
+    }
+
+    const uid_t myUid = getuid();
+    std::vector<int> deviceIds;
+    Status status = vdm->getDeviceIdsForUid(myUid, &deviceIds);
+    if (!status.isOk() || deviceIds.empty()) {
+        ALOGE("%s: Failed to call getDeviceIdsForUid to determine device id for uid %d: %s",
+              __func__, myUid, status.toString8().c_str());
+        return kDefaultDeviceId;
+    }
+
+    // If the UID is associated with multiple virtual devices, use the default device's
+    // camera as we cannot disambiguate here. This effectively means that the app has
+    // activities on different devices at the same time.
+    if (deviceIds.size() != 1) {
+        return kDefaultDeviceId;
+    }
+    return deviceIds[0];
+}
+
+// Returns device policy for POLICY_TYPE_CAMERA corresponding to deviceId.
+DevicePolicy getDevicePolicyForDeviceId(const int deviceId) {
+    if (!vd_flags::camera_device_awareness() || deviceId == kDefaultDeviceId) {
+        return DevicePolicy::DEVICE_POLICY_DEFAULT;
+    }
+
+    auto vdm = getVirtualDeviceManager();
+    if (vdm == nullptr) {
+        return DevicePolicy::DEVICE_POLICY_DEFAULT;
+    }
+
+    int policy = IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT;
+    Status status = vdm->getDevicePolicy(deviceId, IVirtualDeviceManagerNative::POLICY_TYPE_CAMERA,
+                                         &policy);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed to call getDevicePolicy to determine camera policy for device id %d: %s",
+              __func__, deviceId, status.toString8().c_str());
+        return DevicePolicy::DEVICE_POLICY_DEFAULT;
+    }
+    return static_cast<DevicePolicy>(policy);
+}
+
+// Returns true if the camera owned by device cameraDeviceId can be accessed within deviceContext.
+bool isCameraAccessible(const DeviceContext deviceContext, const int cameraDeviceId) {
+    if (!vd_flags::camera_device_awareness() ||
+        deviceContext.policy == DevicePolicy::DEVICE_POLICY_DEFAULT) {
+        return cameraDeviceId == kDefaultDeviceId;
+    }
+    return deviceContext.deviceId == cameraDeviceId;
+}
+
+}  // namespace
+
 // Static member definitions
 const char* CameraManagerGlobal::kCameraIdKey   = "CameraId";
 const char* CameraManagerGlobal::kPhysicalCameraIdKey   = "PhysicalCameraId";
@@ -41,6 +129,11 @@
 Mutex                CameraManagerGlobal::sLock;
 wp<CameraManagerGlobal> CameraManagerGlobal::sInstance = nullptr;
 
+DeviceContext::DeviceContext() {
+    deviceId = getCurrentDeviceId();
+    policy = getDevicePolicyForDeviceId(deviceId);
+}
+
 sp<CameraManagerGlobal> CameraManagerGlobal::getInstance() {
     Mutex::Autolock _l(sLock);
     sp<CameraManagerGlobal> instance = sInstance.promote();
@@ -84,14 +177,11 @@
 
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(toString16(kCameraServiceName));
-            if (binder != nullptr) {
-                break;
-            }
-            ALOGW("CameraService not published, waiting...");
-            usleep(kCameraServicePollDelay);
-        } while(true);
+        binder = sm->checkService(String16(kCameraServiceName));
+        if (binder == nullptr) {
+            ALOGE("%s: Could not get CameraService instance.", __FUNCTION__);
+            return nullptr;
+        }
         if (mDeathNotifier == nullptr) {
             mDeathNotifier = new DeathNotifier(this);
         }
@@ -125,11 +215,11 @@
         std::vector<hardware::CameraStatus> cameraStatuses{};
         mCameraService->addListener(mCameraServiceListener, &cameraStatuses);
         for (auto& c : cameraStatuses) {
-            onStatusChangedLocked(c.status, c.cameraId);
+            onStatusChangedLocked(c.status, c.deviceId, c.cameraId);
 
             for (auto& unavailablePhysicalId : c.unavailablePhysicalIds) {
                 onStatusChangedLocked(hardware::ICameraServiceListener::STATUS_NOT_PRESENT,
-                        c.cameraId, unavailablePhysicalId);
+                                      c.deviceId, c.cameraId, unavailablePhysicalId);
             }
         }
 
@@ -189,14 +279,15 @@
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
         AutoMutex lock(cm->mLock);
-        std::vector<std::string> cameraIdList;
+        std::vector<DeviceStatusMapKey> keysToRemove;
+        keysToRemove.reserve(cm->mDeviceStatusMap.size());
         for (auto& pair : cm->mDeviceStatusMap) {
-            cameraIdList.push_back(pair.first);
+            keysToRemove.push_back(pair.first);
         }
 
-        for (const std::string& cameraId : cameraIdList) {
-            cm->onStatusChangedLocked(
-                    CameraServiceListener::STATUS_NOT_PRESENT, cameraId);
+        for (const DeviceStatusMapKey& key : keysToRemove) {
+            cm->onStatusChangedLocked(CameraServiceListener::STATUS_NOT_PRESENT, key.deviceId,
+                                      key.cameraId);
         }
         cm->mCameraService.clear();
         // TODO: consider adding re-connect call here?
@@ -204,32 +295,35 @@
 }
 
 void CameraManagerGlobal::registerExtendedAvailabilityCallback(
-        const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
-    return registerAvailCallback<ACameraManager_ExtendedAvailabilityCallbacks>(callback);
+        const DeviceContext& deviceContext,
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    return registerAvailCallback<ACameraManager_ExtendedAvailabilityCallbacks>(deviceContext,
+                                                                               callback);
 }
 
 void CameraManagerGlobal::unregisterExtendedAvailabilityCallback(
-        const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+        const DeviceContext& deviceContext,
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
     Mutex::Autolock _l(mLock);
 
     drainPendingCallbacksLocked();
 
-    Callback cb(callback);
+    Callback cb(deviceContext, callback);
     mCallbacks.erase(cb);
 }
 
 void CameraManagerGlobal::registerAvailabilityCallback(
-        const ACameraManager_AvailabilityCallbacks *callback) {
-    return registerAvailCallback<ACameraManager_AvailabilityCallbacks>(callback);
+        const DeviceContext& deviceContext, const ACameraManager_AvailabilityCallbacks* callback) {
+    return registerAvailCallback<ACameraManager_AvailabilityCallbacks>(deviceContext, callback);
 }
 
 void CameraManagerGlobal::unregisterAvailabilityCallback(
-        const ACameraManager_AvailabilityCallbacks *callback) {
+        const DeviceContext& deviceContext, const ACameraManager_AvailabilityCallbacks* callback) {
     Mutex::Autolock _l(mLock);
 
     drainPendingCallbacksLocked();
 
-    Callback cb(callback);
+    Callback cb(deviceContext, callback);
     mCallbacks.erase(cb);
 }
 
@@ -252,19 +346,24 @@
     }
 }
 
-template<class T>
-void CameraManagerGlobal::registerAvailCallback(const T *callback) {
+template <class T>
+void CameraManagerGlobal::registerAvailCallback(const DeviceContext& deviceContext,
+                                                const T* callback) {
     Mutex::Autolock _l(mLock);
-    Callback cb(callback);
-    auto pair = mCallbacks.insert(cb);
+    getCameraServiceLocked();
+    Callback cb(deviceContext, callback);
+    const auto& [_, newlyRegistered] = mCallbacks.insert(cb);
     // Send initial callbacks if callback is newly registered
-    if (pair.second) {
-        for (auto& pair : mDeviceStatusMap) {
-            const std::string& cameraId = pair.first;
-            int32_t status = pair.second.getStatus();
+    if (newlyRegistered) {
+        for (auto& [key, statusAndHAL3Support] : mDeviceStatusMap) {
+            if (!isCameraAccessible(deviceContext, key.deviceId)) {
+                continue;
+            }
+            const std::string& cameraId = key.cameraId;
+            int32_t status = statusAndHAL3Support.getStatus();
             // Don't send initial callbacks for camera ids which don't support
             // camera2
-            if (!pair.second.supportsHAL3) {
+            if (!statusAndHAL3Support.supportsHAL3) {
                 continue;
             }
 
@@ -280,7 +379,7 @@
 
             // Physical camera unavailable callback
             std::set<std::string> unavailablePhysicalCameras =
-                    pair.second.getUnavailablePhysicalIds();
+                    statusAndHAL3Support.getUnavailablePhysicalIds();
             for (const auto& physicalCameraId : unavailablePhysicalCameras) {
                 sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
                 ACameraManager_PhysicalCameraAvailabilityCallback cbFunc =
@@ -310,21 +409,26 @@
     return camera2Support;
 }
 
-void CameraManagerGlobal::getCameraIdList(std::vector<std::string>* cameraIds) {
+void CameraManagerGlobal::getCameraIdList(const DeviceContext& context,
+        std::vector<std::string>* cameraIds) {
     // Ensure that we have initialized/refreshed the list of available devices
     Mutex::Autolock _l(mLock);
     // Needed to make sure we're connected to cameraservice
     getCameraServiceLocked();
-    for(auto& deviceStatus : mDeviceStatusMap) {
-        int32_t status = deviceStatus.second.getStatus();
+    for (auto& [key, statusAndHAL3Support] : mDeviceStatusMap) {
+        if (!isCameraAccessible(context, key.deviceId)) {
+            continue;
+        }
+
+        int32_t status = statusAndHAL3Support.getStatus();
         if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT ||
                 status == hardware::ICameraServiceListener::STATUS_ENUMERATING) {
             continue;
         }
-        if (!deviceStatus.second.supportsHAL3) {
+        if (!statusAndHAL3Support.supportsHAL3) {
             continue;
         }
-        cameraIds->push_back(deviceStatus.first);
+        cameraIds->push_back(key.cameraId);
     }
 }
 
@@ -460,24 +564,25 @@
 }
 
 binder::Status CameraManagerGlobal::CameraServiceListener::onStatusChanged(
-        int32_t status, const std::string& cameraId) {
+        int32_t status, const std::string& cameraId, int deviceId) {
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
-        cm->onStatusChanged(status, cameraId);
-    } else {
-        ALOGE("Cannot deliver status change. Global camera manager died");
+        cm->onStatusChanged(status, deviceId, cameraId);
     }
+    ALOGE_IF(cm == nullptr,
+             "Cannot deliver physical camera status change. Global camera manager died");
     return binder::Status::ok();
 }
 
 binder::Status CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
-        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+        int32_t status, const std::string& cameraId, const std::string& physicalCameraId,
+        int deviceId) {
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
-        cm->onStatusChanged(status, cameraId, physicalCameraId);
-    } else {
-        ALOGE("Cannot deliver physical camera status change. Global camera manager died");
+        cm->onStatusChanged(status, deviceId, cameraId, physicalCameraId);
     }
+    ALOGE_IF(cm == nullptr,
+             "Cannot deliver physical camera status change. Global camera manager died");
     return binder::Status::ok();
 }
 
@@ -495,23 +600,24 @@
     }
 }
 
-void CameraManagerGlobal::onStatusChanged(
-        int32_t status, const std::string& cameraId) {
+void CameraManagerGlobal::onStatusChanged(int32_t status, const int deviceId,
+        const std::string& cameraId) {
     Mutex::Autolock _l(mLock);
-    onStatusChangedLocked(status, cameraId);
+    onStatusChangedLocked(status, deviceId, cameraId);
 }
 
-void CameraManagerGlobal::onStatusChangedLocked(
-        int32_t status, const std::string& cameraId) {
+void CameraManagerGlobal::onStatusChangedLocked(int32_t status, const int deviceId,
+        const std::string& cameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
     }
 
-    bool firstStatus = (mDeviceStatusMap.count(cameraId) == 0);
-    int32_t oldStatus = firstStatus ?
-            status : // first status
-            mDeviceStatusMap[cameraId].getStatus();
+    DeviceStatusMapKey key{.deviceId = deviceId, .cameraId = cameraId};
+
+    bool firstStatus = (mDeviceStatusMap.count(key) == 0);
+    int32_t oldStatus = firstStatus ? status :  // first status
+                                mDeviceStatusMap[key].getStatus();
 
     if (!firstStatus &&
             isStatusAvailable(status) == isStatusAvailable(oldStatus)) {
@@ -521,15 +627,17 @@
 
     bool supportsHAL3 = supportsCamera2ApiLocked(cameraId);
     if (firstStatus) {
-        mDeviceStatusMap.emplace(std::piecewise_construct,
-                std::forward_as_tuple(cameraId),
-                std::forward_as_tuple(status, supportsHAL3));
+        mDeviceStatusMap.emplace(std::piecewise_construct, std::forward_as_tuple(key),
+                                 std::forward_as_tuple(status, supportsHAL3));
     } else {
-        mDeviceStatusMap[cameraId].updateStatus(status);
+        mDeviceStatusMap[key].updateStatus(status);
     }
     // Iterate through all registered callbacks
     if (supportsHAL3) {
         for (auto cb : mCallbacks) {
+            if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
+                continue;
+            }
             sp<AMessage> msg = new AMessage(kWhatSendSingleCallback, mHandler);
             ACameraManager_AvailabilityCallback cbFp = isStatusAvailable(status) ?
                     cb.mAvailable : cb.mUnavailable;
@@ -541,30 +649,31 @@
         }
     }
     if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT) {
-        mDeviceStatusMap.erase(cameraId);
+        mDeviceStatusMap.erase(key);
     }
 }
 
-void CameraManagerGlobal::onStatusChanged(
-        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+void CameraManagerGlobal::onStatusChanged(int32_t status, const int deviceId,
+        const std::string& cameraId, const std::string& physicalCameraId) {
     Mutex::Autolock _l(mLock);
-    onStatusChangedLocked(status, cameraId, physicalCameraId);
+    onStatusChangedLocked(status, deviceId, cameraId, physicalCameraId);
 }
 
-void CameraManagerGlobal::onStatusChangedLocked(
-        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+void CameraManagerGlobal::onStatusChangedLocked(int32_t status, const int deviceId,
+        const std::string& cameraId, const std::string& physicalCameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
     }
 
-    auto logicalStatus = mDeviceStatusMap.find(cameraId);
+    DeviceStatusMapKey key{.deviceId = deviceId, .cameraId = cameraId};
+    auto logicalStatus = mDeviceStatusMap.find(key);
     if (logicalStatus == mDeviceStatusMap.end()) {
         ALOGE("%s: Physical camera id %s status change on a non-present id %s",
                 __FUNCTION__, physicalCameraId.c_str(), cameraId.c_str());
         return;
     }
-    int32_t logicalCamStatus = mDeviceStatusMap[cameraId].getStatus();
+    int32_t logicalCamStatus = mDeviceStatusMap[key].getStatus();
     if (logicalCamStatus != hardware::ICameraServiceListener::STATUS_PRESENT &&
             logicalCamStatus != hardware::ICameraServiceListener::STATUS_NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
@@ -576,14 +685,17 @@
 
     bool updated = false;
     if (status == hardware::ICameraServiceListener::STATUS_PRESENT) {
-        updated = mDeviceStatusMap[cameraId].removeUnavailablePhysicalId(physicalCameraId);
+        updated = mDeviceStatusMap[key].removeUnavailablePhysicalId(physicalCameraId);
     } else {
-        updated = mDeviceStatusMap[cameraId].addUnavailablePhysicalId(physicalCameraId);
+        updated = mDeviceStatusMap[key].addUnavailablePhysicalId(physicalCameraId);
     }
 
     // Iterate through all registered callbacks
     if (supportsHAL3 && updated) {
         for (auto cb : mCallbacks) {
+            if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
+                continue;
+            }
             sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
             ACameraManager_PhysicalCameraAvailabilityCallback cbFp = isStatusAvailable(status) ?
                     cb.mPhysicalCamAvailable : cb.mPhysicalCamUnavailable;
@@ -637,7 +749,7 @@
     Mutex::Autolock _l(mLock);
 
     std::vector<std::string> idList;
-    CameraManagerGlobal::getInstance()->getCameraIdList(&idList);
+    mGlobalManager->getCameraIdList(mDeviceContext, &idList);
 
     int numCameras = idList.size();
     ACameraIdList *out = new ACameraIdList;
@@ -687,7 +799,7 @@
         const char* cameraIdStr, sp<ACameraMetadata>* characteristics) {
     Mutex::Autolock _l(mLock);
 
-    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
+    sp<hardware::ICameraService> cs = mGlobalManager->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
@@ -695,8 +807,16 @@
 
     CameraMetadata rawMetadata;
     int targetSdkVersion = android_get_application_target_sdk_version();
+
+    AttributionSourceState clientAttribution;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+    clientAttribution.deviceId = mDeviceContext.deviceId;
+
     binder::Status serviceRet = cs->getCameraCharacteristics(cameraIdStr,
-            targetSdkVersion, /*overrideToPortrait*/false, &rawMetadata);
+            targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+            clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
+            &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
             case hardware::ICameraService::ERROR_DISCONNECTED:
@@ -733,7 +853,7 @@
 
     ACameraDevice* device = new ACameraDevice(cameraId, callback, chars);
 
-    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
+    sp<hardware::ICameraService> cs = mGlobalManager->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         delete device;
@@ -743,12 +863,21 @@
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = device->getServiceCallback();
     sp<hardware::camera2::ICameraDeviceUser> deviceRemote;
     int targetSdkVersion = android_get_application_target_sdk_version();
+
+    AttributionSourceState clientAttribution;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+    clientAttribution.deviceId = mDeviceContext.deviceId;
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
+
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
     binder::Status serviceRet = cs->connectDevice(
-            callbacks, cameraId, "", {},
-            hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
-            targetSdkVersion, /*overrideToPortrait*/false, /*out*/&deviceRemote);
+            callbacks, cameraId, /*oomScoreOffset*/0,
+            targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+            clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
+            /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
         ALOGE("%s: connect camera device failed: %s", __FUNCTION__, serviceRet.toString8().c_str());
@@ -795,6 +924,22 @@
     return ACAMERA_OK;
 }
 
-ACameraManager::~ACameraManager() {
+void ACameraManager::registerAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->registerAvailabilityCallback(mDeviceContext, callback);
+}
 
+void ACameraManager::unregisterAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterAvailabilityCallback(mDeviceContext, callback);
+}
+
+void ACameraManager::registerExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->registerExtendedAvailabilityCallback(mDeviceContext, callback);
+}
+
+void ACameraManager::unregisterExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterExtendedAvailabilityCallback(mDeviceContext, callback);
 }
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index c135d0f..f4124ef 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -20,6 +20,7 @@
 #include <camera/NdkCameraManager.h>
 
 #include <android-base/parseint.h>
+#include <android/companion/virtualnative/IVirtualDeviceManagerNative.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/BnCameraServiceListener.h>
 #include <camera/CameraMetadata.h>
@@ -37,6 +38,36 @@
 namespace android {
 namespace acam {
 
+enum class DevicePolicy {
+  DEVICE_POLICY_DEFAULT =
+    ::android::companion::virtualnative::IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT,
+  DEVICE_POLICY_CUSTOM =
+    ::android::companion::virtualnative::IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM
+};
+
+/**
+ * Device context within which cameras are accessed.
+ *
+ * When constructed, the device id is set to the id of the virtual device corresponding to
+ * the caller's UID (or to the default device id if the app is not running on a virtual device).
+ *
+ * See getDeviceId() in Context.java for more context (no pun intended).
+ */
+struct DeviceContext {
+    DeviceContext();
+
+    // Id of the virtual device associated with this context (or DEFAULT_DEVICE_ID = 0 in
+    // case the caller UID is not running on a virtual device).
+    int deviceId;
+    // Device policy corresponding to VirtualDeviceParams.POLICY_TYPE_CAMERA:
+    //
+    // Can be either:
+    // * (0) DEVICE_POLICY_DEFAULT - virtual devices have access to default device cameras.
+    // * (1) DEVICE_POLICY_CUSTOM - virtual devices do not have access to default device cameras
+    //                              and can only access virtual cameras owned by the same device.
+    DevicePolicy policy;
+};
+
 /**
  * Per-process singleton instance of CameraManger. Shared by all ACameraManager
  * instances. Created when first ACameraManager is created and destroyed when
@@ -49,29 +80,30 @@
     static sp<CameraManagerGlobal> getInstance();
     sp<hardware::ICameraService> getCameraService();
 
-    void registerAvailabilityCallback(
-            const ACameraManager_AvailabilityCallbacks *callback);
-    void unregisterAvailabilityCallback(
-            const ACameraManager_AvailabilityCallbacks *callback);
+    void registerAvailabilityCallback(const DeviceContext& context,
+                                      const ACameraManager_AvailabilityCallbacks* callback);
+    void unregisterAvailabilityCallback(const DeviceContext& context,
+                                        const ACameraManager_AvailabilityCallbacks* callback);
 
     void registerExtendedAvailabilityCallback(
+            const DeviceContext& context,
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
     void unregisterExtendedAvailabilityCallback(
+            const DeviceContext& context,
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
 
     /**
      * Return camera IDs that support camera2
      */
-    void getCameraIdList(std::vector<std::string> *cameraIds);
+    void getCameraIdList(const DeviceContext& deviceContext, std::vector<std::string>* cameraIds);
 
   private:
     sp<hardware::ICameraService> mCameraService;
-    const int                    kCameraServicePollDelay = 500000; // 0.5s
     const char*                  kCameraServiceName      = "media.camera";
     Mutex                        mLock;
 
-    template<class T>
-    void registerAvailCallback(const T *callback);
+    template <class T>
+    void registerAvailCallback(const DeviceContext& deviceContext, const T* callback);
 
     class DeathNotifier : public IBinder::DeathRecipient {
       public:
@@ -87,23 +119,24 @@
     class CameraServiceListener final : public hardware::BnCameraServiceListener {
       public:
         explicit CameraServiceListener(CameraManagerGlobal* cm) : mCameraManager(cm) {}
-        virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId);
+        virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId,
+                int32_t deviceId);
         virtual binder::Status onPhysicalCameraStatusChanged(int32_t status,
-                const std::string& cameraId, const std::string& physicalCameraId);
+                const std::string& cameraId, const std::string& physicalCameraId, int32_t deviceId);
 
         // Torch API not implemented yet
-        virtual binder::Status onTorchStatusChanged(int32_t, const std::string&) {
+        virtual binder::Status onTorchStatusChanged(int32_t, const std::string&, int32_t) {
             return binder::Status::ok();
         }
-        virtual binder::Status onTorchStrengthLevelChanged(const std::string&, int32_t) {
+        virtual binder::Status onTorchStrengthLevelChanged(const std::string&, int32_t, int32_t) {
             return binder::Status::ok();
         }
 
         virtual binder::Status onCameraAccessPrioritiesChanged();
-        virtual binder::Status onCameraOpened(const std::string&, const std::string&) {
+        virtual binder::Status onCameraOpened(const std::string&, const std::string&, int32_t) {
             return binder::Status::ok();
         }
-        virtual binder::Status onCameraClosed(const std::string&) {
+        virtual binder::Status onCameraClosed(const std::string&, int32_t) {
             return binder::Status::ok();
         }
 
@@ -114,29 +147,34 @@
 
     // Wrapper of ACameraManager_AvailabilityCallbacks so we can store it in std::set
     struct Callback {
-        explicit Callback(const ACameraManager_AvailabilityCallbacks *callback) :
-            mAvailable(callback->onCameraAvailable),
-            mUnavailable(callback->onCameraUnavailable),
-            mAccessPriorityChanged(nullptr),
-            mPhysicalCamAvailable(nullptr),
-            mPhysicalCamUnavailable(nullptr),
-            mContext(callback->context) {}
+        explicit Callback(const DeviceContext& deviceContext,
+                 const ACameraManager_AvailabilityCallbacks* callback)
+            : mDeviceContext(deviceContext),
+              mAvailable(callback->onCameraAvailable),
+              mUnavailable(callback->onCameraUnavailable),
+              mAccessPriorityChanged(nullptr),
+              mPhysicalCamAvailable(nullptr),
+              mPhysicalCamUnavailable(nullptr),
+              mContext(callback->context) {}
 
-        explicit Callback(const ACameraManager_ExtendedAvailabilityCallbacks *callback) :
-            mAvailable(callback->availabilityCallbacks.onCameraAvailable),
-            mUnavailable(callback->availabilityCallbacks.onCameraUnavailable),
-            mAccessPriorityChanged(callback->onCameraAccessPrioritiesChanged),
-            mPhysicalCamAvailable(callback->onPhysicalCameraAvailable),
-            mPhysicalCamUnavailable(callback->onPhysicalCameraUnavailable),
-            mContext(callback->availabilityCallbacks.context) {}
+        explicit Callback(const DeviceContext& deviceContext,
+                 const ACameraManager_ExtendedAvailabilityCallbacks* callback)
+            : mDeviceContext(deviceContext),
+              mAvailable(callback->availabilityCallbacks.onCameraAvailable),
+              mUnavailable(callback->availabilityCallbacks.onCameraUnavailable),
+              mAccessPriorityChanged(callback->onCameraAccessPrioritiesChanged),
+              mPhysicalCamAvailable(callback->onPhysicalCameraAvailable),
+              mPhysicalCamUnavailable(callback->onPhysicalCameraUnavailable),
+              mContext(callback->availabilityCallbacks.context) {}
 
         bool operator == (const Callback& other) const {
-            return (mAvailable == other.mAvailable &&
-                    mUnavailable == other.mUnavailable &&
+            return (mAvailable == other.mAvailable && mUnavailable == other.mUnavailable &&
                     mAccessPriorityChanged == other.mAccessPriorityChanged &&
                     mPhysicalCamAvailable == other.mPhysicalCamAvailable &&
                     mPhysicalCamUnavailable == other.mPhysicalCamUnavailable &&
-                    mContext == other.mContext);
+                    mContext == other.mContext &&
+                    mDeviceContext.deviceId == other.mDeviceContext.deviceId &&
+                    mDeviceContext.policy == other.mDeviceContext.policy);
         }
         bool operator != (const Callback& other) const {
             return !(*this == other);
@@ -145,6 +183,9 @@
 #pragma GCC diagnostic push
 #pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
             if (*this == other) return false;
+            if (mDeviceContext.deviceId != other.mDeviceContext.deviceId) {
+                return mDeviceContext.deviceId < other.mDeviceContext.deviceId;
+            }
             if (mContext != other.mContext) return mContext < other.mContext;
             if (mPhysicalCamAvailable != other.mPhysicalCamAvailable) {
                 return mPhysicalCamAvailable < other.mPhysicalCamAvailable;
@@ -162,6 +203,7 @@
         bool operator > (const Callback& other) const {
             return (*this != other && !(*this < other));
         }
+        DeviceContext mDeviceContext;
         ACameraManager_AvailabilityCallback mAvailable;
         ACameraManager_AvailabilityCallback mUnavailable;
         ACameraManager_AccessPrioritiesChangedCallback mAccessPriorityChanged;
@@ -203,37 +245,17 @@
 
     sp<hardware::ICameraService> getCameraServiceLocked();
     void onCameraAccessPrioritiesChanged();
-    void onStatusChanged(int32_t status, const std::string& cameraId);
-    void onStatusChangedLocked(int32_t status, const std::string& cameraId);
-    void onStatusChanged(int32_t status, const std::string& cameraId, const std::string& physicalCameraId);
-    void onStatusChangedLocked(int32_t status, const std::string& cameraId,
-           const std::string& physicalCameraId);
+    void onStatusChanged(int32_t status, int deviceId, const std::string& cameraId);
+    void onStatusChangedLocked(int32_t status, int deviceId, const std::string& cameraId);
+    void onStatusChanged(int32_t status, int deviceId, const std::string& cameraId,
+                         const std::string& physicalCameraId);
+    void onStatusChangedLocked(int32_t status, int deviceId, const std::string& cameraId,
+                               const std::string& physicalCameraId);
     // Utils for status
     static bool validStatus(int32_t status);
     static bool isStatusAvailable(int32_t status);
     bool supportsCamera2ApiLocked(const std::string &cameraId);
 
-    // The sort logic must match the logic in
-    // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
-    struct CameraIdComparator {
-        bool operator()(const std::string& a, const std::string& b) const {
-            uint32_t aUint = 0, bUint = 0;
-            bool aIsUint = base::ParseUint(a.c_str(), &aUint);
-            bool bIsUint = base::ParseUint(b.c_str(), &bUint);
-
-            // Uint device IDs first
-            if (aIsUint && bIsUint) {
-                return aUint < bUint;
-            } else if (aIsUint) {
-                return true;
-            } else if (bIsUint) {
-                return false;
-            }
-            // Simple string compare if both id are not uint
-            return a < b;
-        }
-    };
-
     struct StatusAndHAL3Support {
       private:
         int32_t status = hardware::ICameraServiceListener::STATUS_NOT_PRESENT;
@@ -252,13 +274,40 @@
         std::set<std::string> getUnavailablePhysicalIds();
     };
 
-    // Map camera_id -> status
-    std::map<std::string, StatusAndHAL3Support, CameraIdComparator> mDeviceStatusMap;
+    struct DeviceStatusMapKey {
+        int deviceId;
+        std::string cameraId;
+
+        bool operator<(const DeviceStatusMapKey& other) const {
+            if (deviceId != other.deviceId) {
+                return deviceId < other.deviceId;
+            }
+
+            // The sort logic must match the logic in
+            // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
+            uint32_t cameraIdUint = 0, otherCameraIdUint = 0;
+            bool cameraIdIsUint = base::ParseUint(cameraId.c_str(), &cameraIdUint);
+            bool otherCameraIdIsUint = base::ParseUint(other.cameraId.c_str(), &otherCameraIdUint);
+
+            // Uint device IDs first
+            if (cameraIdIsUint && otherCameraIdIsUint) {
+                return cameraIdUint < otherCameraIdUint;
+            } else if (cameraIdIsUint) {
+                return true;
+            } else if (otherCameraIdIsUint) {
+                return false;
+            }
+            // Simple string compare if both ids are not uints
+            return cameraId < other.cameraId;
+        }
+    };
+
+    std::map<DeviceStatusMapKey, StatusAndHAL3Support> mDeviceStatusMap;
 
     // For the singleton instance
     static Mutex sLock;
     static wp<CameraManagerGlobal> sInstance;
-    CameraManagerGlobal() {};
+    CameraManagerGlobal() {}
     ~CameraManagerGlobal();
 };
 
@@ -270,9 +319,7 @@
  * Leave outside of android namespace because it's NDK struct
  */
 struct ACameraManager {
-    ACameraManager() :
-            mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
-    ~ACameraManager();
+    ACameraManager() : mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
     camera_status_t getCameraIdList(ACameraIdList** cameraIdList);
     static void     deleteCameraIdList(ACameraIdList* cameraIdList);
 
@@ -281,6 +328,12 @@
     camera_status_t openCamera(const char* cameraId,
                                ACameraDevice_StateCallbacks* callback,
                                /*out*/ACameraDevice** device);
+    void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void registerExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
+    void unregisterExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
 
   private:
     enum {
@@ -288,6 +341,7 @@
     };
     android::Mutex         mLock;
     android::sp<android::acam::CameraManagerGlobal> mGlobalManager;
+    const android::acam::DeviceContext mDeviceContext;
 };
 
 #endif //_ACAMERA_MANAGER_H
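
Availability callbacks are now tracked per ACameraManager instance together with its DeviceContext, so availability updates can be scoped to the caller's device. The public NDK surface is unchanged; a short app-side sketch of the path these instance methods serve, using only <camera/NdkCameraManager.h>:

    #include <camera/NdkCameraManager.h>
    #include <cstdio>

    static void onAvailable(void* /*context*/, const char* cameraId) {
        std::printf("camera %s available\n", cameraId);
    }

    static void onUnavailable(void* /*context*/, const char* cameraId) {
        std::printf("camera %s unavailable\n", cameraId);
    }

    void watchCameras() {
        ACameraManager* manager = ACameraManager_create();
        ACameraManager_AvailabilityCallbacks callbacks = {
                /*context=*/nullptr, onAvailable, onUnavailable};
        // Routed to ACameraManager::registerAvailabilityCallback(), which forwards the
        // manager's DeviceContext to CameraManagerGlobal.
        ACameraManager_registerAvailabilityCallback(manager, &callbacks);
        // ... receive availability updates ...
        ACameraManager_unregisterAvailabilityCallback(manager, &callbacks);
        ACameraManager_delete(manager);
    }
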
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index b6b8012..69b30f7 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "ACameraMetadata"
 
 #include "ACameraMetadata.h"
+
+#include <camera_metadata_hidden.h>
 #include <utils/Vector.h>
 #include <system/graphics.h>
 #include <media/NdkImage.h>
@@ -85,6 +87,19 @@
         filterDurations(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
     }
     // TODO: filter request/result keys
+    const CameraMetadata& metadata = *mData;
+    const camera_metadata_t *rawMetadata = metadata.getAndLock();
+    metadata_vendor_id_t vendorTagId = get_camera_metadata_vendor_id(rawMetadata);
+    metadata.unlock(rawMetadata);
+    sp<VendorTagDescriptorCache> vtCache = VendorTagDescriptorCache::getGlobalVendorTagCache();
+    if (vtCache == nullptr) {
+        ALOGE("%s: error vendor tag descriptor cache is not initialized", __FUNCTION__);
+        return;
+    }
+    vtCache->getVendorTagDescriptor(vendorTagId, &mVTags);
+    if (mVTags == nullptr) {
+        ALOGE("%s: error retrieving vendor tag descriptor", __FUNCTION__);
+    }
 }
 
 bool
@@ -473,6 +488,13 @@
     return (*mData);
 }
 
+camera_status_t
+ACameraMetadata::getTagFromName(const char *name, uint32_t *tag) const {
+    Mutex::Autolock _l(mLock);
+    status_t status = CameraMetadata::getTagFromName(name, mVTags.get(), tag);
+    return status == OK ? ACAMERA_OK : ACAMERA_ERROR_METADATA_NOT_FOUND;
+}
+
 bool
 ACameraMetadata::isLogicalMultiCamera(size_t* count, const char*const** physicalCameraIds) const {
     if (mType != ACM_CHARACTERISTICS) {
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 084a60b..e89e620 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -27,9 +27,17 @@
 
 #ifdef __ANDROID_VNDK__
 #include <CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTag.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTagSection.h>
+#include <aidl/android/frameworks/cameraservice/common/ProviderIdAndVendorTagSections.h>
+#include <VendorTagDescriptor.h>
 using CameraMetadata = android::hardware::camera::common::V1_0::helper::CameraMetadata;
+using ::aidl::android::frameworks::cameraservice::common::ProviderIdAndVendorTagSections;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptor;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
 #else
 #include <camera/CameraMetadata.h>
+#include <camera/VendorTagDescriptor.h>
 #endif
 
 #include <camera/NdkCameraMetadata.h>
@@ -73,6 +81,8 @@
 
     camera_status_t getTags(/*out*/int32_t* numTags,
                             /*out*/const uint32_t** tags) const;
+    camera_status_t
+    getTagFromName(const char *name, uint32_t *tag) const;
 
     const CameraMetadata& getInternalData() const;
     bool isLogicalMultiCamera(size_t* count, const char* const** physicalCameraIds) const;
@@ -134,6 +144,7 @@
 
     std::vector<const char*> mStaticPhysicalCameraIds;
     std::vector<String8> mStaticPhysicalCameraIdValues;
+    sp<VendorTagDescriptor> mVTags = nullptr;
 };
 
 #endif // _ACAMERA_METADATA_H
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index cf6b970..1400121 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -576,9 +576,7 @@
  *
  * @param session the capture session of interest
  *
- * @return <ul><li>
- *             {@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
- *             if it is not NULL.</li>
+ * @return <ul><li>{@link ACAMERA_OK} if the method succeeds.</li>
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
  *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
  *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
@@ -617,9 +615,7 @@
  *
  * @param session the capture session of interest
  *
- * @return <ul><li>
- *             {@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
- *             if it is not NULL.</li>
+ * @return <ul><li> {@link ACAMERA_OK} if the method succeeds</li>
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
  *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
  *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index cf29736..237d07b 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -221,6 +221,24 @@
         /*out*/int32_t* numEntries, /*out*/const uint32_t** tags) __INTRODUCED_IN(24);
 
 /**
+ * Look up, by name, the tag ID of a device-specific custom tag that is usable only on
+ * the particular device. The name and type of the tag need to be discovered from some
+ * other source, such as the manufacturer. The ID value is stable during the lifetime of
+ * an application, but should be queried again after the process restarts. This method
+ * can also look up public tags that exist in the Java API by name, though it is simpler
+ * and faster to use the tag values that already exist in the NDK.
+ *
+ * @param metadata The {@link ACameraMetadata} to query the tag value from.
+ * @param name The name of the tag being queried.
+ * @param tag The output tag assigned by this method.
+ *
+ * @return ACAMERA_OK only if the function call was successful.
+ */
+
+camera_status_t
+ACameraMetadata_getTagFromName(const ACameraMetadata* metadata, const char *name, uint32_t *tag)  __INTRODUCED_IN(35);
+
+/**
  * Create a copy of input {@link ACameraMetadata}.
  *
  * <p>The returned ACameraMetadata must be freed by the application by {@link ACameraMetadata_free}
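
A sketch of how an application might combine the new ACameraMetadata_getTagFromName() with the existing ACameraMetadata_getConstEntry(); the vendor tag name below is a made-up placeholder, real names come from the device manufacturer:

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>

    camera_status_t readVendorTag(ACameraManager* manager, const char* cameraId) {
        ACameraMetadata* chars = nullptr;
        camera_status_t status =
                ACameraManager_getCameraCharacteristics(manager, cameraId, &chars);
        if (status != ACAMERA_OK) return status;

        uint32_t tag = 0;
        // "com.example.vendor.someTag" is hypothetical; query the actual name out of band.
        status = ACameraMetadata_getTagFromName(chars, "com.example.vendor.someTag", &tag);
        if (status == ACAMERA_OK) {
            ACameraMetadata_const_entry entry;
            status = ACameraMetadata_getConstEntry(chars, tag, &entry);
            // entry.data/entry.count stay valid only while `chars` is alive.
        }
        ACameraMetadata_free(chars);
        return status;
    }
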
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 2c68cef..7d234bb 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -40,6 +40,21 @@
 
 __BEGIN_DECLS
 
+/*
+ * Note: The following enum values were incorrect and have been updated:
+ * enum                                                        old value                        updated value
+ * ACAMERA_CONTROL_SETTINGS_OVERRIDE                           ACAMERA_CONTROL_START + 49       ACAMERA_CONTROL_START + 52;
+ * ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES                ACAMERA_CONTROL_START + 50       ACAMERA_CONTROL_START + 53;
+ * ACAMERA_CONTROL_AUTOFRAMING                                 ACAMERA_CONTROL_START + 52       ACAMERA_CONTROL_START + 55;
+ * ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE                       ACAMERA_CONTROL_START + 53       ACAMERA_CONTROL_START + 56;
+ * ACAMERA_CONTROL_AUTOFRAMING_STATE                           ACAMERA_CONTROL_START + 54       ACAMERA_CONTROL_START + 57;
+ * ACAMERA_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE        ACAMERA_CONTROL_START + 55       ACAMERA_CONTROL_START + 58;
+ * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE                       ACAMERA_CONTROL_START + 56       ACAMERA_CONTROL_START + 59;
+ *
+ * ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES                   ACAMERA_SCALER_START + 25        ACAMERA_SCALER_START + 26;
+ * ACAMERA_SCALER_CROP_REGION                                  ACAMERA_SCALER_START + 26        ACAMERA_SCALER_START + 27;
+ */
+
 
 typedef enum acamera_metadata_section {
     ACAMERA_COLOR_CORRECTION,
@@ -76,6 +91,7 @@
     ACAMERA_AUTOMOTIVE_LENS,
     ACAMERA_EXTENSION,
     ACAMERA_JPEGR,
+    ACAMERA_EFV,
     ACAMERA_SECTION_COUNT,
 
     ACAMERA_VENDOR = 0x8000
@@ -123,6 +139,7 @@
     ACAMERA_AUTOMOTIVE_LENS_START  = ACAMERA_AUTOMOTIVE_LENS   << 16,
     ACAMERA_EXTENSION_START        = ACAMERA_EXTENSION         << 16,
     ACAMERA_JPEGR_START            = ACAMERA_JPEGR             << 16,
+    ACAMERA_EFV_START              = ACAMERA_EFV               << 16,
     ACAMERA_VENDOR_START           = ACAMERA_VENDOR            << 16
 } acamera_metadata_section_start_t;
 
@@ -586,7 +603,7 @@
      * ACAMERA_SENSOR_FRAME_DURATION.</p>
      * <p>Note that the actual achievable max framerate also depends on the minimum frame
      * duration of the output streams. The max frame rate will be
-     * <code>min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations)</code>. For example,
+     * <code>min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations))</code>. For example,
      * if the application sets this key to <code>{60, 60}</code>, but the maximum minFrameDuration among
      * all configured streams is 33ms, the maximum framerate won't be 60fps, but will be
      * 30fps.</p>
@@ -2147,7 +2164,7 @@
      * </ul>
      */
     ACAMERA_CONTROL_SETTINGS_OVERRIDE =                         // int32 (acamera_metadata_enum_android_control_settings_override_t)
-            ACAMERA_CONTROL_START + 49,
+            ACAMERA_CONTROL_START + 52,
     /**
      * <p>List of available settings overrides supported by the camera device that can
      * be used to speed up certain controls.</p>
@@ -2173,7 +2190,7 @@
      * @see ACAMERA_CONTROL_SETTINGS_OVERRIDE
      */
     ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES =              // int32[n]
-            ACAMERA_CONTROL_START + 50,
+            ACAMERA_CONTROL_START + 53,
     /**
      * <p>Automatic crop, pan and zoom to keep objects in the center of the frame.</p>
      *
@@ -2200,7 +2217,7 @@
      * @see ACAMERA_SCALER_CROP_REGION
      */
     ACAMERA_CONTROL_AUTOFRAMING =                               // byte (acamera_metadata_enum_android_control_autoframing_t)
-            ACAMERA_CONTROL_START + 52,
+            ACAMERA_CONTROL_START + 55,
     /**
      * <p>Whether the camera device supports ACAMERA_CONTROL_AUTOFRAMING.</p>
      *
@@ -2216,7 +2233,7 @@
      * <p>Will be <code>false</code> if auto-framing is not available.</p>
      */
     ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE =                     // byte (acamera_metadata_enum_android_control_autoframing_available_t)
-            ACAMERA_CONTROL_START + 53,
+            ACAMERA_CONTROL_START + 56,
     /**
      * <p>Current state of auto-framing.</p>
      *
@@ -2243,7 +2260,7 @@
      * @see ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
      */
     ACAMERA_CONTROL_AUTOFRAMING_STATE =                         // byte (acamera_metadata_enum_android_control_autoframing_state_t)
-            ACAMERA_CONTROL_START + 54,
+            ACAMERA_CONTROL_START + 57,
     /**
      * <p>The operating luminance range of low light boost measured in lux (lx).</p>
      *
@@ -2256,7 +2273,7 @@
      *
      */
     ACAMERA_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE =      // float[2]
-            ACAMERA_CONTROL_START + 55,
+            ACAMERA_CONTROL_START + 58,
     /**
      * <p>Current state of the low light boost AE mode.</p>
      *
@@ -2274,9 +2291,10 @@
      * indicate when it is not being applied by returning 'INACTIVE'.</p>
      * <p>This key will be absent from the CaptureResult if AE mode is not set to
      * 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY.</p>
+     * <p>The default value will always be 'INACTIVE'.</p>
      */
     ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE =                     // byte (acamera_metadata_enum_android_control_low_light_boost_state_t)
-            ACAMERA_CONTROL_START + 56,
+            ACAMERA_CONTROL_START + 59,
     ACAMERA_CONTROL_END,
 
     /**
@@ -2409,35 +2427,39 @@
      * </ul></p>
      *
      * <p>Flash strength level to use in capture mode i.e. when the applications control
-     * flash with either SINGLE or TORCH mode.</p>
-     * <p>Use android.flash.info.singleStrengthMaxLevel and
-     * android.flash.info.torchStrengthMaxLevel to check whether the device supports
+     * flash with either <code>SINGLE</code> or <code>TORCH</code> mode.</p>
+     * <p>Use ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL to check whether the device supports
      * flash strength control or not.
-     * If the values of android.flash.info.singleStrengthMaxLevel and
-     * android.flash.info.torchStrengthMaxLevel are greater than 1,
+     * If the values of ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL are greater than 1,
      * then the device supports manual flash strength control.</p>
-     * <p>If the ACAMERA_FLASH_MODE <code>==</code> TORCH the value must be &gt;= 1
-     * and &lt;= android.flash.info.torchStrengthMaxLevel.
+     * <p>If the ACAMERA_FLASH_MODE <code>==</code> <code>TORCH</code> the value must be &gt;= 1
+     * and &lt;= ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL.
      * If the application doesn't set the key and
-     * android.flash.info.torchStrengthMaxLevel &gt; 1,
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL &gt; 1,
      * then the flash will be fired at the default level set by HAL in
-     * android.flash.info.torchStrengthDefaultLevel.
-     * If the ACAMERA_FLASH_MODE <code>==</code> SINGLE, then the value must be &gt;= 1
-     * and &lt;= android.flash.info.singleStrengthMaxLevel.
+     * ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL.
+     * If the ACAMERA_FLASH_MODE <code>==</code> <code>SINGLE</code>, then the value must be &gt;= 1
+     * and &lt;= ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL.
      * If the application does not set this key and
-     * android.flash.info.singleStrengthMaxLevel &gt; 1,
+     * ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL &gt; 1,
      * then the flash will be fired at the default level set by HAL
-     * in android.flash.info.singleStrengthDefaultLevel.
-     * If ACAMERA_CONTROL_AE_MODE is set to any of ON_AUTO_FLASH, ON_ALWAYS_FLASH,
-     * ON_AUTO_FLASH_REDEYE, ON_EXTERNAL_FLASH values, then the strengthLevel will be ignored.</p>
+     * in ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL.
+     * If ACAMERA_CONTROL_AE_MODE is set to any of <code>ON_AUTO_FLASH</code>, <code>ON_ALWAYS_FLASH</code>,
+     * <code>ON_AUTO_FLASH_REDEYE</code>, <code>ON_EXTERNAL_FLASH</code> values, then the strengthLevel will be ignored.</p>
      *
      * @see ACAMERA_CONTROL_AE_MODE
      * @see ACAMERA_FLASH_MODE
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_STRENGTH_LEVEL =                              // int32
             ACAMERA_FLASH_START + 6,
     /**
-     * <p>Maximum flash brightness level for manual flash control in SINGLE mode.</p>
+     * <p>Maximum flash brightness level for manual flash control in <code>SINGLE</code> mode.</p>
      *
      * <p>Type: int32</p>
      *
@@ -2447,7 +2469,7 @@
      * </ul></p>
      *
      * <p>Maximum flash brightness level in camera capture mode and
-     * ACAMERA_FLASH_MODE set to SINGLE.
+     * ACAMERA_FLASH_MODE set to <code>SINGLE</code>.
      * Value will be &gt; 1 if the manual flash strength control feature is supported,
      * otherwise the value will be equal to 1.
      * Note that this level is just a number of supported levels (the granularity of control).
@@ -2458,7 +2480,7 @@
     ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL =                   // int32
             ACAMERA_FLASH_START + 7,
     /**
-     * <p>Default flash brightness level for manual flash control in SINGLE mode.</p>
+     * <p>Default flash brightness level for manual flash control in <code>SINGLE</code> mode.</p>
      *
      * <p>Type: int32</p>
      *
@@ -2468,14 +2490,16 @@
      * </ul></p>
      *
      * <p>If flash unit is available this will be greater than or equal to 1 and less
-     * or equal to <code>android.flash.info.singleStrengthMaxLevel</code>.
+     * or equal to ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL.
      * Note for devices that do not support the manual flash strength control
      * feature, this level will always be equal to 1.</p>
+     *
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL =               // int32
             ACAMERA_FLASH_START + 8,
     /**
-     * <p>Maximum flash brightness level for manual flash control in TORCH mode</p>
+     * <p>Maximum flash brightness level for manual flash control in <code>TORCH</code> mode</p>
      *
      * <p>Type: int32</p>
      *
@@ -2485,22 +2509,24 @@
      * </ul></p>
      *
      * <p>Maximum flash brightness level in camera capture mode and
-     * ACAMERA_FLASH_MODE set to TORCH.
+     * ACAMERA_FLASH_MODE set to <code>TORCH</code>.
      * Value will be &gt; 1 if the manual flash strength control feature is supported,
      * otherwise the value will be equal to 1.</p>
      * <p>Note that this level is just a number of supported levels(the granularity of control).
      * There is no actual physical power units tied to this level.
-     * There is no relation between android.flash.info.torchStrengthMaxLevel and
-     * android.flash.info.singleStrengthMaxLevel i.e. the ratio of
-     * android.flash.info.torchStrengthMaxLevel:android.flash.info.singleStrengthMaxLevel
+     * There is no relation between ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL i.e. the ratio of
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL:ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
      * is not guaranteed to be the ratio of actual brightness.</p>
      *
      * @see ACAMERA_FLASH_MODE
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL =                    // int32
             ACAMERA_FLASH_START + 9,
     /**
-     * <p>Default flash brightness level for manual flash control in TORCH mode</p>
+     * <p>Default flash brightness level for manual flash control in <code>TORCH</code> mode</p>
      *
      * <p>Type: int32</p>
      *
@@ -2510,9 +2536,11 @@
      * </ul></p>
      *
      * <p>If flash unit is available this will be greater than or equal to 1 and less
-     * or equal to android.flash.info.torchStrengthMaxLevel.
+     * or equal to ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL.
      * Note for the devices that do not support the manual flash strength control feature,
      * this level will always be equal to 1.</p>
+     *
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL =                // int32
             ACAMERA_FLASH_START + 10,
@@ -2686,7 +2714,9 @@
      * upright.</p>
      * <p>Camera devices may either encode this value into the JPEG EXIF header, or
      * rotate the image data to match this orientation. When the image data is rotated,
-     * the thumbnail data will also be rotated.</p>
+     * the thumbnail data will also be rotated. Additionally, in the case where the image data
+     * is rotated, <a href="https://developer.android.com/reference/android/media/Image.html#getWidth">Image#getWidth</a> and <a href="https://developer.android.com/reference/android/media/Image.html#getHeight">Image#getHeight</a>
+     * will not be updated to reflect the height and width of the rotated image.</p>
      * <p>Note that this orientation is relative to the orientation of the camera sensor, given
      * by ACAMERA_SENSOR_ORIENTATION.</p>
      * <p>To translate from the device orientation given by the Android sensor APIs for camera
@@ -4669,7 +4699,7 @@
      * application should leave stream use cases within the session as DEFAULT.</p>
      */
     ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES =                 // int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
-            ACAMERA_SCALER_START + 25,
+            ACAMERA_SCALER_START + 26,
     /**
      * <p>The region of the sensor that corresponds to the RAW read out for this
      * capture when the stream use case of a RAW stream is set to CROPPED_RAW.</p>
@@ -4705,24 +4735,27 @@
      * </ul>
      * <p>should be interpreted in the effective after raw crop field-of-view coordinate system.
      * In this coordinate system,
-     * {preCorrectionActiveArraySize.left, preCorrectionActiveArraySize.top} corresponds to the
+     * {ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.left,
+     *  ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.top} corresponds to
      * the top left corner of the cropped RAW frame and
-     * {preCorrectionActiveArraySize.right, preCorrectionActiveArraySize.bottom} corresponds to
+     * {ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.right,
+     *  ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.bottom} corresponds to
      * the bottom right corner. Client applications must use the values of the keys
      * in the CaptureResult metadata if present.</p>
-     * <p>Crop regions (android.scaler.CropRegion), AE/AWB/AF regions and face coordinates still
+     * <p>Crop regions (ACAMERA_SCALER_CROP_REGION), AE/AWB/AF regions and face coordinates still
      * use the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE coordinate system as usual.</p>
      *
      * @see ACAMERA_LENS_DISTORTION
      * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
      * @see ACAMERA_LENS_POSE_ROTATION
      * @see ACAMERA_LENS_POSE_TRANSLATION
+     * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
      */
     ACAMERA_SCALER_RAW_CROP_REGION =                            // int32[4]
-            ACAMERA_SCALER_START + 26,
+            ACAMERA_SCALER_START + 27,
     ACAMERA_SCALER_END,
 
     /**
@@ -5841,10 +5874,16 @@
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      * </ul></p>
      *
-     * <p>If TRUE, all images produced by the camera device in the RAW image formats will
-     * have lens shading correction already applied to it. If FALSE, the images will
-     * not be adjusted for lens shading correction.
-     * See android.request.maxNumOutputRaw for a list of RAW image formats.</p>
+     * <p>If <code>true</code>, all images produced by the camera device in the <code>RAW</code> image formats will have
+     * at least some lens shading correction already applied to it. If <code>false</code>, the images will
+     * not be adjusted for lens shading correction.  See android.request.maxNumOutputRaw for a
+     * list of RAW image formats.</p>
+     * <p>When <code>true</code>, the <code>lensShadingCorrectionMap</code> key may still have values greater than 1.0,
+     * and those will need to be applied to any captured RAW frames for them to match the shading
+     * correction of processed buffers such as <code>YUV</code> or <code>JPEG</code> images. This may occur, for
+     * example, when some basic fixed lens shading correction is applied by hardware to RAW data,
+     * and additional correction is done dynamically in the camera processing pipeline after
+     * demosaicing.</p>
      * <p>This key will be <code>null</code> for all devices do not report this information.
      * Devices with RAW capability will always report this information in this key.</p>
      */
@@ -8270,9 +8309,9 @@
      * FPS.</p>
      * <p>If the session configuration is not supported, the AE mode reported in the
      * CaptureResult will be 'ON' instead of 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY'.</p>
-     * <p>The application can observe the CapturerResult field
-     * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE to determine when low light boost is 'ACTIVE' or
-     * 'INACTIVE'.</p>
+     * <p>When this AE mode is enabled, the CaptureResult field
+     * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE will indicate when low light boost is 'ACTIVE'
+     * or 'INACTIVE'. By default ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE will be 'INACTIVE'.</p>
      * <p>The low light boost is 'ACTIVE' once the scene lighting condition is less than the
      * upper bound lux value defined by ACAMERA_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE.
      * This mode will be 'INACTIVE' once the scene lighting condition is greater than the
@@ -11524,6 +11563,7 @@
 
 
 
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_METADATA_TAGS_H */
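
Since the numeric values called out in the note at the top of this header have shifted, clients should reference these tags only through the enum names and rebuild against the updated header rather than caching raw tag numbers. A small sketch, assuming the characteristics metadata has already been fetched:

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    bool supportsSettingsOverride(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        // The symbolic name now resolves to ACAMERA_CONTROL_START + 53 (previously + 50),
        // so code that hard-coded the old number would query the wrong tag.
        if (ACameraMetadata_getConstEntry(chars, ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
                                          &entry) != ACAMERA_OK) {
            return false;
        }
        return entry.count > 0;
    }
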
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 4c54658..7d7868b 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -35,6 +35,7 @@
     ACameraMetadata_copy;
     ACameraMetadata_free;
     ACameraMetadata_getAllTags;
+    ACameraMetadata_getTagFromName; #introduced=35
     ACameraMetadata_getConstEntry;
     ACameraMetadata_isLogicalMultiCamera; # introduced=29
     ACameraMetadata_fromCameraMetadata; # introduced=30
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 3aa7817..cdba8ff 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -396,6 +396,7 @@
 
 template <class T>
 void CameraManagerGlobal::registerAvailCallback(const T *callback) {
+    getCameraService();
     Mutex::Autolock _l(mLock);
     Callback cb(callback);
     auto res = mCallbacks.insert(cb);
@@ -573,7 +574,7 @@
 void CameraManagerGlobal::onStatusChangedLocked(
         const CameraDeviceStatus &status, const std::string &cameraId) {
     if (!validStatus(status)) {
-        ALOGE("%s: Invalid status %d", __FUNCTION__, status);
+        ALOGE("%s: Invalid status %d", __FUNCTION__, static_cast<int>(status));
         return;
     }
 
@@ -628,7 +629,7 @@
         const CameraDeviceStatus &status, const std::string& cameraId,
         const std::string& physicalCameraId) {
     if (!validStatus(status)) {
-        ALOGE("%s: Invalid status %d", __FUNCTION__, status);
+        ALOGE("%s: Invalid status %d", __FUNCTION__, static_cast<int>(status));
         return;
     }
 
@@ -642,7 +643,8 @@
     if (logicalCamStatus != CameraDeviceStatus::STATUS_PRESENT &&
             logicalCamStatus != CameraDeviceStatus::STATUS_NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
-                __FUNCTION__, physicalCameraId.c_str(), status, logicalCamStatus);
+              __FUNCTION__, physicalCameraId.c_str(), static_cast<int>(status),
+              static_cast<int>(logicalCamStatus));
         return;
     }
 
@@ -865,6 +867,25 @@
     return status == OK ? ACAMERA_OK : ACAMERA_ERROR_METADATA_NOT_FOUND;
 }
 
-ACameraManager::~ACameraManager() {
+void ACameraManager::registerAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->registerAvailabilityCallback(callback);
+}
 
+void ACameraManager::unregisterAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterAvailabilityCallback(callback);
+}
+
+void ACameraManager::registerExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->registerExtendedAvailabilityCallback(callback);
+}
+
+void ACameraManager::unregisterExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterExtendedAvailabilityCallback(callback);
+}
+
+ACameraManager::~ACameraManager() {
 }
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 85acee7..2d8eefa 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -265,6 +265,12 @@
                                ACameraDevice_StateCallbacks* callback,
                                /*out*/ACameraDevice** device);
     camera_status_t getTagFromName(const char *cameraId, const char *name, uint32_t *tag);
+    void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void registerExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
+    void unregisterExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
 
   private:
     enum {
diff --git a/camera/tests/Android.bp b/camera/tests/Android.bp
index 65b8b41..484335a 100644
--- a/camera/tests/Android.bp
+++ b/camera/tests/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_camera_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: [
         "frameworks_av_camera_license",
@@ -28,6 +29,7 @@
         "CameraCharacteristicsPermission.cpp",
     ],
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "liblog",
         "libutils",
         "libcutils",
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index bb963ab..5135b5d 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -33,6 +33,7 @@
 #include <hardware/gralloc.h>
 
 #include <camera/CameraMetadata.h>
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/ICameraServiceListener.h>
 #include <android/hardware/BnCameraServiceListener.h>
@@ -43,8 +44,10 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
+#include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
@@ -77,29 +80,34 @@
 public:
     virtual ~TestCameraServiceListener() {};
 
-    virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId) override {
+    virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         Mutex::Autolock l(mLock);
         mCameraStatuses[cameraId] = status;
         mCondition.broadcast();
         return binder::Status::ok();
-    };
+    }
 
-    virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
-            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) override {
+    virtual binder::Status onPhysicalCameraStatusChanged([[maybe_unused]] int32_t /*status*/,
+            [[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] const std::string& /*physicalCameraId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         // No op
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onTorchStatusChanged(int32_t status,
-            const std::string& cameraId) override {
+            const std::string& cameraId, [[maybe_unused]] int32_t /*deviceId*/) override {
         Mutex::Autolock l(mLock);
         mCameraTorchStatuses[cameraId] = status;
         mTorchCondition.broadcast();
         return binder::Status::ok();
-    };
+    }
 
-    virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
-            int32_t /*torchStrength*/) override {
+    virtual binder::Status onTorchStrengthLevelChanged(
+            [[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] int32_t /*torchStrength*/,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         // No op
         return binder::Status::ok();
     }
@@ -109,13 +117,15 @@
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
-            const std::string& /*clientPackageName*/) {
+    virtual binder::Status onCameraOpened([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] const std::string& /*clientPackageName*/,
+            [[maybe_unused]] int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) override {
+    virtual binder::Status onCameraClosed([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         // No op
         return binder::Status::ok();
     }
@@ -133,7 +143,7 @@
             }
         }
         return true;
-    };
+    }
 
     bool waitForTorchState(int32_t status, int32_t cameraId) const {
         Mutex::Autolock l(mLock);
@@ -153,7 +163,7 @@
             foundStatus = (iter != mCameraTorchStatuses.end() && iter->second == status);
         }
         return true;
-    };
+    }
 
     int32_t getTorchStatus(int32_t cameraId) const {
         Mutex::Autolock l(mLock);
@@ -162,7 +172,7 @@
             return hardware::ICameraServiceListener::TORCH_STATUS_UNKNOWN;
         }
         return iter->second;
-    };
+    }
 
     int32_t getStatus(const std::string& cameraId) const {
         Mutex::Autolock l(mLock);
@@ -171,7 +181,7 @@
             return hardware::ICameraServiceListener::STATUS_UNKNOWN;
         }
         return iter->second;
-    };
+    }
 };
 
 // Callback implementation
@@ -230,7 +240,6 @@
         return binder::Status::ok();
     }
 
-
     virtual binder::Status onResultReceived(const CameraMetadata& metadata,
             const CaptureResultExtras& resultExtras,
             const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
@@ -296,7 +305,6 @@
         mStatusesHit.clear();
 
         return true;
-
     }
 
     void clearStatus() const {
@@ -307,7 +315,6 @@
     bool waitForIdle() const {
         return waitForStatus(IDLE);
     }
-
 };
 
 namespace {
@@ -324,7 +331,7 @@
         }
     };
     sp<DeathNotifier>         gDeathNotifier;
-}; // anonymous namespace
+} // anonymous namespace
 
 // Exercise basic binder calls for the camera service
 TEST(CameraServiceBinderTest, CheckBinderCameraService) {
@@ -342,7 +349,12 @@
     binder::Status res;
 
     int32_t numCameras = 0;
-    res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, &numCameras);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.packageName = "meeeeeeeee!";
+    res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution,
+            /*devicePolicy*/0, &numCameras);
     EXPECT_TRUE(res.isOk()) << res;
     EXPECT_LE(0, numCameras);
 
@@ -354,7 +366,7 @@
 
     EXPECT_EQ(numCameras, static_cast<const int>(statuses.size()));
     for (const auto &it : statuses) {
-        listener->onStatusChanged(it.status, it.cameraId);
+        listener->onStatusChanged(it.status, it.cameraId, clientAttribution.deviceId);
     }
 
     for (int32_t i = 0; i < numCameras; i++) {
@@ -372,7 +384,8 @@
         // Check metadata binder call
         CameraMetadata metadata;
         res = service->getCameraCharacteristics(cameraId,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                clientAttribution, /*devicePolicy*/0, &metadata);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(metadata.isEmpty());
 
@@ -386,10 +399,10 @@
         // Check connect binder calls
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
-        res = service->connectDevice(callbacks, cameraId, "meeeeeeeee!",
-                {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+        res = service->connectDevice(callbacks, cameraId,
+                /*oomScoreOffset*/ 0,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, /*out*/&device);
+                /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
         device->disconnect();
@@ -399,12 +412,12 @@
         if (torchStatus == hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF) {
             // Check torch calls
             res = service->setTorchMode(cameraId,
-                    /*enabled*/true, callbacks);
+                    /*enabled*/true, callbacks, clientAttribution, /*devicePolicy*/0);
             EXPECT_TRUE(res.isOk()) << res;
             EXPECT_TRUE(listener->waitForTorchState(
                     hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON, i));
             res = service->setTorchMode(cameraId,
-                    /*enabled*/false, callbacks);
+                    /*enabled*/false, callbacks, clientAttribution, /*devicePolicy*/0);
             EXPECT_TRUE(res.isOk()) << res;
             EXPECT_TRUE(listener->waitForTorchState(
                     hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF, i));
@@ -430,10 +443,15 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         {
             SCOPED_TRACE("openNewDevice");
-            binder::Status res = service->connectDevice(callbacks, deviceId, "meeeeeeeee!",
-                    {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+            AttributionSourceState clientAttribution;
+            clientAttribution.deviceId = kDefaultDeviceId;
+            clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+            clientAttribution.packageName = "meeeeeeeee!";
+            binder::Status res = service->connectDevice(callbacks, deviceId,
+                    /*oomScoreOffset*/ 0,
                     /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                    /*overrideToPortrait*/false, /*out*/&device);
+                    /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0,
+                    /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
         auto p = std::make_pair(callbacks, device);
@@ -465,11 +483,13 @@
         serviceListener = new TestCameraServiceListener();
         std::vector<hardware::CameraStatus> statuses;
         service->addListener(serviceListener, &statuses);
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         for (const auto &it : statuses) {
-            serviceListener->onStatusChanged(it.status, it.cameraId);
+            serviceListener->onStatusChanged(it.status, it.cameraId, clientAttribution.deviceId);
         }
         service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
-                &numCameras);
+                clientAttribution, /*devicePolicy*/0, &numCameras);
     }
 
     virtual void TearDown() {
@@ -479,7 +499,6 @@
             closeDevice(p);
         }
     }
-
 };
 
 TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
@@ -500,6 +519,23 @@
 
         // Setup a buffer queue; I'm just using the vendor opaque format here as that is
         // guaranteed to be present
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(
+                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/ 2, /*controlledByApp*/ true);
+        EXPECT_TRUE(opaqueConsumer.get() != nullptr);
+        opaqueConsumer->setName(String8("nom nom nom"));
+
+        // Set to VGA dimens for default, as that is guaranteed to be present
+        EXPECT_EQ(OK, opaqueConsumer->setDefaultBufferSize(640, 480));
+        EXPECT_EQ(OK,
+                  opaqueConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED));
+
+        sp<Surface> surface = opaqueConsumer->getSurface();
+
+        sp<IGraphicBufferProducer> producer = surface->getIGraphicBufferProducer();
+        std::string noPhysicalId;
+        OutputConfiguration output(producer, /*rotation*/ 0, noPhysicalId);
+#else
         sp<IGraphicBufferProducer> gbProducer;
         sp<IGraphicBufferConsumer> gbConsumer;
         BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
@@ -516,6 +552,7 @@
 
         std::string noPhysicalId;
         OutputConfiguration output(gbProducer, /*rotation*/0, noPhysicalId);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         // Can we configure?
         res = device->beginConfigure();
@@ -647,8 +684,7 @@
 
         closeDevice(p);
     }
-
-};
+}
 
 TEST_F(CameraClientBinderTest, CheckBinderCaptureRequest) {
     sp<CaptureRequest> requestOriginal, requestParceled;
@@ -707,4 +743,4 @@
     EXPECT_TRUE(it->settings.exists(ANDROID_CONTROL_CAPTURE_INTENT));
     entry = it->settings.find(ANDROID_CONTROL_CAPTURE_INTENT);
     EXPECT_EQ(entry.data.u8[0], intent2);
-};
+}
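
The test updates above all follow one calling convention: each ICameraService method now takes the caller's AttributionSourceState and a devicePolicy argument. A condensed sketch of that pattern, assuming `service` is a connected ICameraService proxy and the same headers as the test (kDefaultDeviceId comes from <camera/CameraUtils.h>):

    AttributionSourceState clientAttribution;
    clientAttribution.deviceId = kDefaultDeviceId;
    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
    clientAttribution.packageName = "binder-test-client";  // arbitrary test package name

    int32_t numCameras = 0;
    binder::Status res = service->getNumberOfCameras(
            hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution,
            /*devicePolicy*/ 0, &numCameras);
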
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 1de7cb4..9204eb1 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -19,11 +19,13 @@
 
 #include <gtest/gtest.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/ProcessState.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
 #include <camera/CameraMetadata.h>
 #include <camera/Camera.h>
+#include <camera/CameraUtils.h>
 #include <android/hardware/ICameraService.h>
 
 using namespace android;
@@ -31,7 +33,6 @@
 
 class CameraCharacteristicsPermission : public ::testing::Test {
 protected:
-
     CameraCharacteristicsPermission() : numCameras(0){}
     //Gtest interface
     void SetUp() override;
@@ -47,8 +48,11 @@
     sp<IServiceManager> sm = defaultServiceManager();
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     rc = mCameraService->getNumberOfCameras(
-            hardware::ICameraService::CAMERA_TYPE_ALL, &numCameras);
+            hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution, /*devicePolicy*/0,
+            &numCameras);
     EXPECT_TRUE(rc.isOk());
 }
 
@@ -61,7 +65,6 @@
 // a camera permission.
 TEST_F(CameraCharacteristicsPermission, TestCameraPermission) {
     for (int32_t cameraId = 0; cameraId < numCameras; cameraId++) {
-
         std::string cameraIdStr = std::to_string(cameraId);
         bool isSupported = false;
         auto rc = mCameraService->supportsCameraApi(cameraIdStr,
@@ -73,9 +76,11 @@
 
         CameraMetadata metadata;
         std::vector<int32_t> tagsNeedingPermission;
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, &metadata);
+                /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, &metadata);
         ASSERT_TRUE(rc.isOk());
         EXPECT_FALSE(metadata.isEmpty());
         EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 3ae7659..2740d09 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -19,6 +19,7 @@
 
 #include <gtest/gtest.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/ProcessState.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -27,6 +28,7 @@
 #include <camera/CameraParameters.h>
 #include <camera/CameraMetadata.h>
 #include <camera/Camera.h>
+#include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 #include <android/hardware/ICameraService.h>
 
@@ -83,8 +85,11 @@
     sp<IServiceManager> sm = defaultServiceManager();
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     rc = mCameraService->getNumberOfCameras(
-            hardware::ICameraService::CAMERA_TYPE_ALL, &numCameras);
+            hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution, /*devicePolicy*/0,
+            &numCameras);
     EXPECT_TRUE(rc.isOk());
 
     mComposerClient = new SurfaceComposerClient;
@@ -109,7 +114,6 @@
 
 void CameraZSLTests::dataCallback(int32_t msgType, const sp<IMemory>& /*data*/,
         camera_frame_metadata_t *) {
-
     switch (msgType) {
     case CAMERA_MSG_PREVIEW_FRAME: {
         Mutex::Autolock l(mPreviewLock);
@@ -127,7 +131,7 @@
     default:
         ALOGV("%s: msgType: %d", __FUNCTION__, msgType);
     }
-};
+}
 
 status_t CameraZSLTests::waitForPreviewStart() {
     status_t rc = NO_ERROR;
@@ -182,9 +186,11 @@
         }
 
         CameraMetadata metadata;
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                &metadata);
+                clientAttribution, /*devicePolicy*/0, &metadata);
         if (!rc.isOk()) {
             // The test is relevant only for cameras with Hal 3.x
             // support.
@@ -208,11 +214,13 @@
             continue;
         }
 
+        clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+        clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+        clientAttribution.packageName = "ZSLTest";
         rc = mCameraService->connect(this, cameraId,
-                "ZSLTest", hardware::ICameraService::USE_CALLING_UID,
-                hardware::ICameraService::USE_CALLING_PID,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
+                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, clientAttribution,
+                /*devicePolicy*/0, &cameraDevice);
         EXPECT_TRUE(rc.isOk());
 
         CameraParameters params(cameraDevice->getParameters());
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index b74b7a1..3b6413c 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 package {
+    default_team: "trendy_team_camera_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_camera_license"
@@ -30,6 +31,7 @@
     ],
     shared_libs: [
         "camera_platform_flags_c_lib",
+        "framework-permission-aidl-cpp",
         "libbase",
         "libcutils",
         "libutils",
diff --git a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
index 12b5bc3..c00f2ba 100644
--- a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <camera2/ConcurrentCamera.h>
+#include <CameraUtils.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include "camera2common.h"
 
@@ -33,7 +34,8 @@
         size_t concurrentCameraIdSize = fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
         for (size_t idx = 0; idx < concurrentCameraIdSize; ++idx) {
             string concurrentCameraId = fdp.ConsumeRandomLengthString();
-            camIdCombination.mConcurrentCameraIds.push_back(concurrentCameraId);
+            camIdCombination.mConcurrentCameraIdDeviceIdPairs.push_back(
+                    {concurrentCameraId, kDefaultDeviceId});
         }
     }
 
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index c2a7549..f46d246 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -16,6 +16,8 @@
 
 #include <Camera.h>
 #include <CameraParameters.h>
+#include <CameraUtils.h>
+#include <android/content/AttributionSourceState.h>
 #include <binder/MemoryDealer.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <gui/Surface.h>
@@ -88,6 +90,7 @@
     bool initCamera();
     void invokeCamera();
     void invokeSetParameters();
+    native_handle_t* createNativeHandle();
     sp<Camera> mCamera = nullptr;
     FuzzedDataProvider* mFDP = nullptr;
 
@@ -102,6 +105,18 @@
     };
 };
 
+native_handle_t* CameraFuzzer::createNativeHandle() {
+    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kMinElements, kMaxElements);
+    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    native_handle_t* handle = native_handle_create(numFds, numInts);
+    for (int32_t i = 0; i < numFds; ++i) {
+        std::string filename = mFDP->ConsumeRandomLengthString(kMaxBytes);
+        int32_t fd = open(filename.c_str(), O_RDWR | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR);
+        handle->data[i] = fd;
+    }
+    return handle;
+}
+
 bool CameraFuzzer::initCamera() {
     ProcessState::self()->startThreadPool();
     sp<IServiceManager> sm = defaultServiceManager();
@@ -109,21 +124,25 @@
     sp<ICameraService> cameraService = nullptr;
     cameraService = interface_cast<ICameraService>(binder);
     sp<ICamera> cameraDevice = nullptr;
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     if (mFDP->ConsumeBool()) {
-        cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */, "CAMERAFUZZ",
-                               hardware::ICameraService::USE_CALLING_UID,
-                               hardware::ICameraService::USE_CALLING_PID,
+        clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+        clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+        clientAttribution.packageName = "CAMERAFUZZ";
+        cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
                                /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                                /*overrideToPortrait*/ false, /*forceSlowJpegMode*/ false,
-                               &cameraDevice);
+                               clientAttribution, /*devicePolicy*/0, &cameraDevice);
     } else {
+        clientAttribution.uid = mFDP->ConsumeIntegral<int8_t>();
+        clientAttribution.pid = mFDP->ConsumeIntegral<int8_t>();
+        clientAttribution.packageName = mFDP->ConsumeRandomLengthString(kMaxBytes).c_str();
         cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
-                               mFDP->ConsumeRandomLengthString(kMaxBytes).c_str(),
-                               mFDP->ConsumeIntegral<int8_t>() /* clientUid */,
-                               mFDP->ConsumeIntegral<int8_t>() /* clientPid */,
                                /*targetSdkVersion*/ mFDP->ConsumeIntegral<int32_t>(),
                                /*overrideToPortrait*/ mFDP->ConsumeBool(),
-                               /*forceSlowJpegMode*/ mFDP->ConsumeBool(), &cameraDevice);
+                               /*forceSlowJpegMode*/ mFDP->ConsumeBool(), clientAttribution,
+                               /*devicePolicy*/0, &cameraDevice);
     }
 
     mCamera = Camera::create(cameraDevice);
@@ -150,13 +169,16 @@
     }
 
     int32_t cameraId = mFDP->ConsumeIntegral<int32_t>();
-    Camera::getNumberOfCameras();
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    Camera::getNumberOfCameras(clientAttribution, /*devicePolicy*/0);
     CameraInfo cameraInfo;
     cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
                                             : mFDP->ConsumeIntegral<int32_t>();
     cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
                                                  : mFDP->ConsumeIntegral<int32_t>();
-    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
+    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, clientAttribution,
+                          /*devicePolicy*/0, &cameraInfo);
     mCamera->reconnect();
 
     sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
@@ -288,15 +310,11 @@
                 },
                 [&]() {
                     int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
-                    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    native_handle_t* handle = native_handle_create(numFds, numInts);
+                    native_handle_t* handle = createNativeHandle();
                     mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
                 },
                 [&]() {
-                    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    native_handle_t* handle = native_handle_create(numFds, numInts);
+                    native_handle_t* handle = createNativeHandle();
                     mCamera->releaseRecordingFrameHandle(handle);
                 },
                 [&]() { mCamera->releaseRecordingFrame(iMem); },
@@ -305,9 +323,7 @@
                     for (int8_t i = 0;
                          i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
                          ++i) {
-                        int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        native_handle_t* handle = native_handle_create(numFds, numInts);
+                        native_handle_t* handle = createNativeHandle();
                         handles.push_back(handle);
                     }
                     mCamera->releaseRecordingFrameHandleBatch(handles);
@@ -317,9 +333,7 @@
                     for (int8_t i = 0;
                          i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
                          ++i) {
-                        int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        native_handle_t* handle = native_handle_create(numFds, numInts);
+                        native_handle_t* handle = createNativeHandle();
                         handles.push_back(handle);
                     }
                     std::vector<nsecs_t> timestamps;
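The createNativeHandle() helper above centralizes the repeated native_handle_create() boilerplate; the fuzzer hands the resulting handles to the camera callbacks. A minimal sketch of the cleanup a standalone caller would pair with it, using the standard libcutils helpers (not part of this patch):

    native_handle_t* handle = createNativeHandle();
    // ... pass the handle to the API under test ...
    native_handle_close(handle);   // closes any fds stored in the handle
    native_handle_delete(handle);  // frees the native_handle_t allocation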
diff --git a/camera/tests/fuzzer/camera_utils_fuzzer.cpp b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
index 365305e..c816f82 100644
--- a/camera/tests/fuzzer/camera_utils_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
@@ -112,7 +112,8 @@
             }
             string clientPackage = mFDP->ConsumeRandomLengthString(kMaxBytes);
 
-            cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage);
+            cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage,
+                                            kDefaultDeviceId);
         }
 
         if (mFDP->ConsumeBool()) {
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index a6b20cf..03c765a 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -117,6 +117,7 @@
 static bool gSizeSpecified = false;     // was size explicitly requested?
 static bool gWantInfoScreen = false;    // do we want initial info screen?
 static bool gWantFrameTime = false;     // do we want times on each frame?
+static bool gSecureDisplay = false;     // should we create a secure virtual display?
 static uint32_t gVideoWidth = 0;        // default width+height
 static uint32_t gVideoHeight = 0;
 static uint32_t gBitRate = 20000000;     // 20Mbps
@@ -361,8 +362,8 @@
         const ui::DisplayState& displayState,
         const sp<IGraphicBufferProducer>& bufferProducer,
         sp<IBinder>* pDisplayHandle, sp<SurfaceControl>* mirrorRoot) {
-    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
-            String8("ScreenRecorder"), false /*secure*/);
+    static const std::string kDisplayName("ScreenRecorder");
+    sp<IBinder> dpy = SurfaceComposerClient::createVirtualDisplay(kDisplayName, gSecureDisplay);
     SurfaceComposerClient::Transaction t;
     t.setDisplaySurface(dpy, bufferProducer);
     setDisplayProjection(t, dpy, displayState);
@@ -796,7 +797,7 @@
     sp<Overlay> overlay;
 
     ~RecordingData() {
-        if (dpy != nullptr) SurfaceComposerClient::destroyDisplay(dpy);
+        if (dpy != nullptr) SurfaceComposerClient::destroyVirtualDisplay(dpy);
         if (overlay != nullptr) overlay->stop();
         if (encoder != nullptr) {
             encoder->stop();
@@ -1253,6 +1254,7 @@
         { "persistent-surface", no_argument,        NULL, 'p' },
         { "bframes",            required_argument,  NULL, 'B' },
         { "display-id",         required_argument,  NULL, 'd' },
+        { "capture-secure",     no_argument,        NULL, 'S' },
         { NULL,                 0,                  NULL, 0 }
     };
 
@@ -1372,6 +1374,9 @@
 
             fprintf(stderr, "Invalid physical display ID\n");
             return 2;
+        case 'S':
+            gSecureDisplay = true;
+            break;
         default:
             if (ic != '?') {
                 fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
index 16ea15e..6e55a16 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
@@ -64,10 +64,6 @@
         "libfwdlock-decoder",
     ],
 
-    whole_static_libs: [
-        "libc++fs",
-    ],
-
     local_include_dirs: ["include"],
 
     relative_install_path: "drm",
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
index 7106d66..650a589 100644
--- a/drm/libmediadrm/DrmHalAidl.cpp
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -466,6 +466,12 @@
     mMetrics->SetAppPackageName(appPackageName);
     mMetrics->SetAppUid(AIBinder_getCallingUid());
     for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
+        CryptoSchemes schemes{};
+        auto err = mFactories[i]->getSupportedCryptoSchemes(&schemes);
+        if (!err.isOk() || !std::count(schemes.uuids.begin(), schemes.uuids.end(), uuidAidl)) {
+            continue;
+        }
+
         ::ndk::ScopedAStatus status =
                 mFactories[i]->createDrmPlugin(uuidAidl, appPackageNameAidl, &pluginAidl);
         if (status.isOk()) {
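The added check short-circuits factories that cannot handle the requested UUID, so createDrmPlugin() is only attempted on a matching factory. Roughly, per factory (illustrative; `factory` stands for mFactories[i]):

    CryptoSchemes schemes{};
    auto err = factory->getSupportedCryptoSchemes(&schemes);
    bool supportsUuid = err.isOk() &&
            std::count(schemes.uuids.begin(), schemes.uuids.end(), uuidAidl) > 0;
    if (!supportsUuid) continue;  // try the next factory
    // only a matching factory gets the createDrmPlugin() call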
diff --git a/drm/libmediadrmrkp/Android.bp b/drm/libmediadrmrkp/Android.bp
index f13eb62..b1a01e4 100644
--- a/drm/libmediadrmrkp/Android.bp
+++ b/drm/libmediadrmrkp/Android.bp
@@ -5,7 +5,7 @@
         "src/**/*.cpp",
     ],
     export_include_dirs: [
-        "include"
+        "include",
     ],
     shared_libs: [
         "libbinder_ndk",
@@ -17,7 +17,7 @@
         "android.hardware.drm-V1-ndk",
         "android.hardware.security.rkp-V3-ndk",
         "libbase",
-        "libcppbor_external",
+        "libcppbor",
     ],
     defaults: [
         "keymint_use_latest_hal_aidl_ndk_shared",
@@ -42,7 +42,7 @@
         "android.hardware.drm-V1-ndk",
         "android.hardware.security.rkp-V3-ndk",
         "libbase",
-        "libcppbor_external",
+        "libcppbor",
         "libmediadrmrkp",
     ],
     vendor: true,
@@ -50,4 +50,4 @@
         "-Wall",
         "-Werror",
     ],
-}
\ No newline at end of file
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 0b0d46a..8e8e57d 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -11,13 +11,13 @@
     name: "aidl_clearkey_service_defaults-use-shared-deps",
 
     shared_libs: [
+        "android.hardware.drm-V1-ndk",
         "libbase",
         "libbinder_ndk",
         "libcrypto",
         "liblog",
         "libprotobuf-cpp-lite",
         "libutils",
-        "android.hardware.drm-V1-ndk",
     ],
 
     static_libs: [
@@ -62,7 +62,11 @@
 
     relative_install_path: "hw",
 
-    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+    ],
 
     include_dirs: ["frameworks/av/include"],
 
@@ -107,6 +111,17 @@
     installable: false, // installed in APEX
 }
 
+cc_binary {
+    name: "android.hardware.drm-service-lazy.clearkey.apex",
+    stem: "android.hardware.drm-service-lazy.clearkey",
+    defaults: [
+        "aidl_clearkey_service_defaults",
+        "aidl_clearkey_service_defaults-use-static-deps",
+    ],
+    srcs: ["ServiceLazy.cpp"],
+    installable: false, // installed in APEX
+}
+
 phony {
     name: "android.hardware.drm@latest-service.clearkey",
     required: [
@@ -126,18 +141,22 @@
 
     relative_install_path: "hw",
 
-    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+    ],
 
     include_dirs: ["frameworks/av/include"],
 
     shared_libs: [
+        "android.hardware.drm-V1-ndk",
         "libbase",
         "libbinder_ndk",
         "libcrypto",
         "liblog",
         "libprotobuf-cpp-lite",
         "libutils",
-        "android.hardware.drm-V1-ndk",
     ],
 
     static_libs: [
@@ -181,19 +200,65 @@
     ],
     prebuilts: [
         "android.hardware.drm-service.clearkey.apex.rc",
-        "android.hardware.drm-service.clearkey.xml"
+        "android.hardware.drm-service.clearkey.xml",
+    ],
+    overrides: [
+        "android.hardware.drm-service.clearkey",
     ],
 }
 
 prebuilt_etc {
     name: "android.hardware.drm-service.clearkey.apex.rc",
-    src: "android.hardware.drm-service.clearkey.apex.rc",
+    src: ":gen-android.hardware.drm-service.clearkey.apex.rc",
     installable: false,
 }
 
+genrule {
+    name: "gen-android.hardware.drm-service.clearkey.apex.rc",
+    srcs: ["android.hardware.drm-service.clearkey.rc"],
+    out: ["android.hardware.drm-service.clearkey.apex.rc"],
+    cmd: "sed -E 's%/vendor/bin/%/apex/com.android.hardware.drm.clearkey/bin/%' $(in) > $(out)",
+}
+
 prebuilt_etc {
     name: "android.hardware.drm-service.clearkey.xml",
     src: "android.hardware.drm-service.clearkey.xml",
     sub_dir: "vintf",
     installable: false,
 }
+
+apex {
+    name: "com.android.hardware.drm.clearkey.lazy",
+    manifest: "manifest.json",
+    file_contexts: "file_contexts",
+    key: "com.android.hardware.key",
+    certificate: ":com.android.hardware.certificate",
+    vendor: true,
+    updatable: false,
+
+    binaries: [
+        "android.hardware.drm-service-lazy.clearkey.apex",
+    ],
+    prebuilts: [
+        "android.hardware.drm-service-lazy.clearkey.apex.rc",
+        "android.hardware.drm-service.clearkey.xml",
+    ],
+    overrides: [
+        "android.hardware.drm-service-lazy.clearkey",
+        "android.hardware.drm-service.clearkey",
+        "com.android.hardware.drm.clearkey",
+    ],
+}
+
+prebuilt_etc {
+    name: "android.hardware.drm-service-lazy.clearkey.apex.rc",
+    src: ":gen-android.hardware.drm-service-lazy.clearkey.apex.rc",
+    installable: false,
+}
+
+genrule {
+    name: "gen-android.hardware.drm-service-lazy.clearkey.apex.rc",
+    srcs: ["android.hardware.drm-service-lazy.clearkey.rc"],
+    out: ["android.hardware.drm-service-lazy.clearkey.apex.rc"],
+    cmd: "sed -E 's%/vendor/bin/%/apex/com.android.hardware.drm.clearkey/bin/%' $(in) > $(out)",
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index 31cb7c0..8a93132 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -37,7 +37,6 @@
 const int kSecureStopIdStart = 100;
 const std::string kOfflineLicense("\"type\":\"persistent-license\"");
 const std::string kStreaming("Streaming");
-const std::string kTemporaryLicense("\"type\":\"temporary\"");
 const std::string kTrue("True");
 
 const std::string kQueryKeyLicenseType("LicenseType");
diff --git a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc
deleted file mode 100644
index f4645b3..0000000
--- a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service vendor.drm-clearkey-service /apex/com.android.hardware.drm.clearkey/bin/hw/android.hardware.drm-service.clearkey
-    class hal
-    user media
-    group mediadrm drmrpc
-    ioprio rt 4
-    task_profiles ProcessCapacityHigh
-    interface aidl android.hardware.drm.IDrmFactory/clearkey
diff --git a/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
index ddbc594..cd129ac 100644
--- a/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
+++ b/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
@@ -27,10 +27,7 @@
 const std::string kKeyTypeTag("kty");
 const std::string kKeyTag("k");
 const std::string kKeyIdTag("kid");
-const std::string kMediaSessionType("type");
-const std::string kPersistentLicenseSession("persistent-license");
 const std::string kSymmetricKeyValue("oct");
-const std::string kTemporaryLicenseSession("temporary");
 }  // namespace
 
 namespace clearkeydrm {
diff --git a/include/media/Interpolator.h b/include/media/Interpolator.h
index 0ee8779..5a2ab27 100644
--- a/include/media/Interpolator.h
+++ b/include/media/Interpolator.h
@@ -204,7 +204,7 @@
             mInterpolatorType = interpolatorType;
             return NO_ERROR;
         default:
-            ALOGE("invalid interpolatorType: %d", interpolatorType);
+            ALOGE("invalid interpolatorType: %d", static_cast<int>(interpolatorType));
             return BAD_VALUE;
         }
     }
@@ -289,7 +289,7 @@
 
     std::string toString() const {
         std::stringstream ss;
-        ss << "Interpolator{mInterpolatorType=" << static_cast<int32_t>(mInterpolatorType);
+        ss << "Interpolator{mInterpolatorType=" << media::toString(mInterpolatorType);
         ss << ", mFirstSlope=" << mFirstSlope;
         ss << ", mLastSlope=" << mLastSlope;
         ss << ", {";
diff --git a/include/media/VolumeShaper.h b/include/media/VolumeShaper.h
index 6208db3..26da363 100644
--- a/include/media/VolumeShaper.h
+++ b/include/media/VolumeShaper.h
@@ -116,6 +116,16 @@
             TYPE_SCALE,
         };
 
+        static std::string toString(Type type) {
+            switch (type) {
+                case TYPE_ID: return "TYPE_ID";
+                case TYPE_SCALE: return "TYPE_SCALE";
+                default:
+                    return std::string("Unknown Type: ")
+                            .append(std::to_string(static_cast<int>(type)));
+            }
+        }
+
         // Must match with VolumeShaper.java in frameworks/base.
         enum OptionFlag : int32_t {
             OPTION_FLAG_NONE           = 0,
@@ -125,6 +135,22 @@
             OPTION_FLAG_ALL            = (OPTION_FLAG_VOLUME_IN_DBFS | OPTION_FLAG_CLOCK_TIME),
         };
 
+        static std::string toString(OptionFlag flag) {
+            std::string s;
+            for (const auto& flagPair : std::initializer_list<std::pair<OptionFlag, const char*>>{
+                    {OPTION_FLAG_VOLUME_IN_DBFS, "OPTION_FLAG_VOLUME_IN_DBFS"},
+                    {OPTION_FLAG_CLOCK_TIME, "OPTION_FLAG_CLOCK_TIME"},
+                }) {
+                if (flag & flagPair.first) {
+                    if (!s.empty()) {
+                        s.append("|");
+                    }
+                    s.append(flagPair.second);
+                }
+            }
+            return s;
+        }
+
         // Bring from base class; must match with VolumeShaper.java in frameworks/base.
         using InterpolatorType = Interpolator<S, T>::InterpolatorType;
 
@@ -329,10 +355,10 @@
         // Returns a string for debug printing.
         std::string toString() const {
             std::stringstream ss;
-            ss << "VolumeShaper::Configuration{mType=" << static_cast<int32_t>(mType);
+            ss << "VolumeShaper::Configuration{mType=" << toString(mType);
             ss << ", mId=" << mId;
             if (mType != TYPE_ID) {
-                ss << ", mOptionFlags=" << static_cast<int32_t>(mOptionFlags);
+                ss << ", mOptionFlags=" << toString(mOptionFlags);
                 ss << ", mDurationMs=" << mDurationMs;
                 ss << ", " << Interpolator<S, T>::toString().c_str();
             }
@@ -414,6 +440,25 @@
                             | FLAG_CREATE_IF_NECESSARY),
         };
 
+        static std::string toString(Flag flag) {
+            std::string s;
+            for (const auto& flagPair : std::initializer_list<std::pair<Flag, const char*>>{
+                    {FLAG_REVERSE, "FLAG_REVERSE"},
+                    {FLAG_TERMINATE, "FLAG_TERMINATE"},
+                    {FLAG_JOIN, "FLAG_JOIN"},
+                    {FLAG_DELAY, "FLAG_DELAY"},
+                    {FLAG_CREATE_IF_NECESSARY, "FLAG_CREATE_IF_NECESSARY"},
+                }) {
+                if (flag & flagPair.first) {
+                    if (!s.empty()) {
+                        s.append("|");
+                    }
+                    s.append(flagPair.second);
+                }
+            }
+            return s;
+        }
+
         Operation()
             : Operation(FLAG_NONE, -1 /* replaceId */) {
         }
@@ -508,7 +553,7 @@
 
         std::string toString() const {
             std::stringstream ss;
-            ss << "VolumeShaper::Operation{mFlags=" << static_cast<int32_t>(mFlags) ;
+            ss << "VolumeShaper::Operation{mFlags=" << toString(mFlags);
             ss << ", mReplaceId=" << mReplaceId;
             ss << ", mXOffset=" << mXOffset;
             ss << "}";
diff --git a/media/Android.mk b/media/Android.mk
deleted file mode 100644
index 220a358..0000000
--- a/media/Android.mk
+++ /dev/null
@@ -1,5 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.conf))
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.xml))
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libstagefright,))
diff --git a/media/OWNERS b/media/OWNERS
index 976fb9e..b926075 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -14,5 +14,8 @@
 taklee@google.com
 wonsik@google.com
 
+# For TEST_MAPPING tv-presubmit and tv-postsubmit configurations:
+per-file TEST_MAPPING = blindahl@google.com
+
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index cd5d354..1a637ac 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -44,5 +44,16 @@
             ],
             "file_patterns": ["(?i)drm|crypto"]
         }
+    ],
+    // Postsubmit tests for TV devices
+    "tv-postsubmit": [
+        {
+            "name": "CtsMediaDecoderTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.decoder.cts.DecoderRenderTest"
+                }
+            ]
+        }
     ]
 }
diff --git a/media/aconfig/Android.bp b/media/aconfig/Android.bp
index ee25c03..16beb28 100644
--- a/media/aconfig/Android.bp
+++ b/media/aconfig/Android.bp
@@ -2,6 +2,7 @@
 aconfig_declarations {
     name: "aconfig_mediacodec_flags",
     package: "com.android.media.codec.flags",
+    container: "system",
     srcs: ["mediacodec_flags.aconfig"],
 }
 
@@ -28,6 +29,7 @@
 aconfig_declarations {
     name: "aconfig_codec_fwk_flags",
     package: "android.media.codec",
+    container: "system",
     srcs: ["codec_fwk.aconfig"],
 }
 
@@ -41,6 +43,7 @@
     name: "android.media.codec-aconfig-cc",
     min_sdk_version: "30",
     vendor_available: true,
+    double_loadable: true,
     apex_available: [
         "//apex_available:platform",
         "com.android.media.swcodec",
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index 183bd99..d662585 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -3,9 +3,18 @@
 # !!! Please add flags in alphabetical order. !!!
 
 package: "android.media.codec"
+container: "system"
+
+flag {
+  name: "aidl_hal_input_surface"
+  namespace: "codec_fwk"
+  description: "Feature flags for enabling AIDL HAL InputSurface handling"
+  bug: "201479783"
+}
 
 flag {
   name: "dynamic_color_aspects"
+  is_exported: true
   namespace: "codec_fwk"
   description: "Feature flag for dynamic color aspect support"
   bug: "297914560"
@@ -13,14 +22,114 @@
 
 flag {
   name: "hlg_editing"
+  is_exported: true
   namespace: "codec_fwk"
   description: "Feature flag for HLG editing support"
   bug: "316397061"
 }
 
 flag {
+  name: "in_process_sw_audio_codec"
+  is_exported: true
+  namespace: "codec_fwk"
+  description: "Feature flag for in-process software audio codec API"
+  bug: "297922713"
+}
+
+flag {
+  name: "in_process_sw_audio_codec_support"
+  namespace: "codec_fwk"
+  description: "Feature flag for in-process software audio codec support"
+  bug: "325520135"
+}
+
+flag {
+  name: "input_surface_throttle"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for input surface throttle"
+  bug: "342269852"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
+  name: "large_audio_frame_finish"
+  namespace: "codec_fwk"
+  description: "Implementation flag for large audio frame finishing tasks"
+  bug: "325512893"
+}
+
+flag {
+  name: "native_capabilites"
+  namespace: "codec_fwk"
+  description: "Feature flag for native codec capabilities"
+  bug: "306023029"
+}
+
+flag {
   name: "null_output_surface"
+  is_exported: true
+  namespace: "codec_fwk"
+  description: "Feature flag for null output Surface API"
+  bug: "297920102"
+}
+
+flag {
+  name: "null_output_surface_support"
   namespace: "codec_fwk"
   description: "Feature flag for null output Surface support"
-  bug: "297920102"
+  bug: "325550522"
+}
+
+flag {
+  name: "region_of_interest"
+  is_exported: true
+  namespace: "codec_fwk"
+  description: "Feature flag for region of interest API"
+  bug: "299191092"
+}
+
+flag {
+  name: "region_of_interest_support"
+  namespace: "codec_fwk"
+  description: "Feature flag for region of interest support"
+  bug: "325549730"
+}
+
+flag {
+  name: "set_callback_stall"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for setCallback stall"
+  bug: "326010604"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
+  name: "set_state_early"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for setting state early to avoid a race condition"
+  bug: "298613712"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
+  name: "stop_hal_before_surface"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for setting state early to avoid a race condition"
+  bug: "339247977"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
+  name: "teamfood"
+  namespace: "codec_fwk"
+  description: "Feature flag to track teamfood population"
+  bug: "328770262"
 }
diff --git a/media/aconfig/mediacodec_flags.aconfig b/media/aconfig/mediacodec_flags.aconfig
index be0fc5c..3cc9a1a 100644
--- a/media/aconfig/mediacodec_flags.aconfig
+++ b/media/aconfig/mediacodec_flags.aconfig
@@ -1,4 +1,5 @@
 package: "com.android.media.codec.flags"
+container: "system"
 
 # ******************************************************************
 #            !!! DO NOT ADD FURTHER FLAGS TO THIS FILE !!!
@@ -6,22 +7,23 @@
 # ******************************************************************
 
 flag {
-  name: "large_audio_frame"
+  name: "aidl_hal"
   namespace: "codec_fwk"
-  description: "Feature flags for large audio frame support"
-  bug: "297219557"
+  description: "Feature flags for enabling AIDL HAL handling"
+  bug: "251850069"
 }
 
 flag {
   name: "codec_importance"
+  is_exported: true
   namespace: "codec_fwk"
   description: "Feature flags for media codec importance"
   bug: "297929011"
 }
 
 flag {
-  name: "aidl_hal"
+  name: "large_audio_frame"
   namespace: "codec_fwk"
-  description: "Feature flags for enabling AIDL HAL handling"
-  bug: "251850069"
+  description: "Feature flags for large audio frame support"
+  bug: "297219557"
 }
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index af97dac..de8aca7 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -6,20 +6,30 @@
 }
 
 aconfig_declarations {
+    name: "com.android.media.audioclient-aconfig",
+    package: "com.android.media.audioclient",
+    container: "system",
+    srcs: ["audioclient.aconfig"],
+}
+
+aconfig_declarations {
     name: "com.android.media.audioserver-aconfig",
     package: "com.android.media.audioserver",
+    container: "system",
     srcs: ["audioserver.aconfig"],
 }
 
 aconfig_declarations {
     name: "com.android.media.audio-aconfig",
     package: "com.android.media.audio",
+    container: "system",
     srcs: ["audio.aconfig"],
 }
 
 aconfig_declarations {
     name: "com.android.media.aaudio-aconfig",
     package: "com.android.media.aaudio",
+    container: "system",
     srcs: ["aaudio.aconfig"],
 }
 
@@ -50,9 +60,9 @@
     // TODO(b/316909431) native_bridge_supported: true,
     apex_available: [
         "//apex_available:platform",
+        "com.android.btservices",
         "com.android.media",
         "com.android.media.swcodec",
-        "com.android.btservices",
     ],
     min_sdk_version: "29",
 }
@@ -63,6 +73,12 @@
     defaults: ["audio-aconfig-cc-defaults"],
 }
 
+cc_aconfig_library {
+    name: "com.android.media.audioclient-aconfig-cc",
+    aconfig_declarations: "com.android.media.audioclient-aconfig",
+    defaults: ["audio-aconfig-cc-defaults"],
+}
+
 java_aconfig_library {
     name: "com.android.media.audio-aconfig-java",
     aconfig_declarations: "com.android.media.audio-aconfig",
@@ -81,6 +97,7 @@
 aconfig_declarations {
     name: "android.media.audio-aconfig",
     package: "android.media.audio",
+    container: "system",
     srcs: ["audio_framework.aconfig"],
     visibility: ["//frameworks/base/api"],
 }
@@ -88,6 +105,7 @@
 aconfig_declarations {
     name: "android.media.audiopolicy-aconfig",
     package: "android.media.audiopolicy",
+    container: "system",
     srcs: ["audiopolicy_framework.aconfig"],
     visibility: ["//frameworks/base/api"],
 }
@@ -95,10 +113,18 @@
 aconfig_declarations {
     name: "android.media.midi-aconfig",
     package: "android.media.midi",
+    container: "system",
     srcs: ["midi_flags.aconfig"],
     visibility: ["//frameworks/base/api"],
 }
 
+aconfig_declarations {
+    name: "android.media.soundtrigger-aconfig",
+    package: "android.media.soundtrigger",
+    container: "system",
+    srcs: ["soundtrigger.aconfig"],
+}
+
 java_aconfig_library {
     name: "android.media.audio-aconfig-java",
     aconfig_declarations: "android.media.audio-aconfig",
@@ -122,11 +148,24 @@
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
 }
 
-filegroup {
+java_aconfig_library {
+    name: "android.media.soundtrigger-aconfig-java",
+    aconfig_declarations: "android.media.soundtrigger-aconfig",
+    defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
+cc_aconfig_library {
+    name: "android.media.audiopolicy-aconfig-cc",
+    aconfig_declarations: "android.media.audiopolicy-aconfig",
+    defaults: ["audio-aconfig-cc-defaults"],
+}
+
+aconfig_declarations_group {
     name: "audio-framework-aconfig",
-    srcs: [
-        ":android.media.audio-aconfig-java{.generated_srcjars}",
-        ":android.media.audiopolicy-aconfig-java{.generated_srcjars}",
-        ":android.media.midi-aconfig-java{.generated_srcjars}",
+    java_aconfig_libraries: [
+        "android.media.audio-aconfig-java",
+        "android.media.audiopolicy-aconfig-java",
+        "android.media.midi-aconfig-java",
+        "android.media.soundtrigger-aconfig-java",
     ],
 }
diff --git a/media/audio/aconfig/OWNERS b/media/audio/aconfig/OWNERS
new file mode 100644
index 0000000..fb1e866
--- /dev/null
+++ b/media/audio/aconfig/OWNERS
@@ -0,0 +1,4 @@
+# Bug component: 48436
+atneya@google.com
+elaurent@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index 7196525..f9fb4c7 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.aaudio"
+container: "system"
 
 flag {
     name: "sample_rate_conversion"
@@ -10,3 +11,10 @@
     description: "Enable the AAudio sample rate converter."
     bug: "219533889"
 }
+
+flag {
+    name: "start_stop_client_from_command_thread"
+    namespace: "media_audio"
+    description: "Start or stop client from command thread."
+    bug: "341627085"
+}
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 0b2a5c4..9221c04 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -3,6 +3,15 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.audio"
+container: "system"
+
+flag {
+    name: "abs_volume_index_fix"
+    namespace: "media_audio"
+    description:
+        "Fix double attenuation and index jumps in absolute volume mode"
+    bug: "340693050"
+}
 
 flag {
     name: "alarm_min_volume_zero"
@@ -12,6 +21,20 @@
 }
 
 flag {
+    name: "as_device_connection_failure"
+    namespace: "media_audio"
+    description: "AudioService handles device connection failures."
+    bug: "326597760"
+}
+
+flag {
+    name: "audioserver_permissions"
+    namespace: "media_audio"
+    description: "Refactoring permission management in audioserver"
+    bug: "338089555"
+}
+
+flag {
     name: "bluetooth_mac_address_anonymization"
     namespace: "media_audio"
     description:
@@ -37,6 +60,37 @@
 }
 
 flag {
+    name: "music_fx_edge_to_edge"
+    namespace: "media_audio"
+    description: "Enable Edge-to-edge feature for MusicFx and handle insets"
+    bug: "336204940"
+}
+
+flag {
+    name: "port_to_piid_simplification"
+    namespace: "media_audio"
+    description: "PAM only needs for each piid the last portId mapping"
+    bug: "335747248"
+
+}
+
+flag {
+    name: "replace_stream_bt_sco"
+    namespace: "media_audio"
+    description:
+        "Replace internally STREAM_BLUETOOTH_SCO with STREAM_VOICE_CALL"
+    bug: "345024266"
+}
+
+flag {
+    name: "ring_my_car"
+    namespace: "media_audio"
+    description:
+        "Incoming ringtones will not be muted based on ringer mode when connected to a car"
+    bug: "319515324"
+}
+
+flag {
     name: "ringer_mode_affects_alarm"
     namespace: "media_audio"
     description:
@@ -45,6 +99,15 @@
 }
 
 flag {
+    name: "set_stream_volume_order"
+    namespace: "media_audio"
+    description:
+        "Fix race condition by adjusting the order when"
+        "setStreamVolume is calling into the BT stack"
+    bug: "329202581"
+}
+
+flag {
     name: "spatializer_offload"
     namespace: "media_audio"
     description: "Enable spatializer offload"
@@ -52,8 +115,31 @@
 }
 
 flag {
+    name: "spatializer_upmix"
+    namespace: "media_audio"
+    description: "Enable spatializer upmix"
+    bug: "323985367"
+}
+
+flag {
     name: "stereo_spatialization"
     namespace: "media_audio"
     description: "Enable stereo channel mask for spatialization."
     bug: "303920722"
 }
+
+flag {
+    name: "vgs_vss_sync_mute_order"
+    namespace: "media_audio"
+    description:
+        "When syncing the VGS to VSS we need to first adjust the"
+        "mute state before the index."
+    bug: "331849188"
+}
+
+flag {
+    name: "volume_refactoring"
+    namespace: "media_audio"
+    description: "Refactor the audio volume internal architecture logic"
+    bug: "324152869"
+}
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 34c026a..0209e28 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -4,6 +4,7 @@
 # Please add flags in alphabetical order.
 
 package: "android.media.audio"
+container: "system"
 
 flag {
     name: "auto_public_volume_api_hardening"
@@ -22,7 +23,16 @@
 }
 
 flag {
+    name: "feature_spatial_audio_headtracking_low_latency"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Define feature for low latency headtracking for SA"
+    bug: "324291076"
+}
+
+flag {
     name: "focus_exclusive_with_recording"
+    is_exported: true
     namespace: "media_audio"
     description:
         "Audio focus GAIN_TRANSIENT_EXCLUSIVE only mutes"
@@ -32,7 +42,20 @@
 
 # TODO remove
 flag {
+    name: "foreground_audio_control"
+    is_exported: true
+    namespace: "media_audio"
+    description:
+        "Audio focus gain requires FGS or delegation to "
+        "take effect"
+    bug: "296232417"
+    is_fixed_read_only: true
+}
+
+# TODO remove
+flag {
     name: "focus_freeze_test_api"
+    is_exported: true
     namespace: "media_audio"
     description: "\
  AudioManager audio focus test APIs:\
@@ -46,10 +69,65 @@
 
 flag {
     name: "loudness_configurator_api"
+    is_exported: true
     namespace: "media_audio"
     description: "\
 Enable the API for providing loudness metadata and CTA-2075 \
 support."
     bug: "298463873"
 }
 
+flag {
+    name: "mute_background_audio"
+    namespace: "media_audio"
+    description: "mute audio playing in background"
+    bug: "296232417"
+}
+
+flag {
+    name: "sco_managed_by_audio"
+    is_exported: true
+    namespace: "media_audio"
+    description: "\
+Enable new implementation of headset profile device connection and \
+SCO audio activation."
+    bug: "265057196"
+}
+
+flag {
+    name: "supported_device_types_api"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Surface new API method AudioManager.getSupportedDeviceTypes()"
+    bug: "307537538"
+}
+
+flag {
+    name: "ro_foreground_audio_control"
+    is_exported: true
+    namespace: "media_audio"
+    description:
+        "Audio focus gain requires FGS or delegation to "
+        "take effect"
+    bug: "296232417"
+    is_fixed_read_only: true
+}
+
+flag {
+    name: "ro_volume_ringer_api_hardening"
+    namespace: "media_audio"
+    description: "Limit access to volume and ringer SDK APIs in AudioManager"
+    bug: "296232417"
+    is_fixed_read_only: true
+}
+
+# TODO remove
+flag {
+    name: "volume_ringer_api_hardening"
+    namespace: "media_audio"
+    description: "Limit access to volume and ringer SDK APIs in AudioManager"
+    bug: "296232417"
+    is_fixed_read_only: true
+}
diff --git a/media/audio/aconfig/audioclient.aconfig b/media/audio/aconfig/audioclient.aconfig
new file mode 100644
index 0000000..a804834
--- /dev/null
+++ b/media/audio/aconfig/audioclient.aconfig
@@ -0,0 +1,16 @@
+# Flags for libaudioclient, and other native client libraries.
+#
+# Please add flags in alphabetical order.
+
+package: "com.android.media.audioclient"
+container: "system"
+
+flag {
+    name: "audiosystem_service_acquisition"
+    namespace: "media_audio"
+    description: "Clean up audiosystem service acquisition."
+    bug: "330358287"
+    metadata {
+      purpose: PURPOSE_BUGFIX
+    }
+}
diff --git a/media/audio/aconfig/audiopolicy_framework.aconfig b/media/audio/aconfig/audiopolicy_framework.aconfig
index b41c1c3..28b6c7f 100644
--- a/media/audio/aconfig/audiopolicy_framework.aconfig
+++ b/media/audio/aconfig/audiopolicy_framework.aconfig
@@ -4,9 +4,36 @@
 # Please add flags in alphabetical order.
 
 package: "android.media.audiopolicy"
+container: "system"
+
+flag {
+    name: "audio_mix_ownership"
+    namespace: "media_audio"
+    description: "Improves ownership model of AudioMixes and the relationship between AudioPolicy and AudioMix."
+    bug: "309080867"
+    is_fixed_read_only: true
+}
+
+flag {
+    name: "audio_mix_policy_ordering"
+    namespace: "media_audio"
+    description: "Orders AudioMixes per registered AudioPolicy."
+    bug: "309080867"
+    is_fixed_read_only: true
+}
+
+flag {
+    name: "audio_mix_test_api"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Enable new Test APIs that provide access to registered AudioMixes on system server and native side."
+    bug: "309080867"
+    is_fixed_read_only: true
+}
 
 flag {
     name: "audio_policy_update_mixing_rules_api"
+    is_exported: true
     namespace: "media_audio"
     description: "Enable AudioPolicy.updateMixingRules API for hot-swapping audio mixing rules."
     bug: "293874525"
@@ -14,6 +41,7 @@
 
 flag {
     name: "enable_fade_manager_configuration"
+    is_exported: true
     namespace: "media_audio"
     description: "Enable Fade Manager Configuration support to determine fade properties"
     bug: "307354764"
@@ -25,3 +53,11 @@
     description: "Enable multi-zone audio support in audio product strategies."
     bug: "316643994"
 }
+
+flag {
+    name: "record_audio_device_aware_permission"
+    namespace: "media_audio"
+    description: "Enable device-aware permission handling for RECORD_AUDIO permission"
+    bug: "291737188"
+    is_fixed_read_only: true
+}
\ No newline at end of file
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 21ea1a2..d1c6239 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.audioserver"
+container: "system"
 
 flag {
     name: "direct_track_reprioritization"
@@ -14,6 +15,13 @@
 }
 
 flag {
+    name: "effect_chain_callback_improve"
+    namespace: "media_audio"
+    description: "Improve effect chain callback mutex logic."
+    bug: "342413767"
+}
+
+flag {
     name: "fdtostring_timeout_fix"
     namespace: "media_audio"
     description: "Improve fdtostring implementation to properly handle timing out."
@@ -21,6 +29,32 @@
 }
 
 flag {
+    name: "fix_call_audio_patch"
+    namespace: "media_audio"
+    description:
+        "optimize creation and release of audio patches for call routing"
+    bug: "292492229"
+}
+
+flag {
+    name: "fix_concurrent_playback_behavior_with_bit_perfect_client"
+    namespace: "media_audio"
+    description:
+        "Treat playback use cases differently when bit-perfect client is active to improve the "
+        "user experience with bit-perfect playback."
+    bug: "339515899"
+}
+
+flag {
+    name: "fix_input_sharing_logic"
+    namespace: "media_audio"
+    description:
+        "Fix the audio policy logic that decides to reuse or close "
+        "input streams when resources are exhausted"
+    bug: "338446410"
+}
+
+flag {
     name: "mutex_priority_inheritance"
     namespace: "media_audio"
     description:
@@ -29,3 +63,27 @@
         "This feature helps reduce audio glitching caused by low priority blocking threads."
     bug: "209491695"
 }
+
+flag {
+    name: "portid_volume_management"
+    namespace: "media_audio"
+    description:
+        "Allows to manage volume by port id within audio flinger instead of legacy stream type."
+    bug: "317212590"
+}
+
+flag {
+    name: "power_stats"
+    namespace: "media_audio"
+    description:
+        "Add power stats tracking and management."
+    bug: "350114693"
+}
+
+flag {
+    name: "use_bt_sco_for_media"
+    namespace: "media_audio"
+    description:
+        "Play media strategy over Bluetooth SCO when active"
+    bug: "292037886"
+}
diff --git a/media/audio/aconfig/midi_flags.aconfig b/media/audio/aconfig/midi_flags.aconfig
index ff9238a..1620e1b 100644
--- a/media/audio/aconfig/midi_flags.aconfig
+++ b/media/audio/aconfig/midi_flags.aconfig
@@ -4,9 +4,11 @@
 # Please add flags in alphabetical order.
 
 package: "android.media.midi"
+container: "system"
 
 flag {
     name: "virtual_ump"
+    is_exported: true
     namespace: "media_audio"
     description: "Enable virtual UMP MIDI."
     bug: "291115176"
diff --git a/media/audio/aconfig/soundtrigger.aconfig b/media/audio/aconfig/soundtrigger.aconfig
new file mode 100644
index 0000000..5233119
--- /dev/null
+++ b/media/audio/aconfig/soundtrigger.aconfig
@@ -0,0 +1,23 @@
+# Flags for sound trigger
+#
+# Please add flags in alphabetical order.
+
+package: "android.media.soundtrigger"
+container: "system"
+
+flag {
+    name: "generic_model_api"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Feature flag for adding GenericSoundModel to SystemApi"
+    bug: "339267254"
+}
+
+flag {
+    name: "manager_api"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Feature flag for adding SoundTriggerManager API to SystemApi"
+    bug: "339267254"
+}
+
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 1a6c7f1..9eaddce 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -904,7 +904,7 @@
         case Tag::voiceMask:
             return convert(aidl, mVoice, __func__, "voice");
     }
-    ALOGE("%s: unexpected tag value %d", __func__, aidl.getTag());
+    ALOGE("%s: unexpected tag value %d", __func__, static_cast<int>(aidl.getTag()));
     return unexpected(BAD_VALUE);
 }
 
@@ -1069,13 +1069,6 @@
             if (mac.size() != 6) return BAD_VALUE;
             snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%02X:%02X:%02X:%02X:%02X:%02X",
                     mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
-            // special case for anonymized mac address:
-            // change anonymized bytes back from FD:FF:FF:FF to XX:XX:XX:XX
-            std::string address(addressBuffer);
-            if (address.compare(0, strlen("FD:FF:FF:FF"), "FD:FF:FF:FF") == 0) {
-                address.replace(0, strlen("FD:FF:FF:FF"), "XX:XX:XX:XX");
-            }
-            strcpy(addressBuffer, address.c_str());
         } break;
         case Tag::ipv4: {
             const std::vector<uint8_t>& ipv4 = aidl.address.get<AudioDeviceAddress::ipv4>();
@@ -1136,20 +1129,11 @@
     if (!legacyAddress.empty()) {
         switch (suggestDeviceAddressTag(aidl.type)) {
             case Tag::mac: {
-                // special case for anonymized mac address:
-                // change anonymized bytes so that they can be scanned as HEX bytes
-                // Use '01' for LSB bits 0 and 1 as Bluetooth MAC addresses are never multicast
-                // and universaly administered
-                std::string address = legacyAddress;
-                if (address.compare(0, strlen("XX:XX:XX:XX"), "XX:XX:XX:XX") == 0) {
-                    address.replace(0, strlen("XX:XX:XX:XX"), "FD:FF:FF:FF");
-                }
-
                 std::vector<uint8_t> mac(6);
-                int status = sscanf(address.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
+                int status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
                         &mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
                 if (status != mac.size()) {
-                    ALOGE("%s: malformed MAC address: \"%s\"", __func__, address.c_str());
+                    ALOGE("%s: malformed MAC address: \"%s\"", __func__, legacyAddress.c_str());
                     return unexpected(BAD_VALUE);
                 }
                 aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::mac>(std::move(mac));
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index 9b14a5e..7ab616a 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -35,6 +35,7 @@
 
 using hardware::audio::common::PlaybackTrackMetadata;
 using hardware::audio::common::RecordTrackMetadata;
+using hardware::audio::common::SourceMetadata;
 using ::android::BAD_VALUE;
 using ::android::OK;
 
@@ -194,5 +195,16 @@
     return aidl;
 }
 
+// static
+ConversionResult<SourceMetadata>
+legacy2aidl_SourceMetadata(const std::vector<playback_track_metadata_v7_t>& legacy) {
+    SourceMetadata aidl;
+    aidl.tracks = VALUE_OR_RETURN(
+            convertContainer<std::vector<PlaybackTrackMetadata>>(
+                    legacy,
+                    legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata));
+    return aidl;
+}
+
 }  // namespace android
 }  // aidl
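A caller-side sketch of the new legacy2aidl_SourceMetadata() helper (only the helper itself is from this patch; the surrounding names are placeholders, and ConversionResult exposes the expected-style accessors used throughout these conversion files):

    std::vector<playback_track_metadata_v7_t> legacyTracks;  // filled in by the caller
    ConversionResult<SourceMetadata> converted = legacy2aidl_SourceMetadata(legacyTracks);
    if (converted.has_value()) {
        const SourceMetadata& aidl = converted.value();
        // aidl.tracks holds one PlaybackTrackMetadata per legacy entry.
    }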
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index 07c59c7..2e1eb8c 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -58,10 +58,10 @@
 cc_defaults {
     name: "audio_aidl_conversion_common_default_cpp",
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "libbinder",
         "libshmemcompat",
         "shared-file-region-aidl-cpp",
-        "framework-permission-aidl-cpp",
     ],
     export_shared_lib_headers: [
         "shared-file-region-aidl-cpp",
@@ -94,8 +94,8 @@
     ],
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
     },
     target: {
@@ -148,8 +148,8 @@
         "latest_android_media_audio_common_types_ndk_shared",
     ],
     shared_libs: [
-        "libbinder_ndk",
         "libbase",
+        "libbinder_ndk",
     ],
     static_libs: [
         "libaudioaidlcommon",
@@ -182,8 +182,8 @@
     ],
     shared_libs: [
         "libaudio_aidl_conversion_common_ndk",
-        "libbinder_ndk",
         "libbase",
+        "libbinder_ndk",
     ],
     cflags: [
         "-DBACKEND_NDK",
@@ -213,8 +213,8 @@
     ],
     shared_libs: [
         "libaudio_aidl_conversion_common_ndk",
-        "libbinder_ndk",
         "libbase",
+        "libbinder_ndk",
     ],
     cflags: [
         "-DBACKEND_NDK",
@@ -238,8 +238,8 @@
         "latest_android_media_audio_common_types_ndk_shared",
     ],
     shared_libs: [
-        "libbinder_ndk",
         "libbase",
+        "libbinder_ndk",
     ],
     cflags: [
         "-DBACKEND_CPP_NDK",
diff --git a/media/audioaidlconversion/include/media/AidlConversionEffect.h b/media/audioaidlconversion/include/media/AidlConversionEffect.h
index b03d06b..e51bf8b 100644
--- a/media/audioaidlconversion/include/media/AidlConversionEffect.h
+++ b/media/audioaidlconversion/include/media/AidlConversionEffect.h
@@ -72,9 +72,6 @@
                 MAKE_EXTENSION_PARAMETER_ID(_effect, _tag##Tag, _extId);                          \
         aidl::android::hardware::audio::effect::Parameter _aidlParam;                             \
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(_id, &_aidlParam))); \
-        aidl::android::hardware::audio::effect::VendorExtension _ext =                            \
-                VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(                              \
-                        _aidlParam, _effect, _tag, _effect::vendor, VendorExtension));            \
         return VALUE_OR_RETURN_STATUS(                                                            \
                 aidl::android::aidl2legacy_Parameter_EffectParameterWriter(_aidlParam, _param));  \
     }
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index 813a728..b5888b3 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -28,6 +28,7 @@
 
 #include <aidl/android/hardware/audio/common/PlaybackTrackMetadata.h>
 #include <aidl/android/hardware/audio/common/RecordTrackMetadata.h>
+#include <aidl/android/hardware/audio/common/SourceMetadata.h>
 #include <aidl/android/media/audio/common/AudioConfig.h>
 #include <media/AidlConversionUtil.h>
 
@@ -56,5 +57,8 @@
 ConversionResult<hardware::audio::common::RecordTrackMetadata>
 legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy);
 
+ConversionResult<hardware::audio::common::SourceMetadata>
+legacy2aidl_SourceMetadata(const std::vector<playback_track_metadata_v7_t>& legacy);
+
 }  // namespace android
 }  // namespace aidl
diff --git a/media/audioaidlconversion/tests/Android.bp b/media/audioaidlconversion/tests/Android.bp
index 88b2cc9..bca4dd0 100644
--- a/media/audioaidlconversion/tests/Android.bp
+++ b/media/audioaidlconversion/tests/Android.bp
@@ -16,8 +16,8 @@
     ],
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
     },
 }
@@ -26,8 +26,8 @@
     name: "audio_aidl_ndk_conversion_tests",
 
     defaults: [
-        "latest_android_media_audio_common_types_ndk_static",
         "latest_android_hardware_audio_common_ndk_static",
+        "latest_android_media_audio_common_types_ndk_static",
         "libaudio_aidl_conversion_tests_defaults",
     ],
     srcs: ["audio_aidl_ndk_conversion_tests.cpp"],
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 2030dc7..47b48e3 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -19,18 +20,12 @@
         "-Werror",
     ],
 
-    header_libs: [
-        "libaudiohal_headers",
-        "libmedia_headers",
-        "libmediametrics_headers",
-    ],
-
     defaults: [
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "latest_android_media_audio_common_types_cpp_shared",
         "libaaudioservice_dependencies",
         "libaudioflinger_dependencies",
         "libaudiopolicyservice_dependencies",
-        "latest_android_media_audio_common_types_cpp_shared",
-        "latest_android_hardware_audio_core_sounddose_ndk_shared",
     ],
 
     static_libs: [
@@ -38,40 +33,10 @@
         "libaudioflinger",
         "libaudiopolicyservice",
         "libmedialogservice",
-        "libnbaio",
     ],
 
     shared_libs: [
-        "libaudioclient",
-        "libaudioprocessing",
-        "libbinder",
-        "libcutils",
-        "libhidlbase",
-        "liblog",
-        "libmedia",
-        "libmediautils",
-        "libnblog",
-        "libpowermanager",
-        "libutils",
-        "libvibrator",
-    ],
-
-    // TODO check if we still need all of these include directories
-    include_dirs: [
-        "external/sonic",
-        "frameworks/av/media/libaaudio/include",
-        "frameworks/av/media/libaaudio/src",
-        "frameworks/av/media/libaaudio/src/binding",
-        "frameworks/av/services/audioflinger",
-        "frameworks/av/services/audiopolicy",
-        "frameworks/av/services/audiopolicy/common/include",
-        "frameworks/av/services/audiopolicy/common/managerdefinitions/include",
-        "frameworks/av/services/audiopolicy/engine/interface",
-        "frameworks/av/services/audiopolicy/service",
-        "frameworks/av/services/medialog",
-        "frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-
-
+        "libhidlbase", // required for threadpool config.
     ],
 
     init_rc: ["audioserver.rc"],
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index c7a1bfd..5d7daa4 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -168,7 +168,13 @@
         ALOGW_IF(AudioSystem::setLocalAudioFlinger(af) != OK,
                 "%s: AudioSystem already has an AudioFlinger instance!", __func__);
         const auto aps = sp<AudioPolicyService>::make();
+        af->initAudioPolicyLocal(aps);
         ALOGD("%s: AudioPolicy created", __func__);
+        ALOGW_IF(AudioSystem::setLocalAudioPolicyService(aps) != OK,
+                 "%s: AudioSystem already has an AudioPolicyService instance!", __func__);
+
+        // Start initialization of internally managed audio objects such as Device Effects.
+        aps->onAudioSystemReady();
 
         // Add AudioFlinger and AudioPolicy to ServiceManager.
         sp<IServiceManager> sm = defaultServiceManager();
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index d1b08bd..c770d0c 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -35,14 +35,14 @@
 
 #define FILEREAD_MAX_LAYERS 2
 
-#define DRC_DEFAULT_MOBILE_REF_LEVEL -16.0  /* 64*-0.25dB = -16 dB below full scale for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_CUT   1.0 /* maximum compression of dynamic range for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_BOOST 1.0 /* maximum compression of dynamic range for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_HEAVY C2Config::DRC_COMPRESSION_HEAVY   /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_REF_LEVEL 64  /* 64*-0.25dB = -16 dB below full scale for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_CUT   127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1   /* switch for heavy compression for mobile conf */
 #define DRC_DEFAULT_MOBILE_DRC_EFFECT 3  /* MPEG-D DRC effect type; 3 => Limited playback range */
 #define DRC_DEFAULT_MOBILE_DRC_ALBUM  0  /* MPEG-D DRC album mode; 0 => album mode is disabled, 1 => album mode is enabled */
 #define DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS (0.25) /* decoder output loudness; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
-#define DRC_DEFAULT_MOBILE_ENC_LEVEL (0.25) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL (-1) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
 #define MAX_CHANNEL_COUNT            8  /* maximum number of audio channels that can be decoded */
 // names of properties that can be used to override the default DRC settings
 #define PROP_DRC_OVERRIDE_REF_LEVEL  "aac_drc_reference_level"
@@ -145,9 +145,13 @@
                 .withSetter(ProfileLevelSetter)
                 .build());
 
+        C2Config::drc_compression_mode_t defaultCompressionMode =
+                property_get_int32(PROP_DRC_OVERRIDE_HEAVY, DRC_DEFAULT_MOBILE_DRC_HEAVY) == 1
+                        ? C2Config::DRC_COMPRESSION_HEAVY
+                        : C2Config::DRC_COMPRESSION_LIGHT;
         addParameter(
                 DefineParam(mDrcCompressMode, C2_PARAMKEY_DRC_COMPRESSION_MODE)
-                .withDefault(new C2StreamDrcCompressionModeTuning::input(0u, C2Config::DRC_COMPRESSION_HEAVY))
+                .withDefault(new C2StreamDrcCompressionModeTuning::input(0u, defaultCompressionMode))
                 .withFields({
                     C2F(mDrcCompressMode, value).oneOf({
                             C2Config::DRC_COMPRESSION_ODM_DEFAULT,
@@ -158,37 +162,48 @@
                 .withSetter(Setter<decltype(*mDrcCompressMode)>::StrictValueWithNoDeps)
                 .build());
 
+
+        float defaultRefLevel = -0.25 * property_get_int32(PROP_DRC_OVERRIDE_REF_LEVEL,
+                                                           DRC_DEFAULT_MOBILE_REF_LEVEL);
         addParameter(
                 DefineParam(mDrcTargetRefLevel, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL)
-                .withDefault(new C2StreamDrcTargetReferenceLevelTuning::input(0u, DRC_DEFAULT_MOBILE_REF_LEVEL))
+                .withDefault(new C2StreamDrcTargetReferenceLevelTuning::input(0u, defaultRefLevel))
                 .withFields({C2F(mDrcTargetRefLevel, value).inRange(-31.75, 0.25)})
                 .withSetter(Setter<decltype(*mDrcTargetRefLevel)>::StrictValueWithNoDeps)
                 .build());
 
+        float defaultEncLevel = -0.25 * property_get_int32(PROP_DRC_OVERRIDE_ENC_LEVEL,
+                                                           DRC_DEFAULT_MOBILE_ENC_LEVEL);
         addParameter(
                 DefineParam(mDrcEncTargetLevel, C2_PARAMKEY_DRC_ENCODED_TARGET_LEVEL)
-                .withDefault(new C2StreamDrcEncodedTargetLevelTuning::input(0u, DRC_DEFAULT_MOBILE_ENC_LEVEL))
+                .withDefault(new C2StreamDrcEncodedTargetLevelTuning::input(0u, defaultEncLevel))
                 .withFields({C2F(mDrcEncTargetLevel, value).inRange(-31.75, 0.25)})
                 .withSetter(Setter<decltype(*mDrcEncTargetLevel)>::StrictValueWithNoDeps)
                 .build());
 
+        float defaultDrcBoost =
+                property_get_int32(PROP_DRC_OVERRIDE_BOOST, DRC_DEFAULT_MOBILE_DRC_BOOST) / 127.;
         addParameter(
                 DefineParam(mDrcBoostFactor, C2_PARAMKEY_DRC_BOOST_FACTOR)
-                .withDefault(new C2StreamDrcBoostFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_BOOST))
+                .withDefault(new C2StreamDrcBoostFactorTuning::input(0u, defaultDrcBoost))
                 .withFields({C2F(mDrcBoostFactor, value).inRange(0, 1.)})
                 .withSetter(Setter<decltype(*mDrcBoostFactor)>::StrictValueWithNoDeps)
                 .build());
 
+        float defaultDrcCut =
+                property_get_int32(PROP_DRC_OVERRIDE_CUT, DRC_DEFAULT_MOBILE_DRC_CUT) / 127.;
         addParameter(
                 DefineParam(mDrcAttenuationFactor, C2_PARAMKEY_DRC_ATTENUATION_FACTOR)
-                .withDefault(new C2StreamDrcAttenuationFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_CUT))
+                .withDefault(new C2StreamDrcAttenuationFactorTuning::input(0u, defaultDrcCut))
                 .withFields({C2F(mDrcAttenuationFactor, value).inRange(0, 1.)})
                 .withSetter(Setter<decltype(*mDrcAttenuationFactor)>::StrictValueWithNoDeps)
                 .build());
 
+        C2Config::drc_effect_type_t defaultDrcEffectType = (C2Config::drc_effect_type_t)
+                property_get_int32(PROP_DRC_OVERRIDE_EFFECT, DRC_DEFAULT_MOBILE_DRC_EFFECT);
         addParameter(
                 DefineParam(mDrcEffectType, C2_PARAMKEY_DRC_EFFECT_TYPE)
-                .withDefault(new C2StreamDrcEffectTypeTuning::input(0u, C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE))
+                .withDefault(new C2StreamDrcEffectTypeTuning::input(0u, defaultDrcEffectType))
                 .withFields({
                     C2F(mDrcEffectType, value).oneOf({
                             C2Config::DRC_EFFECT_ODM_DEFAULT,
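
The C2SoftAacDec hunk above replaces the hard-coded float DRC defaults with the decoder's integer
encodings and derives the C2 float defaults at parameter-definition time, honoring the aac_drc_*
system-property overrides. A minimal sketch of that arithmetic, using only the scaling visible in
this hunk (the standalone program below is illustrative, not part of the patch):

    #include <cstdio>

    int main() {
        const int refLevelSteps = 64;   // DRC_DEFAULT_MOBILE_REF_LEVEL, in -0.25 dB steps
        const int cutSteps      = 127;  // DRC_DEFAULT_MOBILE_DRC_CUT, on a 0..127 scale
        const int boostSteps    = 127;  // DRC_DEFAULT_MOBILE_DRC_BOOST, on a 0..127 scale
        const int encLevelSteps = -1;   // DRC_DEFAULT_MOBILE_ENC_LEVEL, -1 means "unknown"

        std::printf("target reference level: %.2f dB\n", -0.25 * refLevelSteps);  // -16.00
        std::printf("attenuation factor:     %.2f\n", cutSteps / 127.);           // 1.00
        std::printf("boost factor:           %.2f\n", boostSteps / 127.);         // 1.00
        std::printf("encoded target level:   %.2f\n", -0.25 * encLevelSteps);     // 0.25
        return 0;
    }

With the aac_drc_* properties unset, the derived defaults land on the same values as the previous
constants; setting a property (e.g. aac_drc_reference_level) now changes the advertised default
without code changes.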
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 7c9d3e8..722b13a 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -29,6 +29,12 @@
 
 #include "C2SoftAomEnc.h"
 
+/* Quantization param values defined by the spec */
+#define AOM_QP_MIN 0
+#define AOM_QP_MAX 63
+#define AOM_QP_DEFAULT_MIN AOM_QP_MIN
+#define AOM_QP_DEFAULT_MAX AOM_QP_MAX
+
 namespace android {
 
 constexpr char COMPONENT_NAME[] = "c2.android.av1.encoder";
@@ -50,11 +56,13 @@
                                  0u, (uint64_t)C2MemoryUsage::CPU_READ))
                          .build());
 
+    // Odd dimension support in encoders requires Android V and above
+    size_t stepSize = isAtLeastV() ? 1 : 2;
     addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                          .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
                          .withFields({
-                                 C2F(mSize, width).inRange(2, 2048, 2),
-                                 C2F(mSize, height).inRange(2, 2048, 2),
+                                 C2F(mSize, width).inRange(2, 2048, stepSize),
+                                 C2F(mSize, height).inRange(2, 2048, stepSize),
                          })
                          .withSetter(SizeSetter)
                          .build());
@@ -173,6 +181,19 @@
                                      .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
                     .withSetter(CodedColorAspectsSetter, mColorAspects)
                     .build());
+
+    addParameter(
+            DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+            .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                    0 /* flexCount */, 0u /* stream */))
+            .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                            {C2Config::I_FRAME, C2Config::P_FRAME}),
+                         C2F(mPictureQuantization, m.values[0].min).inRange(
+                            AOM_QP_DEFAULT_MIN, AOM_QP_DEFAULT_MAX),
+                         C2F(mPictureQuantization, m.values[0].max).inRange(
+                            AOM_QP_DEFAULT_MIN, AOM_QP_DEFAULT_MAX)})
+            .withSetter(PictureQuantizationSetter)
+            .build());
 }
 
 C2R C2SoftAomEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
@@ -305,6 +326,54 @@
     return C2R::Ok();
 }
 
+C2R C2SoftAomEnc::IntfImpl::PictureQuantizationSetter(
+        bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
+    (void)mayBlock;
+    int32_t iMin = AOM_QP_DEFAULT_MIN, pMin = AOM_QP_DEFAULT_MIN;
+    int32_t iMax = AOM_QP_DEFAULT_MAX, pMax = AOM_QP_DEFAULT_MAX;
+    for (size_t i = 0; i < me.v.flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+        // layerMin is clamped to [AOM_QP_MIN, layerMax] to avoid error
+        // cases where layer.min > layer.max
+        int32_t layerMax = std::clamp(layer.max, AOM_QP_MIN, AOM_QP_MAX);
+        int32_t layerMin = std::clamp(layer.min, AOM_QP_MIN, layerMax);
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            iMax = layerMax;
+            iMin = layerMin;
+            ALOGV("iMin %d iMax %d", iMin, iMax);
+        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            pMax = layerMax;
+            pMin = layerMin;
+            ALOGV("pMin %d pMax %d", pMin, pMax);
+        }
+    }
+    ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
+          iMin, iMax, pMin, pMax);
+
+    // aom library takes same range for I/P picture type
+    int32_t maxFrameQP = std::min(iMax, pMax);
+    int32_t minFrameQP = std::max(iMin, pMin);
+    if (minFrameQP > maxFrameQP) {
+        minFrameQP = maxFrameQP;
+    }
+    // put them back into the structure
+    for (size_t i = 0; i < me.v.flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            me.set().m.values[i].max = maxFrameQP;
+            me.set().m.values[i].min = minFrameQP;
+        }
+        else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            me.set().m.values[i].max = maxFrameQP;
+            me.set().m.values[i].min = minFrameQP;
+        }
+    }
+    ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
+          minFrameQP, maxFrameQP);
+    return C2R::Ok();
+}
+
 uint32_t C2SoftAomEnc::IntfImpl::getLevel_l() const {
         return mProfileLevel->level - LEVEL_AV1_2;
 }
@@ -556,6 +625,7 @@
         mQuality = mIntf->getQuality_l();
         mComplexity = mIntf->getComplexity_l();
         mAV1EncLevel = mIntf->getLevel_l();
+        mQpBounds = mIntf->getPictureQuantization_l();
     }
 
 
@@ -573,6 +643,18 @@
             break;
     }
 
+    if (mQpBounds->flexCount() > 0) {
+        // read min max qp for sequence
+        for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+                mMaxQuantizer = layer.max;
+                mMinQuantizer = layer.min;
+                break;
+            }
+        }
+    }
+
     mCodecInterface = aom_codec_av1_cx();
     if (!mCodecInterface) goto CleanUp;
 
@@ -605,7 +687,7 @@
     mCodecConfiguration->g_timebase.den = 1000000;
     // rc_target_bitrate is in kbps, mBitrate in bps
     mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
-    mCodecConfiguration->rc_end_usage = mBitrateControlMode == AOM_Q ? AOM_Q : AOM_CBR;
+    mCodecConfiguration->rc_end_usage = mBitrateControlMode;
     // Disable frame drop - not allowed in MediaCodec now.
     mCodecConfiguration->rc_dropframe_thresh = 0;
     // Disable lagged encoding.
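
PictureQuantizationSetter above clamps each requested layer to the spec range [AOM_QP_MIN,
AOM_QP_MAX] and, since the aom library takes a single range for I and P pictures, folds the
per-type bounds into one frame-level range. A standalone sketch of that clamp-and-merge rule
(the struct and helper names below are illustrative, not from the patch):

    #include <algorithm>

    struct QpRange { int min; int max; };

    // Merge requested I-frame and P-frame QP bounds into the single range the encoder accepts.
    QpRange mergeFrameQp(QpRange iReq, QpRange pReq) {
        auto clampLayer = [](QpRange r) {
            r.max = std::clamp(r.max, 0, 63);     // AOM_QP_MIN .. AOM_QP_MAX
            r.min = std::clamp(r.min, 0, r.max);  // keeps min <= max even for bad input
            return r;
        };
        iReq = clampLayer(iReq);
        pReq = clampLayer(pReq);
        QpRange out{std::max(iReq.min, pReq.min), std::min(iReq.max, pReq.max)};
        if (out.min > out.max) out.min = out.max; // disjoint requests collapse to a point
        return out;
    }

For example, requesting I frames in [10, 40] and P frames in [20, 50] yields [20, 40] for every
frame. The VPX encoder later in this patch applies the same rule with the VPX_QP_* bounds.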
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
index 7e5ea63..067b04f 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.h
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -109,6 +109,7 @@
     std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
 
     aom_codec_err_t setupCodecParameters();
 };
@@ -126,6 +127,8 @@
                                   const C2P<C2StreamPictureSizeInfo::input>& size,
                                   const C2P<C2StreamFrameRateInfo::output>& frameRate,
                                   const C2P<C2StreamBitrateInfo::output>& bitrate);
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me);
 
     // unsafe getters
     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
@@ -150,6 +153,9 @@
     std::shared_ptr<C2StreamPixelFormatInfo::input> getPixelFormat_l() const {
         return mPixelFormat;
     }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+        return mPictureQuantization;
+    }
     uint32_t getSyncFramePeriod() const;
     static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me);
     static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
@@ -171,6 +177,7 @@
     std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
     std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
 
 };
 
diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp
index a7ae85b..8ccb9ac 100644
--- a/media/codec2/components/avc/Android.bp
+++ b/media/codec2/components/avc/Android.bp
@@ -17,6 +17,10 @@
 
     static_libs: ["libavcdec"],
 
+    cflags: [
+        "-DKEEP_THREADS_ACTIVE=1",
+    ],
+
     srcs: ["C2SoftAvcDec.cpp"],
 
     export_include_dirs: ["."],
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 96a4c4a..77fdeb9 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -16,6 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftAvcDec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
 #include <log/log.h>
 
 #include <media/stagefright/foundation/MediaDefs.h>
@@ -416,6 +419,7 @@
     ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+    s_create_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
     s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
     s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index e424860..80a5e67 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -152,6 +152,17 @@
                 .build());
 
         addParameter(
+                DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+                .withDefault(new C2StreamBitrateModeTuning::output(0u, C2Config::BITRATE_VARIABLE))
+                .withFields({C2F(mBitrateMode, value).oneOf({
+                                        C2Config::BITRATE_CONST,
+                                        C2Config::BITRATE_VARIABLE,
+                                        C2Config::BITRATE_IGNORE})
+                        })
+                .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+                .build());
+
+        addParameter(
                 DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
                 .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
                 .withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
@@ -536,6 +547,9 @@
     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
     std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const { return mIntraRefresh; }
     std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+    std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const {
+        return mBitrateMode;
+    }
     std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
     std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
@@ -552,6 +566,7 @@
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
     std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
     std::shared_ptr<C2StreamGopTuning::output> mGop;
@@ -1154,6 +1169,7 @@
     {
         IntfImpl::Lock lock = mIntf->lock();
         mSize = mIntf->getSize_l();
+        mBitrateMode = mIntf->getBitrateMode_l();
         mBitrate = mIntf->getBitrate_l();
         mFrameRate = mIntf->getFrameRate_l();
         mIntraRefresh = mIntf->getIntraRefresh_l();
@@ -1326,8 +1342,22 @@
         } else {
             ps_init_ip->u4_enable_recon = 0;
         }
+
+        switch (mBitrateMode->value) {
+            case C2Config::BITRATE_IGNORE:
+                ps_init_ip->e_rc_mode = IVE_RC_NONE;
+                break;
+            case C2Config::BITRATE_CONST:
+                ps_init_ip->e_rc_mode = IVE_RC_CBR_NON_LOW_DELAY;
+                break;
+            case C2Config::BITRATE_VARIABLE:
+                ps_init_ip->e_rc_mode = IVE_RC_STORAGE;
+                break;
+            default:
+                ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
+                break;
+        }
         ps_init_ip->e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
-        ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
         ps_init_ip->u4_max_framerate = DEFAULT_MAX_FRAMERATE;
         ps_init_ip->u4_max_bitrate = DEFAULT_MAX_BITRATE;
         ps_init_ip->u4_num_bframes = mBframes;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index cde6604..33d166f 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -191,6 +191,7 @@
     std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
 
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 4b189b4..2b59ee3 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -43,7 +43,7 @@
     ],
 
     static_libs: [
-        "libyuv_static", // for conversion routines
+        "libyuv", // for conversion routines
     ],
 
     shared_libs: [
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 06a21f6..aec6523 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -712,6 +712,7 @@
         case kWhatStop: {
             int32_t err = thiz->onStop();
             thiz->mOutputBlockPool.reset();
+            mRunning = false;
             Reply(msg, &err);
             break;
         }
diff --git a/media/codec2/components/cmds/codec2.cpp b/media/codec2/components/cmds/codec2.cpp
index a17b04e..ca65aa2 100644
--- a/media/codec2/components/cmds/codec2.cpp
+++ b/media/codec2/components/cmds/codec2.cpp
@@ -46,7 +46,6 @@
 #include <media/stagefright/Utils.h>
 
 #include <gui/GLConsumer.h>
-#include <gui/IProducerListener.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 
@@ -91,7 +90,7 @@
     std::shared_ptr<Listener> mListener;
     std::shared_ptr<C2Component> mComponent;
 
-    sp<IProducerListener> mProducerListener;
+    sp<SurfaceListener> mSurfaceListener;
 
     std::atomic_int mLinearPoolId;
 
@@ -138,7 +137,7 @@
 
 SimplePlayer::SimplePlayer()
     : mListener(new Listener(this)),
-      mProducerListener(new StubProducerListener),
+      mSurfaceListener(new StubSurfaceListener),
       mLinearPoolId(C2BlockPool::PLATFORM_START),
       mComposerClient(new SurfaceComposerClient) {
     CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
@@ -164,7 +163,7 @@
 
     mSurface = mControl->getSurface();
     CHECK(mSurface != nullptr);
-    mSurface->connect(NATIVE_WINDOW_API_CPU, mProducerListener);
+    mSurface->connect(NATIVE_WINDOW_API_CPU, mSurfaceListener);
 }
 
 SimplePlayer::~SimplePlayer() {
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 76680a3..4ec26d6 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -243,10 +243,17 @@
                              .build());
 
         addParameter(
+                DefineParam(mLowLatencyMode, C2_PARAMKEY_LOW_LATENCY_MODE)
+                .withDefault(new C2GlobalLowLatencyModeTuning(0))
+                .withFields({C2F(mLowLatencyMode, value).oneOf({0,1})})
+                .withSetter(Setter<decltype(*mLowLatencyMode)>::StrictValueWithNoDeps)
+                .build());
+
+        addParameter(
                 DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
                 .withDefault(new C2PortActualDelayTuning::output(kOutputDelay))
                 .withFields({C2F(mActualOutputDelay, value).inRange(0, kOutputDelay)})
-                .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
+                .withSetter(ActualOutputDelaySetter, mLowLatencyMode)
                 .build());
     }
 
@@ -365,6 +372,10 @@
         return mPixelFormat;
     }
 
+    std::shared_ptr<C2PortActualDelayTuning::output> getActualOutputDelay_l() const {
+        return mActualOutputDelay;
+    }
+
     static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
         (void)mayBlock;
         if (me.v.mastering.red.x > 1) {
@@ -406,6 +417,13 @@
         return C2R::Ok();
     }
 
+    static C2R ActualOutputDelaySetter(bool mayBlock, C2P<C2PortActualDelayTuning::output>& me,
+                                  const C2P<C2GlobalLowLatencyModeTuning>& lowLatencyMode) {
+        (void)mayBlock;
+        me.set().value = lowLatencyMode.v.value ? 1 : kOutputDelay;
+        return C2R::Ok();
+    }
+
   private:
     std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
     std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -419,6 +437,7 @@
     std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
     std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
     std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+    std::shared_ptr<C2GlobalLowLatencyModeTuning> mLowLatencyMode;
 };
 
 C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
@@ -516,6 +535,7 @@
     {
         IntfImpl::Lock lock = mIntf->lock();
         mPixelFormatInfo = mIntf->getPixelFormat_l();
+        mActualOutputDelayInfo = mIntf->getActualOutputDelay_l();
     }
 
     const char* version = dav1d_version();
@@ -529,7 +549,7 @@
             android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
     if (numThreads > 0) lib_settings.n_threads = numThreads;
 
-    lib_settings.max_frame_delay = kOutputDelay;
+    lib_settings.max_frame_delay = mActualOutputDelayInfo->value;
 
     int res = 0;
     if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
index 5d2a725..6008325 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.h
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -62,6 +62,7 @@
     // configurations used by component in process
     // (TODO: keep this in intf but make them internal only)
     std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+    std::shared_ptr<C2PortActualDelayTuning::output> mActualOutputDelayInfo;
 
     uint32_t mHalPixelFormat;
     uint32_t mWidth;
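
The dav1d changes above tie the reported output delay to the new low-latency tuning:
ActualOutputDelaySetter returns 1 when low-latency mode is enabled and kOutputDelay otherwise,
and that value is then passed to dav1d as max_frame_delay at codec creation. A minimal sketch of
that dependency (the kOutputDelay value below is an assumed placeholder, not the component's
actual constant):

    #include <cstdint>

    constexpr uint32_t kOutputDelay = 8;  // assumption for illustration only

    // Mirrors the relation expressed by ActualOutputDelaySetter.
    uint32_t actualOutputDelay(bool lowLatencyEnabled) {
        return lowLatencyEnabled ? 1u : kOutputDelay;
    }
    // The chosen value ends up in lib_settings.max_frame_delay.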
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 591d56d..780660e 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -21,6 +21,7 @@
 #include <audio_utils/primitives.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
+#include <C2Debug.h>
 #include <C2PlatformSupport.h>
 #include <SimpleC2Interface.h>
 
@@ -81,10 +82,6 @@
                     FLAC_COMPRESSION_LEVEL_MIN, FLAC_COMPRESSION_LEVEL_MAX)})
                 .withSetter(Setter<decltype(*mComplexity)>::NonStrictValueWithNoDeps)
                 .build());
-        addParameter(
-                DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
-                .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 4608))
-                .build());
 
         addParameter(
                 DefineParam(mPcmEncodingInfo, C2_PARAMKEY_PCM_ENCODING)
@@ -96,6 +93,26 @@
                 })
                 .withSetter((Setter<decltype(*mPcmEncodingInfo)>::StrictValueWithNoDeps))
                 .build());
+
+        addParameter(
+                DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMaxBlockSize))
+                .withFields({
+                    C2F(mInputMaxBufSize, value).any(),
+                })
+                .withSetter(MaxInputSizeSetter, mChannelCount, mPcmEncodingInfo)
+                .build());
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock,
+            C2P<C2StreamMaxBufferSizeInfo::input> &me,
+            const C2P<C2StreamChannelCountInfo::input> &channelCount,
+            const C2P<C2StreamPcmEncodingInfo::input> &pcmEncoding) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        int bytesPerSample = pcmEncoding.v.value == C2Config::PCM_FLOAT ? 4 : 2;
+        me.set().value = kMaxBlockSize * bytesPerSample * channelCount.v.value;
+        return res;
     }
 
     uint32_t getSampleRate() const { return mSampleRate->value; }
@@ -138,7 +155,7 @@
     mSignalledError = false;
     mSignalledOutputEos = false;
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mEncoderWriteData = false;
     mEncoderReturnedNbBytes = 0;
@@ -169,7 +186,7 @@
     mSignalledError = false;
     mSignalledOutputEos = false;
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mEncoderWriteData = false;
     mEncoderReturnedNbBytes = 0;
@@ -219,7 +236,7 @@
               inSize, (int)work->input.ordinal.timestamp.peeku(),
               (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
     if (mIsFirstFrame && inSize) {
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
         mIsFirstFrame = false;
     }
 
@@ -388,7 +405,7 @@
     C2WriteView wView = mOutputBlock->map().get();
     uint8_t* outData = wView.data();
     const uint32_t sampleRate = mIntf->getSampleRate();
-    const uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
+    const int64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
     ALOGV("writing %zu bytes of encoded data on output", bytes);
     // increment mProcessedSamples to maintain audio synchronization during
     // play back
@@ -446,6 +463,9 @@
 
     mBlockSize = FLAC__stream_encoder_get_blocksize(mFlacStreamEncoder);
 
+    // If this CHECK fails, update kMaxBlockSize to match the encoder's maximum block size.
+    CHECK(mBlockSize <= kMaxBlockSize);
+
     ALOGV("encoder successfully configured");
     return OK;
 }
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index a971ab5..ed9c298 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -63,7 +63,8 @@
 
     std::shared_ptr<IntfImpl> mIntf;
     const unsigned int kInBlockSize = 1152;
-    const unsigned int kMaxNumChannels = 2;
+    static constexpr unsigned int kMaxNumChannels = 2;
+    static constexpr unsigned int kMaxBlockSize = 4608;
     FLAC__StreamEncoder* mFlacStreamEncoder;
     FLAC__int32* mInputBufferPcm32;
     std::shared_ptr<C2LinearBlock> mOutputBlock;
@@ -71,7 +72,7 @@
     bool mSignalledOutputEos;
     uint32_t mBlockSize;
     bool mIsFirstFrame;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     // should the data received by the callback be written to the output port
     bool mEncoderWriteData;
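
Instead of the former constant 4608-byte cap, MaxInputSizeSetter above sizes the maximum input
buffer from the PCM encoding and channel count: kMaxBlockSize frames times bytes per sample times
channels. A small worked sketch of that rule (standalone and illustrative, not part of the patch):

    #include <cstdio>

    constexpr unsigned kMaxBlockSize = 4608;  // frames, as declared in C2SoftFlacEnc.h above

    unsigned maxInputBytes(bool floatPcm, unsigned channelCount) {
        const unsigned bytesPerSample = floatPcm ? 4u : 2u;
        return kMaxBlockSize * bytesPerSample * channelCount;
    }

    int main() {
        std::printf("16-bit stereo: %u bytes\n", maxInputBytes(false, 2));  // 18432
        std::printf("float stereo:  %u bytes\n", maxInputBytes(true, 2));   // 36864
        return 0;
    }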
diff --git a/media/codec2/components/gav1/Android.bp b/media/codec2/components/gav1/Android.bp
index 9781b6d..f22490d 100644
--- a/media/codec2/components/gav1/Android.bp
+++ b/media/codec2/components/gav1/Android.bp
@@ -23,7 +23,7 @@
     srcs: ["C2SoftGav1Dec.cpp"],
     static_libs: [
         "libgav1",
-        "libyuv_static",
+        "libyuv",
     ],
 
     apex_available: [
diff --git a/media/codec2/components/hevc/Android.bp b/media/codec2/components/hevc/Android.bp
index d1388b9..cb9c2ae 100644
--- a/media/codec2/components/hevc/Android.bp
+++ b/media/codec2/components/hevc/Android.bp
@@ -15,6 +15,10 @@
         "libcodec2_soft_sanitize_cfi-defaults",
     ],
 
+    cflags: [
+        "-DKEEP_THREADS_ACTIVE=1",
+    ],
+
     srcs: ["C2SoftHevcDec.cpp"],
 
     static_libs: ["libhevcdec"],
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 15d6dcd..64aa7a4 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -16,6 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftHevcDec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
 #include <log/log.h>
 
 #include <media/stagefright/foundation/MediaDefs.h>
@@ -407,6 +410,7 @@
     ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+    s_create_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
     s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
     s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorformat;
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index a58044c..e644ee3 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -14,6 +14,10 @@
         "libcodec2_soft_sanitize_signed-defaults",
     ],
 
+    cflags: [
+        "-DKEEP_THREADS_ACTIVE=0",
+    ],
+
     srcs: ["C2SoftMpeg2Dec.cpp"],
 
     static_libs: ["libmpeg2dec"],
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 439323c..562dcf5 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -16,6 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftMpeg2Dec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
 #include <log/log.h>
 
 #include <media/stagefright/foundation/MediaDefs.h>
@@ -433,6 +436,7 @@
 
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
     s_fill_mem_ip.u4_share_disp_buf = 0;
+    s_fill_mem_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_fill_mem_ip.e_output_format = mIvColorformat;
     s_fill_mem_ip.u4_deinterlace = 1;
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -474,6 +478,7 @@
     s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
     s_init_ip.u4_share_disp_buf = 0;
     s_init_ip.u4_deinterlace = 1;
+    s_init_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
     s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
     s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 2137964..fd9488b 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -469,11 +469,12 @@
         mInitialized = false;
     }
 
+    bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
+
     if (!mInitialized) {
         uint8_t *vol_data[1]{};
         int32_t vol_size = 0;
 
-        bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
         if (codecConfig || volHeader) {
             vol_data[0] = bitstream;
             vol_size = inSize;
@@ -512,10 +513,11 @@
                 return;
             }
         }
-        if (codecConfig) {
-            fillEmptyWork(work);
-            return;
-        }
+    }
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
     }
 
     size_t inPos = 0;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index cdc3be0..40bb26e 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -29,7 +29,6 @@
     #include <opus_multistream.h>
 }
 
-#define DEFAULT_FRAME_DURATION_MS 20
 namespace android {
 
 namespace {
@@ -38,7 +37,6 @@
 
 }  // namespace
 
-static const int kMaxNumChannelsSupported = 2;
 
 class C2SoftOpusEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
@@ -248,10 +246,11 @@
     mAnchorTimeStamp = 0;
     mProcessedSamples = 0;
     mFilledLen = 0;
-    mFrameDurationMs = DEFAULT_FRAME_DURATION_MS;
+    mFrameDurationMs = kDefaultFrameDurationMs;
     if (!mInputBufferPcm16) {
+        size_t frameSize = (mFrameDurationMs * kMaxSampleRateSupported) / 1000;
         mInputBufferPcm16 =
-            (int16_t*)malloc(kFrameSize * kMaxNumChannels * sizeof(int16_t));
+            (int16_t*)malloc(frameSize * kMaxNumChannelsSupported * sizeof(int16_t));
     }
     if (!mInputBufferPcm16) return C2_NO_MEMORY;
 
@@ -368,7 +367,9 @@
     }
 
     C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
-    err = pool->fetchLinearBlock(kMaxPayload, usage, &mOutputBlock);
+    int outCapacity =
+        kMaxPayload * ((inSize + mNumPcmBytesPerInputFrame) / mNumPcmBytesPerInputFrame);
+    err = pool->fetchLinearBlock(outCapacity, usage, &mOutputBlock);
     if (err != C2_OK) {
         ALOGE("fetchLinearBlock for Output failed with status %d", err);
         work->result = C2_NO_MEMORY;
@@ -497,11 +498,11 @@
         uint8_t* outPtr = wView.data() + mBytesEncoded;
         int encodedBytes =
             opus_multistream_encode(mEncoder, mInputBufferPcm16,
-                                    mNumSamplesPerFrame, outPtr, kMaxPayload - mBytesEncoded);
+                                    mNumSamplesPerFrame, outPtr, outCapacity - mBytesEncoded);
         ALOGV("encoded %i Opus bytes from %zu PCM bytes", encodedBytes,
               processSize);
 
-        if (encodedBytes < 0 || encodedBytes > (kMaxPayload - mBytesEncoded)) {
+        if (encodedBytes < 0 || encodedBytes > (outCapacity - mBytesEncoded)) {
             ALOGE("opus_encode failed, encodedBytes : %d", encodedBytes);
             mSignalledError = true;
             work->result = C2_CORRUPTED;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.h b/media/codec2/components/opus/C2SoftOpusEnc.h
index 733a6bc..2c9f5e5 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.h
+++ b/media/codec2/components/opus/C2SoftOpusEnc.h
@@ -45,12 +45,13 @@
             uint32_t drainMode,
             const std::shared_ptr<C2BlockPool> &pool) override;
 private:
-    /* OPUS_FRAMESIZE_20_MS */
-    const int kFrameSize = 960;
-    const int kMaxSampleRate = 48000;
-    const int kMinSampleRate = 8000;
-    const int kMaxPayload = (4000 * kMaxSampleRate) / kMinSampleRate;
-    const int kMaxNumChannels = 8;
+    static const int kMaxNumChannelsSupported = 2;
+    static const int kMaxSampleRateSupported = 48000;
+    static const int kDefaultFrameDurationMs = 20;
+    // For a frame duration of 20 ms, the recommended payload size is 1276, as per
+    // https://www.opus-codec.org/docs/html_api/group__opusencoder.html.
+    // For 40 ms, 60 ms, ..., the payload size scales proportionately: 1276 x 2, 1276 x 3, ...
+    static const int kMaxPayload = 1500; // from tests/test_opus_encode.c
 
     std::shared_ptr<IntfImpl> mIntf;
     std::shared_ptr<C2LinearBlock> mOutputBlock;
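
The Opus encoder above replaces the old kMaxPayload formula with a 1500-byte per-frame bound and
scales the fetched output block by the number of input frames in the work item. A worked sketch of
that sizing, assuming 20 ms frames of 16-bit stereo PCM at 48 kHz (the figures are illustrative,
not taken from the patch):

    #include <cstdio>

    constexpr int kMaxPayload = 1500;  // per-frame payload bound used above

    int outCapacity(int inSizeBytes, int pcmBytesPerFrame) {
        // Same expression as the out-capacity computation in the patch above.
        return kMaxPayload * ((inSizeBytes + pcmBytesPerFrame) / pcmBytesPerFrame);
    }

    int main() {
        const int pcmBytesPerFrame = 960 /*samples*/ * 2 /*channels*/ * 2 /*bytes*/;  // 3840
        std::printf("capacity for two input frames: %d bytes\n",
                    outCapacity(2 * pcmBytesPerFrame, pcmBytesPerFrame));             // 4500
        return 0;
    }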
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index dab7b89..318f093 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -446,6 +446,7 @@
     {
         IntfImpl::Lock lock = mIntf->lock();
         mPixelFormatInfo = mIntf->getPixelFormat_l();
+        mColorAspects = mIntf->getDefaultColorAspects_l();
     }
 
     mWidth = 320;
@@ -591,6 +592,41 @@
         return;
     }
 
+    // handle dynamic config parameters
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
+            mIntf->getDefaultColorAspects_l();
+        lock.unlock();
+
+        if (mColorAspects->range != defaultColorAspects->range ||
+            mColorAspects->primaries != defaultColorAspects->primaries ||
+            mColorAspects->matrix != defaultColorAspects->matrix ||
+            mColorAspects->transfer != defaultColorAspects->transfer) {
+
+            mColorAspects->range = defaultColorAspects->range;
+            mColorAspects->primaries = defaultColorAspects->primaries;
+            mColorAspects->matrix = defaultColorAspects->matrix;
+            mColorAspects->transfer = defaultColorAspects->transfer;
+
+            C2StreamColorAspectsTuning::output colorAspect(0u, defaultColorAspects->range,
+                defaultColorAspects->primaries, defaultColorAspects->transfer,
+                defaultColorAspects->matrix);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err = mIntf->config({&colorAspect}, C2_MAY_BLOCK, &failures);
+            if (err == C2_OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(colorAspect));
+            } else {
+                ALOGE("Config update colorAspect failed");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+    }
+
     size_t inOffset = 0u;
     size_t inSize = 0u;
     C2ReadView rView = mDummyReadView;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index e9d6dc9..93cc213 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -66,6 +66,7 @@
     // configurations used by component in process
     // (TODO: keep this in intf but make them internal only)
     std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mColorAspects;
 
     std::shared_ptr<IntfImpl> mIntf;
     vpx_codec_ctx_t *mCodecCtx;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 76e74ec..3e88acd 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -22,6 +22,7 @@
 #include <media/hardware/VideoAPI.h>
 
 #include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
 #include <C2Debug.h>
 #include "C2SoftVpxEnc.h"
 
@@ -29,6 +30,12 @@
 #define INT32_MAX   2147483647
 #endif
 
+/* Quantization param values defined by the spec */
+#define VPX_QP_MIN 0
+#define VPX_QP_MAX 63
+#define VPX_QP_DEFAULT_MIN VPX_QP_MIN
+#define VPX_QP_DEFAULT_MAX VPX_QP_MAX
+
 namespace android {
 
 C2SoftVpxEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
@@ -57,12 +64,14 @@
                     0u, (uint64_t)C2MemoryUsage::CPU_READ))
             .build());
 
+    // Odd dimension support in encoders requires Android V and above
+    size_t stepSize = isAtLeastV() ? 1 : 2;
     addParameter(
         DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
             .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
             .withFields({
-                C2F(mSize, width).inRange(2, 2048, 2),
-                C2F(mSize, height).inRange(2, 2048, 2),
+                C2F(mSize, width).inRange(2, 2048, stepSize),
+                C2F(mSize, height).inRange(2, 2048, stepSize),
             })
             .withSetter(SizeSetter)
             .build());
@@ -197,6 +206,20 @@
             })
             .withSetter(CodedColorAspectsSetter, mColorAspects)
             .build());
+
+    addParameter(
+            DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+            .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                    0 /* flexCount */, 0u /* stream */))
+            .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                            {C2Config::I_FRAME, C2Config::P_FRAME}),
+                         C2F(mPictureQuantization, m.values[0].min).inRange(
+                            VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX),
+                         C2F(mPictureQuantization, m.values[0].max).inRange(
+                            VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX)})
+            .withSetter(PictureQuantizationSetter)
+            .build());
+
 }
 
 C2R C2SoftVpxEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
@@ -330,6 +353,55 @@
     double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
     return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
 }
+
+C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(
+        bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
+    (void)mayBlock;
+    int32_t iMin = VPX_QP_DEFAULT_MIN, pMin = VPX_QP_DEFAULT_MIN;
+    int32_t iMax = VPX_QP_DEFAULT_MAX, pMax = VPX_QP_DEFAULT_MAX;
+    for (size_t i = 0; i < me.v.flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+        // layerMin is clamped to [VPX_QP_MIN, layerMax] to avoid error
+        // cases where layer.min > layer.max
+        int32_t layerMax = std::clamp(layer.max, VPX_QP_MIN, VPX_QP_MAX);
+        int32_t layerMin = std::clamp(layer.min, VPX_QP_MIN, layerMax);
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            iMax = layerMax;
+            iMin = layerMin;
+            ALOGV("iMin %d iMax %d", iMin, iMax);
+        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            pMax = layerMax;
+            pMin = layerMin;
+            ALOGV("pMin %d pMax %d", pMin, pMax);
+        }
+    }
+    ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
+          iMin, iMax, pMin, pMax);
+
+    // vpx library takes same range for I/P picture type
+    int32_t maxFrameQP = std::min(iMax, pMax);
+    int32_t minFrameQP = std::max(iMin, pMin);
+    if (minFrameQP > maxFrameQP) {
+        minFrameQP = maxFrameQP;
+    }
+    // put them back into the structure
+    for (size_t i = 0; i < me.v.flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            me.set().m.values[i].max = maxFrameQP;
+            me.set().m.values[i].min = minFrameQP;
+        }
+        else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            me.set().m.values[i].max = maxFrameQP;
+            me.set().m.values[i].min = minFrameQP;
+        }
+    }
+    ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
+          minFrameQP, maxFrameQP);
+    return C2R::Ok();
+}
+
 C2R C2SoftVpxEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
                                                C2P<C2StreamColorAspectsInfo::input>& me) {
     (void)mayBlock;
@@ -393,6 +465,7 @@
       mTemporalPatternIdx(0),
       mLastTimestamp(0x7FFFFFFFFFFFFFFFull),
       mSignalledOutputEos(false),
+      mHeaderGenerated(false),
       mSignalledError(false) {
     for (int i = 0; i < MAXTEMPORALLAYERS; i++) {
         mTemporalLayerBitrateRatio[i] = 1.0f;
@@ -422,6 +495,7 @@
 
     // this one is not allocated by us
     mCodecInterface = nullptr;
+    mHeaderGenerated = false;
 }
 
 c2_status_t C2SoftVpxEnc::onStop() {
@@ -453,6 +527,7 @@
         mRequestSync = mIntf->getRequestSync_l();
         mLayering = mIntf->getTemporalLayers_l();
         mTemporalLayers = mLayering->m.layerCount;
+        mQpBounds = mIntf->getPictureQuantization_l();
     }
 
     switch (mBitrateMode->value) {
@@ -466,6 +541,18 @@
             break;
     }
 
+    if (mQpBounds->flexCount() > 0) {
+        // read min max qp for sequence
+        for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+                mMaxQuantizer = layer.max;
+                mMinQuantizer = layer.min;
+                break;
+            }
+        }
+    }
+
     setCodecSpecificInterface();
     if (!mCodecInterface) goto CleanUp;
 
@@ -473,6 +560,7 @@
           (uint32_t)mBitrateControlMode, mTemporalLayers, mIntf->getSyncFramePeriod(),
           mMinQuantizer, mMaxQuantizer);
 
+    mHeaderGenerated = false;
     mCodecConfiguration = new vpx_codec_enc_cfg_t;
     if (!mCodecConfiguration) goto CleanUp;
     codec_return = vpx_codec_enc_config_default(mCodecInterface,
@@ -788,6 +876,27 @@
         return;
     }
 
+    // Header generation is limited to Android V and above, as MediaMuxer did not handle
+    // CSD for VP9 correctly in Android U and before.
+    if (isAtLeastV() && !mHeaderGenerated) {
+        vpx_fixed_buf_t* codec_private_data = vpx_codec_get_global_headers(mCodecContext);
+        if (codec_private_data) {
+            std::unique_ptr<C2StreamInitDataInfo::output> csd =
+                    C2StreamInitDataInfo::output::AllocUnique(codec_private_data->sz, 0u);
+            if (!csd) {
+                ALOGE("CSD allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                work->workletsProcessed = 1u;
+                return;
+            }
+            memcpy(csd->m.value, codec_private_data->buf, codec_private_data->sz);
+            work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+            ALOGV("CSD Produced of size %zu bytes", codec_private_data->sz);
+        }
+        mHeaderGenerated = true;
+    }
+
     const C2ConstGraphicBlock inBuffer =
         inputBuffer->data().graphicBlocks().front();
     if (inBuffer.width() < mSize->width ||
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index bfb4444..87d24f9 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -207,6 +207,9 @@
      // Signalled EOS
      bool mSignalledOutputEos;
 
+     // Header generated
+     bool mHeaderGenerated;
+
      // Signalled Error
      bool mSignalledError;
 
@@ -219,6 +222,7 @@
     std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamTemporalLayeringTuning::output> mLayering;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
 
      C2_DO_NOT_COPY(C2SoftVpxEnc);
 };
@@ -250,6 +254,9 @@
 
     static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me);
 
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me);
+
     // unsafe getters
     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
     std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const {
@@ -269,6 +276,9 @@
     std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
         return mCodedColorAspects;
     }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+        return mPictureQuantization;
+    }
     uint32_t getSyncFramePeriod() const;
     static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me);
     static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
@@ -287,6 +297,7 @@
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
     std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
 };
 
 }  // namespace android
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index 7d5740b..c205dcd 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -26,9 +26,6 @@
         "//apex_available:platform",
         "com.android.media.swcodec",
     ],
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: ["C2.cpp"],
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 785cdf2..e6782a9 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -164,6 +164,9 @@
     kParamIndexLargeFrame,
     kParamIndexAccessUnitInfos, // struct
 
+    /* Region of Interest Encoding parameters */
+    kParamIndexQpOffsetMapBuffer, // info-buffer, used to signal qp-offset map for a frame
+
     // deprecated
     kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
 
@@ -201,6 +204,8 @@
     kParamIndexPictureQuantization,
     kParamIndexHdrDynamicMetadata,
     kParamIndexHdrFormat,
+    kParamIndexQpOffsetRect,
+    kParamIndexQpOffsetRects,
 
     /* ------------------------------------ video components ------------------------------------ */
 
@@ -1394,6 +1399,47 @@
 constexpr char C2_PARAMKEY_VUI_ROTATION[] = "coded.vui.rotation";
 
 /**
+ * Region of Interest of an image/video frame communicated as an array of C2QpOffsetRectStruct
+ *
+ * Fields width, height, left and top of C2QpOffsetRectStruct form a bounding box outlining the RoI.
+ * Field qpOffset of C2QpOffsetRectStruct indicates the qp bias to be used for quantizing the
+ * coding units of the bounding box.
+ *
+ * If an RoI rect is not valid, that is, the bounding box width or height is < 0,
+ * components may silently ignore the configuration. If an RoI rect extends outside the
+ * frame boundaries, the rect shall be clamped to the frame boundaries.
+ *
+ * The scope of this key is throughout the encoding session until it is reconfigured with a
+ * different value.
+ *
+ * The number of elements in the C2StreamQpOffsetRects array is not limited by the C2
+ * specification; however, components may mandate a limit. Implementations may drop
+ * rectangles that are beyond the supported limits, so it is preferable to place the rects
+ * in descending order of importance. Likewise, if bounding boxes overlap, the most
+ * preferred (earlier) rectangle's qp offset will be used to quantize the block.
+ */
+struct C2QpOffsetRectStruct : C2Rect {
+    C2QpOffsetRectStruct() = default;
+    C2QpOffsetRectStruct(const C2Rect &rect, int32_t offset) : C2Rect(rect), qpOffset(offset) {}
+
+    bool operator==(const C2QpOffsetRectStruct &) = delete;
+    bool operator!=(const C2QpOffsetRectStruct &) = delete;
+
+    int32_t qpOffset;
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(QpOffsetRect)
+    C2FIELD(width, "width")
+    C2FIELD(height, "height")
+    C2FIELD(left, "left")
+    C2FIELD(top, "top")
+    C2FIELD(qpOffset, "qp-offset")
+};
+
+typedef C2StreamParam<C2Info, C2SimpleArrayStruct<C2QpOffsetRectStruct>, kParamIndexQpOffsetRects>
+        C2StreamQpOffsetRects;
+constexpr char C2_PARAMKEY_QP_OFFSET_RECTS[] = "coding.qp-offset-rects";
+
+/**
  * Pixel (sample) aspect ratio.
  */
 typedef C2StreamParam<C2Info, C2PictureSizeStruct, kParamIndexPixelAspectRatio>
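For illustration, a minimal client-side sketch of filling the new flexible param (a sketch only, not taken from this change; the choice of the ::output specialization, the `intf` interface pointer, the 256x256 region, and the -4 offset are assumptions for the example, and which port specialization and offsets a given encoder honours depends on the component):

// Hypothetical helper; assumes C2Config.h and an already connected C2ComponentInterface.
c2_status_t configureRoiSketch(const std::shared_ptr<C2ComponentInterface> &intf) {
    std::unique_ptr<C2StreamQpOffsetRects::output> roi =
            C2StreamQpOffsetRects::output::AllocUnique(1 /* flexCount */, 0u /* stream */);
    if (!roi) {
        return C2_NO_MEMORY;
    }
    // Lower QP by 4 inside a 256x256 region anchored at the frame's top-left corner.
    roi->m.values[0] = C2QpOffsetRectStruct(C2Rect(256, 256), -4 /* qpOffset */);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    return intf->config_vb({roi.get()}, C2_MAY_BLOCK, &failures);
}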
diff --git a/media/codec2/core/include/C2Param.h b/media/codec2/core/include/C2Param.h
index e938f96..387d2b8 100644
--- a/media/codec2/core/include/C2Param.h
+++ b/media/codec2/core/include/C2Param.h
@@ -427,7 +427,9 @@
     inline bool operator==(const C2Param &o) const {
         return equals(o) && memcmp(this, &o, _mSize) == 0;
     }
+#if __cplusplus < 202002
     inline bool operator!=(const C2Param &o) const { return !operator==(o); }
+#endif
 
     /// safe(r) type cast from pointer and size
     inline static C2Param* From(void *addr, size_t len) {
diff --git a/media/codec2/core/include/C2ParamDef.h b/media/codec2/core/include/C2ParamDef.h
index 86dfe65..1805464 100644
--- a/media/codec2/core/include/C2ParamDef.h
+++ b/media/codec2/core/include/C2ParamDef.h
@@ -212,6 +212,26 @@
     }
 };
 
+/// Define equality (and inequality) operators for params.
+#if __cplusplus < 202002
+
+#define DEFINE_EQUALITY_OPERATORS(_Type, T) \
+    inline bool operator==(const _Type &o) const { \
+        return this->T::operator==(o); \
+    } \
+    inline bool operator!=(const _Type &o) const { \
+        return !operator==(o); \
+    }
+
+#else
+
+#define DEFINE_EQUALITY_OPERATORS(_Type, T) \
+    inline bool operator==(const _Type &o) const { \
+        return this->T::operator==(o); \
+    }
+
+#endif
+
 /// Define From() cast operators for params.
 #define DEFINE_CAST_OPERATORS(_Type) \
     inline static _Type* From(C2Param *other) { \
@@ -404,12 +424,12 @@
     /// Specialization for an input port parameter.
     struct input : public T, public S,
             public _C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_INPUT> {
-        using T::operator!=;
         _C2_CORE_INDEX_OVERRIDE(ParamIndex)
         /// Wrapper around base structure's constructor.
         template<typename ...Args>
         inline input(const Args(&... args)) : T(sizeof(_Type), input::PARAM_TYPE), S(args...) { }
 
+        DEFINE_EQUALITY_OPERATORS(input, T)
         DEFINE_CAST_OPERATORS(input)
 
     };
@@ -417,12 +437,12 @@
     /// Specialization for an output port parameter.
     struct output : public T, public S,
             public _C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_OUTPUT> {
-        using T::operator!=;
         _C2_CORE_INDEX_OVERRIDE(ParamIndex)
         /// Wrapper around base structure's constructor.
         template<typename ...Args>
         inline output(const Args(&... args)) : T(sizeof(_Type), output::PARAM_TYPE), S(args...) { }
 
+        DEFINE_EQUALITY_OPERATORS(output, T)
         DEFINE_CAST_OPERATORS(output)
     };
 };
@@ -472,7 +492,6 @@
     /// Specialization for an input port parameter.
     struct input : public T,
             public _C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_INPUT> {
-        using T::operator!=;
     private:
         /// Wrapper around base structure's constructor while also specifying port/direction.
         template<typename ...Args>
@@ -482,6 +501,7 @@
     public:
         S m; ///< wrapped flexible structure
 
+        DEFINE_EQUALITY_OPERATORS(input, T)
         DEFINE_FLEXIBLE_METHODS(input, S)
         DEFINE_CAST_OPERATORS(input)
     };
@@ -489,7 +509,6 @@
     /// Specialization for an output port parameter.
     struct output : public T,
             public _C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_OUTPUT> {
-        using T::operator!=;
     private:
         /// Wrapper around base structure's constructor while also specifying port/direction.
         template<typename ...Args>
@@ -499,6 +518,7 @@
     public:
         S m; ///< wrapped flexible structure
 
+        DEFINE_EQUALITY_OPERATORS(output, T)
         DEFINE_FLEXIBLE_METHODS(output, S)
         DEFINE_CAST_OPERATORS(output)
     };
@@ -553,7 +573,6 @@
     struct input : public T, public S,
             public _C2StructCheck<S, ParamIndex,
                     T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_INPUT> {
-        using T::operator!=;
         _C2_CORE_INDEX_OVERRIDE(ParamIndex)
 
         /// Default constructor. Stream-ID is undefined.
@@ -565,6 +584,7 @@
         /// Set stream-id. \retval true if the stream-id was successfully set.
         inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
+        DEFINE_EQUALITY_OPERATORS(input, T)
         DEFINE_CAST_OPERATORS(input)
     };
 
@@ -572,7 +592,6 @@
     struct output : public T, public S,
             public _C2StructCheck<S, ParamIndex,
                     T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_OUTPUT> {
-        using T::operator!=;
         _C2_CORE_INDEX_OVERRIDE(ParamIndex)
 
         /// Default constructor. Stream-ID is undefined.
@@ -584,6 +603,7 @@
         /// Set stream-id. \retval true if the stream-id was successfully set.
         inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
+        DEFINE_EQUALITY_OPERATORS(output, T)
         DEFINE_CAST_OPERATORS(output)
     };
 };
@@ -640,7 +660,6 @@
     struct input : public T,
             public _C2FlexStructCheck<S, ParamIndex,
                     T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_INPUT> {
-        using T::operator!=;
     private:
         /// Default constructor. Stream-ID is undefined.
         inline input(size_t flexCount) : T(_Type::CalcSize(flexCount), input::PARAM_TYPE) { }
@@ -655,6 +674,7 @@
         /// Set stream-id. \retval true if the stream-id was successfully set.
         inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
+        DEFINE_EQUALITY_OPERATORS(input, T)
         DEFINE_FLEXIBLE_METHODS(input, S)
         DEFINE_CAST_OPERATORS(input)
     };
@@ -663,7 +683,6 @@
     struct output : public T,
             public _C2FlexStructCheck<S, ParamIndex,
                     T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_OUTPUT> {
-        using T::operator!=;
     private:
         /// Default constructor. Stream-ID is undefined.
         inline output(size_t flexCount) : T(_Type::CalcSize(flexCount), output::PARAM_TYPE) { }
@@ -678,6 +697,7 @@
         /// Set stream-id. \retval true if the stream-id was successfully set.
         inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
+        DEFINE_EQUALITY_OPERATORS(output, T)
         DEFINE_FLEXIBLE_METHODS(output, S)
         DEFINE_CAST_OPERATORS(output)
     };
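A generic C++ sketch (standalone, not Codec2 code) of the language behavior that motivates the guarded operator!= in C2Param.h and C2ParamDef.h above: since C++20 the compiler rewrites `a != b` as `!(a == b)`, so only operator== needs to be declared, and keeping separately declared or inherited operator!= members alongside it can make the rewritten candidates ambiguous or redundant.

#include <iostream>

struct Param {
    int size;
    bool operator==(const Param &o) const { return size == o.size; }
#if __cplusplus < 202002L
    // Pre-C++20 only: the compiler does not rewrite != from ==, so declare it explicitly.
    bool operator!=(const Param &o) const { return !(*this == o); }
#endif
};

int main() {
    Param a{4}, b{8};
    // In C++20 this call is rewritten to !(a == b); no operator!= is needed.
    std::cout << std::boolalpha << (a != b) << std::endl;  // prints "true"
}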
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index b387b2c..ec77427 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -163,7 +163,7 @@
 
     static_libs: [
         "libgav1",
-        "libyuv_static",
+        "libyuv",
         "libcodec2_soft_av1dec_gav1",
     ],
 }
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index 48b6e21..e16e2b1 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -8,6 +8,7 @@
     name: "libcodec2_aidl_client",
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
@@ -65,6 +66,7 @@
     ],
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
diff --git a/media/codec2/hal/aidl/Component.cpp b/media/codec2/hal/aidl/Component.cpp
index 8da9861..87c9d87 100644
--- a/media/codec2/hal/aidl/Component.cpp
+++ b/media/codec2/hal/aidl/Component.cpp
@@ -46,6 +46,8 @@
 using ::aidl::android::hardware::common::NativeHandle;
 using ::aidl::android::hardware::media::bufferpool2::IClientManager;
 using ::ndk::ScopedAStatus;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 // ComponentListener wrapper
 struct Component::Listener : public C2Component::Listener {
@@ -139,6 +141,52 @@
     std::weak_ptr<IComponentListener> mListener;
 };
 
+// Component listener for handling multiple access units
+struct MultiAccessUnitListener : public Component::Listener {
+    MultiAccessUnitListener(const std::shared_ptr<Component>& component,
+            const std::shared_ptr<MultiAccessUnitHelper> &helper):
+        Listener(component), mHelper(helper) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> c2component,
+            uint32_t errorCode) override {
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> worklist;
+            mHelper->error(&worklist);
+            if (!worklist.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(worklist));
+            }
+        }
+        Listener::onError_nb(c2component, errorCode);
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        Listener::onTripped_nb(c2component,
+                c2settingResult);
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> processedWork;
+            mHelper->gather(c2workItems, &processedWork);
+            if (!processedWork.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(processedWork));
+            }
+        } else {
+            Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+        }
+    }
+
+protected:
+    std::shared_ptr<MultiAccessUnitHelper> mHelper;
+};
+
 // Component::DeathContext
 struct Component::DeathContext {
     std::weak_ptr<Component> mWeakComp;
@@ -151,14 +199,15 @@
         const std::shared_ptr<ComponentStore>& store,
         const std::shared_ptr<IClientManager>& clientPoolManager)
       : mComponent{component},
-        mInterface{SharedRefBase::make<ComponentInterface>(
-                component->intf(), store->getParameterCache())},
         mListener{listener},
         mStore{store},
         mBufferPoolSender{clientPoolManager},
         mDeathContext(nullptr) {
     // Retrieve supported parameters from store
     // TODO: We could cache this per component/interface type
+    mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+    mInterface = SharedRefBase::make<ComponentInterface>(
+            component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
     mInit = mInterface->status();
 }
 
@@ -181,8 +230,21 @@
                     registerFrameData(mListener, work->input);
         }
     }
+    c2_status_t err = C2_OK;
+    if (mMultiAccessUnitHelper) {
+        std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+        mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+        for (auto &c2worklist : c2worklists) {
+            err = mComponent->queue_nb(&c2worklist);
+            if (err != C2_OK) {
+                LOG(ERROR) << "Error Queuing to component.";
+                return ScopedAStatus::fromServiceSpecificError(err);
+            }
+        }
+        return ScopedAStatus::ok();
+    }
 
-    c2_status_t err = mComponent->queue_nb(&c2works);
+    err = mComponent->queue_nb(&c2works);
     if (err == C2_OK) {
         return ScopedAStatus::ok();
     }
@@ -194,7 +256,9 @@
     c2_status_t c2res = mComponent->flush_sm(
             C2Component::FLUSH_COMPONENT,
             &c2flushedWorks);
-
+    if (mMultiAccessUnitHelper) {
+        c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+    }
     // Unregister input buffers.
     for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
         if (work) {
@@ -364,6 +428,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     if (status == C2_OK) {
         return ScopedAStatus::ok();
@@ -377,6 +444,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     if (status == C2_OK) {
         return ScopedAStatus::ok();
@@ -415,7 +485,24 @@
 
 void Component::initListener(const std::shared_ptr<Component>& self) {
     if (__builtin_available(android __ANDROID_API_T__, *)) {
-        std::shared_ptr<C2Component::Listener> c2listener =
+        std::shared_ptr<C2Component::Listener> c2listener;
+        if (mMultiAccessUnitIntf) {
+            std::shared_ptr<C2Allocator> allocator;
+            std::shared_ptr<C2BlockPool> linearPool;
+            std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+            if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+                ::android::C2PlatformAllocatorDesc desc;
+                desc.allocatorId = allocator->getId();
+                if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+                    if (linearPool) {
+                        mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+                                mMultiAccessUnitIntf, linearPool);
+                    }
+                }
+            }
+        }
+        c2listener = mMultiAccessUnitHelper ?
+                std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
                 std::make_shared<Listener>(self);
         c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
         if (res != C2_OK) {
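As background for the queue_nb()/onWorkDone_nb() changes above, a tiny standalone model of the scatter/gather idea (hypothetical types; the real MultiAccessUnitHelper operates on C2Work lists and is introduced later in this change): a multi-access-unit work item is split into per-access-unit items before being queued to the component, and the finished items are reassembled into one item before the client listener sees them.

#include <iostream>
#include <list>
#include <string>
#include <vector>

struct Work { std::vector<std::string> accessUnits; };

// Split one multi-access-unit work into one work per access unit (cf. helper->scatter()).
std::list<Work> scatter(const Work &large) {
    std::list<Work> pieces;
    for (const std::string &au : large.accessUnits) {
        pieces.push_back(Work{{au}});
    }
    return pieces;
}

// Reassemble finished per-access-unit works into a single work (cf. helper->gather()).
Work gather(const std::list<Work> &finished) {
    Work large;
    for (const Work &w : finished) {
        large.accessUnits.insert(large.accessUnits.end(),
                                 w.accessUnits.begin(), w.accessUnits.end());
    }
    return large;
}

int main() {
    Work in{{"AU0", "AU1", "AU2"}};
    std::list<Work> queued = scatter(in);  // queued to the component one by one
    Work out = gather(queued);             // reported to the listener as one work
    std::cout << out.accessUnits.size() << " access units gathered\n";  // prints "3"
}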
diff --git a/media/codec2/hal/aidl/ComponentInterface.cpp b/media/codec2/hal/aidl/ComponentInterface.cpp
index 2d812c9..8c7a986 100644
--- a/media/codec2/hal/aidl/ComponentInterface.cpp
+++ b/media/codec2/hal/aidl/ComponentInterface.cpp
@@ -24,6 +24,8 @@
 
 #include <utils/Timers.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
@@ -43,9 +45,10 @@
 
 // Implementation of ConfigurableC2Intf based on C2ComponentInterface
 struct CompIntf : public ConfigurableC2Intf {
-    CompIntf(const std::shared_ptr<C2ComponentInterface>& intf) :
+    CompIntf(const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf):
         ConfigurableC2Intf{intf->getName(), intf->getId()},
-        mIntf{intf} {
+        mIntf{intf}, mMultiAccessUnitIntf{multiAccessUnitIntf} {
     }
 
     virtual c2_status_t config(
@@ -53,7 +56,54 @@
             c2_blocking_t mayBlock,
             std::vector<std::unique_ptr<C2SettingResult>>* const failures
             ) override {
-        return mIntf->config_vb(params, mayBlock, failures);
+        std::vector<C2Param*> paramsToIntf;
+        std::vector<C2Param*> paramsToLargeFrameIntf;
+        c2_status_t err = C2_OK;
+        if (mMultiAccessUnitIntf == nullptr) {
+            err = mIntf->config_vb(params, mayBlock, failures);
+            return err;
+        }
+        for (auto &p : params) {
+            if (mMultiAccessUnitIntf->isParamSupported(p->index())) {
+                paramsToLargeFrameIntf.push_back(p);
+            } else {
+                paramsToIntf.push_back(p);
+            }
+        }
+        c2_status_t err1 = C2_OK;
+        if (paramsToIntf.size() > 0) {
+            err1 = mIntf->config_vb(paramsToIntf, mayBlock, failures);
+        }
+        if (err1 != C2_OK) {
+            LOG(ERROR) << "We have a failed config";
+        }
+        c2_status_t err2 = C2_OK;
+        if (paramsToLargeFrameIntf.size() > 0) {
+            C2ComponentKindSetting kind;
+            C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+            c2_status_t err = mIntf->query_vb(
+                    {&kind, &maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+            if ((err == C2_OK) && (kind.value == C2Component::KIND_ENCODER)) {
+                for (int i = 0 ; i < paramsToLargeFrameIntf.size(); i++) {
+                    if (paramsToLargeFrameIntf[i]->index() ==
+                            C2LargeFrame::output::PARAM_TYPE) {
+                        C2LargeFrame::output *lfp = C2LargeFrame::output::From(
+                                    paramsToLargeFrameIntf[i]);
+                        // This assumes a worst-case compression ratio of 1:1.
+                        // The encoder should never produce more output than the
+                        // input provided to it in a single call.
+                        if (lfp && (lfp->maxSize < maxInputSize.value)) {
+                            lfp->maxSize = maxInputSize.value;
+                        }
+                        break;
+                    }
+                }
+            }
+            err2 = mMultiAccessUnitIntf->config(
+                    paramsToLargeFrameIntf, mayBlock, failures);
+        }
+        // TODO: correct failure vector
+        return err1 != C2_OK ? err1 : err2;
     }
 
     virtual c2_status_t query(
@@ -61,23 +111,82 @@
             c2_blocking_t mayBlock,
             std::vector<std::unique_ptr<C2Param>>* const params
             ) const override {
-        return mIntf->query_vb({}, indices, mayBlock, params);
+        c2_status_t err = C2_OK;
+        if (mMultiAccessUnitIntf == nullptr) {
+            err = mIntf->query_vb({}, indices, mayBlock, params);
+            return err;
+        }
+        std::vector<C2Param::Index> paramsToIntf;
+        std::vector<C2Param::Index> paramsToLargeFrameIntf;
+        for (auto &i : indices) {
+            if (mMultiAccessUnitIntf->isParamSupported(i)) {
+                paramsToLargeFrameIntf.push_back(i);
+            } else {
+                paramsToIntf.push_back(i);
+            }
+        }
+        c2_status_t err1 = C2_OK;
+        if (paramsToIntf.size() > 0) {
+            err1 = mIntf->query_vb({}, paramsToIntf, mayBlock, params);
+        }
+        c2_status_t err2 = C2_OK;
+        if (paramsToLargeFrameIntf.size() > 0) {
+            err2 = mMultiAccessUnitIntf->query(
+                    {}, paramsToLargeFrameIntf, mayBlock, params);
+        }
+        // TODO: correct failure vector
+        return err1 != C2_OK ? err1 : err2;
     }
 
     virtual c2_status_t querySupportedParams(
             std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
             ) const override {
-        return mIntf->querySupportedParams_nb(params);
+        c2_status_t err = mIntf->querySupportedParams_nb(params);
+        if (mMultiAccessUnitIntf != nullptr) {
+            err = mMultiAccessUnitIntf->querySupportedParams(params);
+        }
+        return err;
     }
 
     virtual c2_status_t querySupportedValues(
             std::vector<C2FieldSupportedValuesQuery>& fields,
             c2_blocking_t mayBlock) const override {
-        return mIntf->querySupportedValues_vb(fields, mayBlock);
+        if (mMultiAccessUnitIntf == nullptr) {
+            return mIntf->querySupportedValues_vb(fields, mayBlock);
+        }
+        std::vector<C2FieldSupportedValuesQuery> dup = fields;
+        std::vector<C2FieldSupportedValuesQuery> queryArray[2];
+        std::map<C2ParamField, std::pair<uint32_t, size_t>> queryMap;
+        c2_status_t err = C2_OK;
+        for (int i = 0 ; i < fields.size(); i++) {
+            const C2ParamField &field = fields[i].field();
+            uint32_t queryArrayIdx = 1;
+            if (mMultiAccessUnitIntf->isValidField(fields[i].field())) {
+                queryArrayIdx = 0;
+            }
+            queryMap[field] = std::make_pair(
+                    queryArrayIdx, queryArray[queryArrayIdx].size());
+            queryArray[queryArrayIdx].push_back(fields[i]);
+        }
+        if (queryArray[0].size() > 0) {
+            err = mMultiAccessUnitIntf->querySupportedValues(queryArray[0], mayBlock);
+        }
+        if (queryArray[1].size() > 0) {
+            err = mIntf->querySupportedValues_vb(queryArray[1], mayBlock);
+        }
+        for (int i = 0 ; i < dup.size(); i++) {
+            auto it = queryMap.find(dup[i].field());
+            if (it != queryMap.end()) {
+                std::pair<uint32_t, size_t> queryid = it->second;
+                fields[i] = queryArray[queryid.first][queryid.second];
+            }
+        }
+        return err;
     }
 
 protected:
     std::shared_ptr<C2ComponentInterface> mIntf;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
 };
 
 } // unnamed namespace
@@ -85,10 +194,16 @@
 // ComponentInterface
 ComponentInterface::ComponentInterface(
         const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<ParameterCache>& cache):ComponentInterface(intf, nullptr, cache) {
+}
+
+ComponentInterface::ComponentInterface(
+        const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf,
         const std::shared_ptr<ParameterCache>& cache)
       : mInterface{intf},
         mConfigurable{SharedRefBase::make<CachedConfigurable>(
-                std::make_unique<CompIntf>(intf))} {
+                std::make_unique<CompIntf>(intf, multiAccessUnitIntf))} {
     mInit = mConfigurable->init(cache);
 }
 
diff --git a/media/codec2/hal/aidl/ComponentStore.cpp b/media/codec2/hal/aidl/ComponentStore.cpp
index 3f687b5..ea4d045 100644
--- a/media/codec2/hal/aidl/ComponentStore.cpp
+++ b/media/codec2/hal/aidl/ComponentStore.cpp
@@ -36,7 +36,7 @@
 #include <ostream>
 #include <sstream>
 
-#ifndef __ANDROID_APEX__
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
 #include <codec2/hidl/plugin/FilterPlugin.h>
 #include <dlfcn.h>
 #include <C2Config.h>
@@ -51,7 +51,7 @@
 namespace c2 {
 namespace utils {
 
-#ifndef __ANDROID_APEX__
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
 using ::android::DefaultFilterPlugin;
 using ::android::FilterWrapper;
 #endif
@@ -144,7 +144,15 @@
     ::android::SetPreferredCodec2ComponentStore(store);
 
     // Retrieve struct descriptors
-    mParamReflector = mStore->getParamReflector();
+    mParamReflectors.push_back(mStore->getParamReflector());
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
+    std::shared_ptr<C2ParamReflector> paramReflector =
+        GetFilterWrapper()->getParamReflector();
+    if (paramReflector != nullptr) {
+        ALOGD("[%s] added param reflector from filter wrapper", mStore->getName().c_str());
+        mParamReflectors.push_back(paramReflector);
+    }
+#endif
 
     // Retrieve supported parameters from store
     using namespace std::placeholders;
@@ -173,8 +181,7 @@
         std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
         auto it = mStructDescriptors.find(coreIndex);
         if (it == mStructDescriptors.end()) {
-            std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+            std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
             if (!structDesc) {
                 // All supported params must be described
                 res = C2_BAD_INDEX;
@@ -189,7 +196,7 @@
     return mParameterCache;
 }
 
-#ifndef __ANDROID_APEX__
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
 // static
 std::shared_ptr<FilterWrapper> ComponentStore::GetFilterWrapper() {
     constexpr const char kPluginPath[] = "libc2filterplugin.so";
@@ -199,6 +206,41 @@
 }
 #endif
 
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+        const std::shared_ptr<C2ComponentInterface> &c2interface) {
+    std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+    if (c2interface == nullptr) {
+        return nullptr;
+    }
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        c2_status_t err = C2_OK;
+        C2ComponentDomainSetting domain;
+        std::vector<std::unique_ptr<C2Param>> heapParams;
+        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+            std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+            bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+                if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+                    isComponentSupportsLargeAudioFrame = true;
+                    break;
+                }
+            }
+            if (!isComponentSupportsLargeAudioFrame) {
+                // TODO - b/342269852: MultiAccessUnitInterface also needs to take multiple
+                // param reflectors. Currently filters work on video domain only,
+                // and the MultiAccessUnitHelper is only enabled on audio domain;
+                // thus we pass the component's param reflector, which is mParamReflectors[0].
+                multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+                        c2interface,
+                        std::static_pointer_cast<C2ReflectorHelper>(mParamReflectors[0]));
+            }
+        }
+    }
+    return multiAccessUnitIntf;
+}
+
 // Methods from ::aidl::android::hardware::media::c2::IComponentStore
 ScopedAStatus ComponentStore::createComponent(
         const std::string& name,
@@ -206,14 +248,21 @@
         const std::shared_ptr<IClientManager>& pool,
         std::shared_ptr<IComponent> *component) {
 
+    if (!listener) {
+        ALOGE("createComponent(): listener is null");
+        return ScopedAStatus::fromServiceSpecificError(Status::BAD_VALUE);
+    }
+    if (!pool) {
+        ALOGE("createComponent(): pool is null");
+        return ScopedAStatus::fromServiceSpecificError(Status::BAD_VALUE);
+    }
+
     std::shared_ptr<C2Component> c2component;
     c2_status_t status =
             mStore->createComponent(name, &c2component);
 
-    ALOGD("createComponent(): listener(%d)", bool(listener));
-
     if (status == C2_OK) {
-#ifndef __ANDROID_APEX__
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
         c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
 #endif
         onInterfaceLoaded(c2component->intf());
@@ -247,11 +296,14 @@
     std::shared_ptr<C2ComponentInterface> c2interface;
     c2_status_t res = mStore->createInterface(name, &c2interface);
     if (res == C2_OK) {
-#ifndef __ANDROID_APEX__
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
         c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
 #endif
         onInterfaceLoaded(c2interface);
-        *intf = SharedRefBase::make<ComponentInterface>(c2interface, mParameterCache);
+        std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+                tryCreateMultiAccessUnitInterface(c2interface);
+        *intf = SharedRefBase::make<ComponentInterface>(
+                c2interface, multiAccessUnitIntf, mParameterCache);
         return ScopedAStatus::ok();
     }
     return ScopedAStatus::fromServiceSpecificError(res);
@@ -307,8 +359,7 @@
         if (item == mStructDescriptors.end()) {
             // not in the cache, and not known to be unsupported, query local reflector
             if (!mUnsupportedStructDescriptors.count(coreIndex)) {
-                std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+                std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
                 if (!structDesc) {
                     mUnsupportedStructDescriptors.emplace(coreIndex);
                 } else {
@@ -361,6 +412,16 @@
     return ScopedAStatus::ok();
 }
 
+std::shared_ptr<C2StructDescriptor> ComponentStore::describe(const C2Param::CoreIndex &index) {
+    for (const std::shared_ptr<C2ParamReflector> &reflector : mParamReflectors) {
+        std::shared_ptr<C2StructDescriptor> desc = reflector->describe(index);
+        if (desc) {
+            return desc;
+        }
+    }
+    return nullptr;
+}
+
 // Called from createComponent() after a successful creation of `component`.
 void ComponentStore::reportComponentBirth(Component* component) {
     ComponentStatus componentStatus;
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/Component.h b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
index 94b760f..9725bcf 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/Component.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
@@ -31,6 +31,8 @@
 #include <aidl/android/hardware/media/c2/IInputSurface.h>
 #include <aidl/android/hardware/media/c2/IInputSurfaceConnection.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
 #include <C2.h>
@@ -46,6 +48,8 @@
 namespace c2 {
 namespace utils {
 
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 struct ComponentStore;
 
@@ -85,6 +89,8 @@
     std::shared_ptr<C2Component> mComponent;
     std::shared_ptr<ComponentInterface> mInterface;
     std::shared_ptr<IComponentListener> mListener;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+    std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
     std::shared_ptr<ComponentStore> mStore;
     DefaultBufferPoolSender mBufferPoolSender;
 
@@ -102,6 +108,8 @@
 
     struct Listener;
 
+    friend struct MultiAccessUnitListener;
+
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
     static void OnBinderDied(void *cookie);
     static void OnBinderUnlinked(void *cookie);
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h
index 7723bee..bd19cd6 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h
@@ -22,11 +22,14 @@
 
 #include <aidl/android/hardware/media/c2/BnComponentInterface.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
 #include <C2.h>
 
 #include <memory>
+#include <set>
 
 namespace aidl {
 namespace android {
@@ -35,12 +38,16 @@
 namespace c2 {
 namespace utils {
 
-struct ComponentStore;
+using ::android::MultiAccessUnitInterface;
 
 struct ComponentInterface : public BnComponentInterface {
     ComponentInterface(
             const std::shared_ptr<C2ComponentInterface>& interface,
             const std::shared_ptr<ParameterCache>& cache);
+    ComponentInterface(
+        const std::shared_ptr<C2ComponentInterface>& interface,
+        const std::shared_ptr<MultiAccessUnitInterface>& largeBufferIntf,
+        const std::shared_ptr<ParameterCache>& cache);
     c2_status_t status() const;
     ::ndk::ScopedAStatus getConfigurable(
             std::shared_ptr<IConfigurable> *intf) override;
@@ -51,7 +58,6 @@
     c2_status_t mInit;
 };
 
-
 }  // namespace utils
 }  // namespace c2
 }  // namespace media
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
index 0698b0f..b2158a6 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
@@ -75,6 +75,9 @@
 
     static std::shared_ptr<::android::FilterWrapper> GetFilterWrapper();
 
+    std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+            const std::shared_ptr<C2ComponentInterface> &c2interface);
+
     // Methods from ::aidl::android::hardware::media::c2::IComponentStore.
     virtual ::ndk::ScopedAStatus createComponent(
             const std::string& name,
@@ -115,7 +118,7 @@
 
     c2_status_t mInit;
     std::shared_ptr<C2ComponentStore> mStore;
-    std::shared_ptr<C2ParamReflector> mParamReflector;
+    std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
 
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
@@ -132,6 +135,9 @@
     mutable std::mutex mComponentRosterMutex;
     std::map<Component*, ComponentStatus> mComponentRoster;
 
+    // describe from mParamReflectors
+    std::shared_ptr<C2StructDescriptor> describe(const C2Param::CoreIndex &index);
+
     // Called whenever Component is created.
     void reportComponentBirth(Component* component);
     // Called only from the destructor of Component.
diff --git a/media/codec2/hal/client/Android.bp b/media/codec2/hal/client/Android.bp
index af6f4ae..864eeb8 100644
--- a/media/codec2/hal/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -33,6 +33,13 @@
         "libcodec2-aidl-client-defaults",
     ],
 
+    // http://b/343951602#comment4 Explicitly set cpp_std to gnu++20.  The
+    // default inherited from libcodec2-impl-defaults sets it to gnu++17 which
+    // causes a segfault when mixing global std::string symbols built with
+    // gnu++17 and gnu++20.  TODO(b/343951602): clean this up after the
+    // gnu++17 opt-in in libcodec2-impl-defaults is removed.
+    cpp_std: "gnu++20",
+
     header_libs: [
         "libcodec2_internal", // private
     ],
diff --git a/media/codec2/hal/client/GraphicBufferAllocator.cpp b/media/codec2/hal/client/GraphicBufferAllocator.cpp
index 8f489ec..6a6da0f 100644
--- a/media/codec2/hal/client/GraphicBufferAllocator.cpp
+++ b/media/codec2/hal/client/GraphicBufferAllocator.cpp
@@ -17,7 +17,6 @@
 #define LOG_TAG "Codec2-GraphicBufferAllocator"
 
 
-#include <gui/IProducerListener.h>
 #include <media/stagefright/foundation/ADebug.h>
 
 #include <codec2/aidl/GraphicBufferAllocator.h>
@@ -25,25 +24,6 @@
 
 namespace aidl::android::hardware::media::c2::implementation {
 
-class OnBufferReleasedListener : public ::android::BnProducerListener {
-private:
-    uint32_t mGeneration;
-    std::weak_ptr<GraphicBufferAllocator> mAllocator;
-public:
-    OnBufferReleasedListener(
-            uint32_t generation,
-            const std::shared_ptr<GraphicBufferAllocator> &allocator)
-            : mGeneration(generation), mAllocator(allocator) {}
-    virtual ~OnBufferReleasedListener() = default;
-    virtual void onBufferReleased() {
-        auto p = mAllocator.lock();
-        if (p) {
-            p->onBufferReleased(mGeneration);
-        }
-    }
-    virtual bool needsReleaseNotify() { return true; }
-};
-
 ::ndk::ScopedAStatus GraphicBufferAllocator::allocate(
         const IGraphicBufferAllocator::Description& in_desc,
         IGraphicBufferAllocator::Allocation* _aidl_return) {
@@ -108,15 +88,14 @@
     mGraphicsTracker->stop();
 }
 
-const ::android::sp<::android::IProducerListener> GraphicBufferAllocator::createReleaseListener(
-      uint32_t generation) {
-    return new OnBufferReleasedListener(generation, ref<GraphicBufferAllocator>());
-}
-
 void GraphicBufferAllocator::onBufferReleased(uint32_t generation) {
     mGraphicsTracker->onReleased(generation);
 }
 
+void GraphicBufferAllocator::onBufferAttached(uint32_t generation) {
+    mGraphicsTracker->onAttached(generation);
+}
+
 c2_status_t GraphicBufferAllocator::allocate(
         uint32_t width, uint32_t height, ::android::PixelFormat format, uint64_t usage,
         AHardwareBuffer **buf, ::android::sp<::android::Fence> *fence) {
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 01b0678..8d9e76e 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -173,7 +173,7 @@
 }
 
 GraphicsTracker::GraphicsTracker(int maxDequeueCount)
-    : mBufferCache(new BufferCache()), mMaxDequeue{maxDequeueCount},
+    : mBufferCache(new BufferCache()), mNumDequeueing{0}, mMaxDequeue{maxDequeueCount},
     mMaxDequeueCommitted{maxDequeueCount},
     mDequeueable{maxDequeueCount},
     mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
@@ -235,6 +235,7 @@
         const sp<IGraphicBufferProducer>& igbp, uint32_t generation) {
     // TODO: wait until operations to previous IGBP is completed.
     std::shared_ptr<BufferCache> prevCache;
+    int prevDequeueRequested = 0;
     int prevDequeueCommitted;
 
     std::unique_lock<std::mutex> cl(mConfigLock);
@@ -243,6 +244,9 @@
         mInConfig = true;
         prevCache = mBufferCache;
         prevDequeueCommitted = mMaxDequeueCommitted;
+        if (mMaxDequeueRequested.has_value()) {
+            prevDequeueRequested = mMaxDequeueRequested.value();
+        }
     }
     // NOTE: Switching to the same surface is blocked from MediaCodec.
     // Switching to the same surface might not work if tried, since disconnect()
@@ -263,6 +267,11 @@
         mInConfig = false;
         return C2_BAD_VALUE;
     }
+    ALOGD("new surface in configuration: maxDequeueRequested(%d), maxDequeueCommitted(%d)",
+          prevDequeueRequested, prevDequeueCommitted);
+    if (prevDequeueRequested > 0 && prevDequeueRequested > prevDequeueCommitted) {
+        prevDequeueCommitted = prevDequeueRequested;
+    }
     if (igbp) {
         ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
         if (ret != ::android::OK) {
@@ -280,6 +289,34 @@
         std::unique_lock<std::mutex> l(mLock);
         mInConfig = false;
         mBufferCache = newCache;
+        // {@code dequeued} is the number of currently dequeued buffers.
+        // {@code prevDequeueCommitted} is the max number of buffers that can be
+        //  dequeued at any moment from the new surface.
+        // {@code newDequeueable} is hence the current # of dequeueable buffers
+        //  if no change occurs.
+        int dequeued = mDequeued.size() + mNumDequeueing;
+        int newDequeueable = prevDequeueCommitted - dequeued;
+        if (newDequeueable < 0) {
+            // This should not happen, but if it does, we respect the value
+            // and try to continue.
+            ALOGE("calculated new dequeueable is negative: %d max(%d),dequeued(%d)",
+                  newDequeueable, prevDequeueCommitted, dequeued);
+        }
+
+        if (mMaxDequeueRequested.has_value() && mMaxDequeueRequested == prevDequeueCommitted) {
+            mMaxDequeueRequested.reset();
+        }
+        mMaxDequeue = mMaxDequeueCommitted = prevDequeueCommitted;
+
+        int delta = newDequeueable - mDequeueable;
+        if (delta > 0) {
+            writeIncDequeueableLocked(delta);
+        } else if (delta < 0) {
+            drainDequeueableLocked(-delta);
+        }
+        ALOGV("new surfcace dequeueable %d(delta %d), maxDequeue %d",
+              newDequeueable, delta, mMaxDequeue);
+        mDequeueable = newDequeueable;
     }
     return C2_OK;
 }
@@ -529,6 +566,7 @@
             ALOGE("writing end for the waitable object seems to be closed");
             return C2_BAD_STATE;
         }
+        mNumDequeueing++;
         mDequeueable--;
         *cache = mBufferCache;
         return C2_OK;
@@ -543,6 +581,7 @@
                     bool cached, int slot, const sp<Fence> &fence,
                     std::shared_ptr<BufferItem> *pBuffer, bool *updateDequeue) {
     std::unique_lock<std::mutex> l(mLock);
+    mNumDequeueing--;
     if (res == C2_OK) {
         if (cached) {
             auto it = cache->mBuffers.find(slot);
@@ -563,6 +602,8 @@
         auto mapRet = mDequeued.emplace(bid, *pBuffer);
         CHECK(mapRet.second);
     } else {
+        ALOGD("allocate error(%d): Dequeued(%zu), Dequeuable(%d)",
+              (int)res, mDequeued.size(), mDequeueable + 1);
         if (adjustDequeueConfLocked(updateDequeue)) {
             return;
         }
@@ -623,13 +664,25 @@
 
     int slotId;
     uint64_t outBufferAge;
-    ::android::FrameEventHistoryDelta outTimestamps;
     sp<Fence> fence;
 
     ::android::status_t status = igbp->dequeueBuffer(
-            &slotId, &fence, width, height, format, usage, &outBufferAge, &outTimestamps);
+            &slotId, &fence, width, height, format, usage, &outBufferAge, nullptr);
     if (status < ::android::OK) {
-        ALOGE("dequeueBuffer() error %d", (int)status);
+        if (status == ::android::TIMED_OUT || status == ::android::WOULD_BLOCK) {
+            ALOGW("BQ might not be ready for dequeueBuffer()");
+            return C2_BLOCKING;
+        }
+        bool cacheExpired = false;
+        {
+            std::unique_lock<std::mutex> l(mLock);
+            cacheExpired = (mBufferCache.get() != cache.get());
+        }
+        if (cacheExpired) {
+            ALOGW("a new BQ is configured. dequeueBuffer() error %d", (int)status);
+            return C2_BLOCKING;
+        }
+        ALOGE("BQ in inconsistent status. dequeueBuffer() error %d", (int)status);
         return C2_CORRUPTED;
     }
     cache->waitOnSlot(slotId);
@@ -649,7 +702,8 @@
             ALOGE("allocate by dequeueBuffer() successful, but requestBuffer() failed %d",
                   status);
             igbp->cancelBuffer(slotId, fence);
-            return C2_CORRUPTED;
+            // This might be due to life-cycle end and/or surface switching.
+            return C2_BLOCKING;
         }
         *buffer = std::make_shared<BufferItem>(generation, slotId, realloced, fence);
         if (!*buffer) {
@@ -947,6 +1001,11 @@
     {
         std::unique_lock<std::mutex> l(mLock);
         if (mBufferCache->mGeneration == generation) {
+            if (mBufferCache->mNumAttached > 0) {
+                ALOGV("one onReleased() ignored for each prior onAttached().");
+                mBufferCache->mNumAttached--;
+                return;
+            }
             if (!adjustDequeueConfLocked(&updateDequeue)) {
                 mDequeueable++;
                 writeIncDequeueableLocked(1);
@@ -958,4 +1017,12 @@
     }
 }
 
+void GraphicsTracker::onAttached(uint32_t generation) {
+    std::unique_lock<std::mutex> l(mLock);
+    if (mBufferCache->mGeneration == generation) {
+        ALOGV("buffer attached");
+        mBufferCache->mNumAttached++;
+    }
+}
+
 } // namespace aidl::android::hardware::media::c2::implementation
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 9ed9458..a137dbb 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -649,7 +649,7 @@
         return C2_CORRUPTED;
     }
     size_t i = 0;
-    size_t numUpdatedStackParams = 0;
+    size_t numQueried = 0;
     for (auto it = paramPointers.begin(); it != paramPointers.end(); ) {
         C2Param* paramPointer = *it;
         if (numStackIndices > 0) {
@@ -678,7 +678,7 @@
                 continue;
             }
             if (stackParams[i++]->updateFrom(*paramPointer)) {
-                ++numUpdatedStackParams;
+                ++numQueried;
             } else {
                 LOG(WARNING) << "query -- param update failed: "
                                 "index = "
@@ -695,14 +695,11 @@
                                 "unexpected extra stack param.";
             } else {
                 heapParams->emplace_back(C2Param::Copy(*paramPointer));
+                ++numQueried;
             }
         }
         ++it;
     }
-    size_t numQueried = numUpdatedStackParams;
-    if (heapParams) {
-        numQueried += heapParams->size();
-    }
     if (status == C2_OK && indices.size() != numQueried) {
         status = C2_BAD_INDEX;
     }
@@ -1871,6 +1868,10 @@
     return nullptr;
 }
 
+bool Codec2Client::IsAidlSelected() {
+    return c2_aidl::utils::IsSelected();
+}
+
 // Codec2Client::Interface
 Codec2Client::Interface::Interface(const sp<HidlBase>& base)
       : Configurable{
@@ -2555,6 +2556,19 @@
     mOutputBufferQueue->onBufferReleased(generation);
 }
 
+void Codec2Client::Component::onBufferAttachedToOutputSurface(
+        uint32_t generation) {
+    if (mAidlBase) {
+        std::shared_ptr<AidlGraphicBufferAllocator> gba =
+                mGraphicBufferAllocators->current();
+        if (gba) {
+            gba->onBufferAttached(generation);
+        }
+        return;
+    }
+    mOutputBufferQueue->onBufferAttached(generation);
+}
+
 void Codec2Client::Component::holdIgbaBlocks(
         const std::list<std::unique_ptr<C2Work>>& workList) {
     if (!mAidlBase) {
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
index 902c53f..a797cb7 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
@@ -71,18 +71,6 @@
     void reset();
 
     /**
-     * Create a listener for buffer being released.
-     *
-     * Surface will register this listener and notify whenever the consumer
-     * releases a buffer.
-     *
-     * @param   generation        generation # for the BufferQueue.
-     * @return  IProducerListener can be used when connect# to Surface.
-     */
-    const ::android::sp<::android::IProducerListener> createReleaseListener(
-            uint32_t generation);
-
-    /**
      * Notifies a buffer being released.
      *
      * @param   generation        generation # for the BufferQueue.
@@ -90,6 +78,13 @@
     void onBufferReleased(uint32_t generation);
 
     /**
+     * Notifies a buffer being attached to the consumer.
+     *
+     * @param   generation        generation # for the BufferQueue.
+     */
+    void onBufferAttached(uint32_t generation);
+
+    /**
      * Allocates a buffer.
      *
      * @param   width             width of the requested buffer.
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index dd6c869..9a4fa12 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -143,6 +143,18 @@
     void onReleased(uint32_t generation);
 
     /**
+     * Notifies when a buffer is attached to Graphics (consumer side).
+     * If the generation does not match the current one, notifications via this interface
+     * are ignored. (In that case, the notifications come from one of the old surfaces
+     * that is no longer used.)
+     * One onReleased() should be ignored for each onAttached() when both
+     * carry the same generation parameter.
+     *
+     * @param[in] generation    generation id for specifying Graphics(BQ)
+     */
+    void onAttached(uint32_t generation);
+
+    /**
      * Get waitable fd for events.(allocate is ready, end of life cycle)
      *
      * @param[out]  pipeFd      a file descriptor created from pipe2()
@@ -217,9 +229,11 @@
 
         BlockedSlot mBlockedSlots[kNumSlots];
 
-        BufferCache() : mBqId{0ULL}, mGeneration{0}, mIgbp{nullptr} {}
+        std::atomic<int> mNumAttached;
+
+        BufferCache() : mBqId{0ULL}, mGeneration{0}, mIgbp{nullptr}, mNumAttached{0} {}
         BufferCache(uint64_t bqId, uint32_t generation, const sp<IGraphicBufferProducer>& igbp) :
-            mBqId{bqId}, mGeneration{generation}, mIgbp{igbp} {}
+            mBqId{bqId}, mGeneration{generation}, mIgbp{igbp}, mNumAttached{0} {}
 
         ~BufferCache();
 
@@ -234,6 +248,7 @@
     // Maps bufferId to buffer
     std::map<uint64_t, std::shared_ptr<BufferItem>> mDequeued;
     std::set<uint64_t> mDeallocating;
+    int mNumDequeueing;
 
     // These member variables are read and modified accessed as follows.
     // 1. mConfigLock being held
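To make the onAttached()/onReleased() pairing rule above concrete, a standalone model (a sketch with plain counters, not the real GraphicsTracker; the real class also adjusts the waitable event fd and the dequeue configuration):

#include <cstdio>

struct AttachReleaseModel {
    int numAttached = 0;   // plays the role of BufferCache::mNumAttached
    int dequeueable = 0;   // plays the role of GraphicsTracker::mDequeueable

    void onAttached() { ++numAttached; }
    void onReleased() {
        if (numAttached > 0) {  // release paired with a prior attach: swallowed
            --numAttached;
            return;
        }
        ++dequeueable;          // genuine release frees one dequeueable slot
    }
};

int main() {
    AttachReleaseModel m;
    m.onAttached();  // consumer attached a buffer
    m.onReleased();  // paired release: no new dequeueable slot
    m.onReleased();  // independent release: one more dequeueable slot
    std::printf("dequeueable=%d numAttached=%d\n", m.dequeueable, m.numAttached);  // 1 0
    return 0;
}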
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 3b7f7a6..413e92e 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -270,6 +270,9 @@
     static std::shared_ptr<InputSurface> CreateInputSurface(
             char const* serviceName = nullptr);
 
+    // Whether AIDL is selected.
+    static bool IsAidlSelected();
+
     // base and/or configurable cannot be null.
     Codec2Client(
             sp<HidlBase> const& base,
@@ -478,6 +481,10 @@
     void onBufferReleasedFromOutputSurface(
             uint32_t generation);
 
+    // Notify that a buffer is attached to the output surface.
+    void onBufferAttachedToOutputSurface(
+            uint32_t generation);
+
     // When the client received \p workList and the blocks inside
     // \p workList are IGBA based graphic blocks, specify the owner
     // as the current IGBA for the future operations.
diff --git a/media/codec2/hal/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
index fda34a8..ddb9855 100644
--- a/media/codec2/hal/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -69,6 +69,9 @@
     // update the number of dequeueable/allocatable buffers.
     void onBufferReleased(uint32_t generation);
 
+    // Notify that a buffer is attached to the output surface.
+    void onBufferAttached(uint32_t generation);
+
     // Retrieve frame event history from the output surface.
     void pollForRenderedFrames(FrameEventHistoryDelta* delta);
 
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 36322f5..54d78a0 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -542,6 +542,11 @@
     }
 }
 
+void OutputBufferQueue::onBufferAttached(uint32_t generation) {
+    // TODO
+    (void) generation;
+}
+
 void OutputBufferQueue::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
     if (mIgbp) {
         mIgbp->getFrameTimestamps(delta);
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 2aedd8b..0638363 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -11,6 +11,7 @@
 
     srcs: [
         "BufferTypes.cpp",
+        "MultiAccessUnitHelper.cpp",
     ],
 
     export_include_dirs: ["include/"],
@@ -26,7 +27,10 @@
         "libcodec2_vndk",
         "liblog",
         "libstagefright_foundation",
+        "server_configurable_flags",
+        "libaconfig_storage_read_api_cc",
     ],
+    static_libs: ["aconfig_mediacodec_flags_c_lib"],
 }
 
 cc_library_static {
@@ -49,6 +53,7 @@
     shared_libs: [
         "libbase",
         "server_configurable_flags",
+        "libaconfig_storage_read_api_cc",
     ],
 
     static_libs: ["aconfig_mediacodec_flags_c_lib"],
@@ -63,5 +68,6 @@
     shared_libs: [
         "libbase",
         "server_configurable_flags",
+        "libaconfig_storage_read_api_cc",
     ],
 }
diff --git a/media/codec2/hal/common/HalSelection.cpp b/media/codec2/hal/common/HalSelection.cpp
index 761a409..d3ea181 100644
--- a/media/codec2/hal/common/HalSelection.cpp
+++ b/media/codec2/hal/common/HalSelection.cpp
@@ -28,7 +28,12 @@
 namespace android {
 
 bool IsCodec2AidlHalSelected() {
-    if (!com::android::media::codec::flags::provider_->aidl_hal()) {
+    // For new devices with vendor software targeting 202404, we always want to
+    // use AIDL if it exists
+    constexpr int kAndroidApi202404 = 202404;
+    int vendorVersion = ::android::base::GetIntProperty("ro.vendor.api_level", -1);
+    if (!com::android::media::codec::flags::provider_->aidl_hal() &&
+        vendorVersion < kAndroidApi202404) {
         // Cannot select AIDL if not enabled
         return false;
     }
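Restating the gate added above as a sketch (a standalone restatement only; whatever checks follow in IsCodec2AidlHalSelected() still apply afterwards): AIDL remains eligible when either the aconfig flag is on or the vendor image targets API level 202404 or newer.

// Minimal sketch of the flag/api-level gate; not the real function.
constexpr int kAndroidApi202404 = 202404;

bool aidlGateOpenSketch(bool aidlHalFlag, int vendorApiLevel) {
    return aidlHalFlag || vendorApiLevel >= kAndroidApi202404;
}
// e.g. aidlGateOpenSketch(false, 202404) == true; aidlGateOpenSketch(false, 202304) == false.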
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
new file mode 100644
index 0000000..b287b91
--- /dev/null
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -0,0 +1,856 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-MultiAccessUnitHelper"
+#include <android-base/logging.h>
+
+#include <com_android_media_codec_flags.h>
+
+#include <codec2/common/MultiAccessUnitHelper.h>
+#include <android-base/properties.h>
+
+#include <C2BufferPriv.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+
+static inline constexpr uint32_t MAX_SUPPORTED_SIZE = (10 * 512000 * 8 * 2u);
+namespace android {
+
+static C2R MultiAccessUnitParamsSetter(
+        bool mayBlock, C2InterfaceHelper::C2P<C2LargeFrame::output> &me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (!me.F(me.v.maxSize).supportsAtAll(me.v.maxSize)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.maxSize)));
+    } else if (!me.F(me.v.thresholdSize).supportsAtAll(me.v.thresholdSize)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.thresholdSize)));
+    } else if (me.v.maxSize < me.v.thresholdSize) {
+        me.set().maxSize = me.v.thresholdSize;
+    }
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    res.retrieveFailures(&failures);
+    if (!failures.empty()) {
+        me.set().maxSize = 0;
+        me.set().thresholdSize = 0;
+    }
+    return res;
+}
+
+MultiAccessUnitInterface::MultiAccessUnitInterface(
+        const std::shared_ptr<C2ComponentInterface>& interface,
+        std::shared_ptr<C2ReflectorHelper> helper)
+        : C2InterfaceHelper(helper), mC2ComponentIntf(interface) {
+    setDerivedInstance(this);
+    addParameter(
+            DefineParam(mLargeFrameParams, C2_PARAMKEY_OUTPUT_LARGE_FRAME)
+            .withDefault(new C2LargeFrame::output(0u, 0, 0))
+            .withFields({
+                C2F(mLargeFrameParams, maxSize).inRange(
+                        0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE)),
+                C2F(mLargeFrameParams, thresholdSize).inRange(
+                        0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE))
+            })
+            .withSetter(MultiAccessUnitParamsSetter)
+            .build());
+    std::vector<std::shared_ptr<C2ParamDescriptor>> supportedParams;
+    querySupportedParams(&supportedParams);
+    // Add to a set to do interface separation in query/config
+    for (std::shared_ptr<C2ParamDescriptor> &desc : supportedParams) {
+        mSupportedParamIndexSet.insert(desc->index());
+    }
+    mParamFields.emplace_back(mLargeFrameParams.get(), &(mLargeFrameParams.get()->maxSize));
+    mParamFields.emplace_back(mLargeFrameParams.get(), &(mLargeFrameParams.get()->thresholdSize));
+
+    if (mC2ComponentIntf) {
+        c2_status_t err = mC2ComponentIntf->query_vb({&mKind}, {}, C2_MAY_BLOCK, nullptr);
+    }
+}
+
+bool MultiAccessUnitInterface::isValidField(const C2ParamField &field) const {
+    return (std::find(mParamFields.begin(), mParamFields.end(), field) != mParamFields.end());
+}
+bool MultiAccessUnitInterface::isParamSupported(C2Param::Index index) {
+    return (mSupportedParamIndexSet.count(index) != 0);
+}
+
+C2LargeFrame::output MultiAccessUnitInterface::getLargeFrameParam() const {
+    return *mLargeFrameParams;
+}
+
+C2Component::kind_t MultiAccessUnitInterface::kind() const {
+    return (C2Component::kind_t)(mKind.value);
+}
+
+bool MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
+        uint32_t * const sampleRate_, uint32_t * const channelCount_) const {
+    if (sampleRate_ == nullptr || channelCount_ == nullptr) {
+        return false;
+    }
+    if (mC2ComponentIntf) {
+        C2StreamSampleRateInfo::output sampleRate;
+        C2StreamChannelCountInfo::output channelCount;
+        c2_status_t res = mC2ComponentIntf->query_vb(
+                {&sampleRate, &channelCount}, {}, C2_MAY_BLOCK, nullptr);
+        if (res == C2_OK && sampleRate.value > 0 && channelCount.value > 0) {
+            *sampleRate_ = sampleRate.value;
+            *channelCount_ = channelCount.value;
+            return true;
+        }
+    }
+    return false;
+}
+
+bool MultiAccessUnitInterface::getMaxInputSize(
+        C2StreamMaxBufferSizeInfo::input* const maxInputSize) const {
+    if (maxInputSize == nullptr || mC2ComponentIntf == nullptr) {
+        return false;
+    }
+    c2_status_t err = mC2ComponentIntf->query_vb({maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+    if (err != C2_OK) {
+        return false;
+    }
+    return true;
+}
+
+//C2MultiAccessUnitBuffer
+class C2MultiAccessUnitBuffer : public C2Buffer {
+    public:
+        explicit C2MultiAccessUnitBuffer(
+                const std::vector<C2ConstLinearBlock> &blocks):
+                C2Buffer(blocks) {
+        }
+};
+
+//MultiAccessUnitHelper
+MultiAccessUnitHelper::MultiAccessUnitHelper(
+        const std::shared_ptr<MultiAccessUnitInterface>& intf,
+        std::shared_ptr<C2BlockPool>& linearPool):
+        mMultiAccessOnOffAllowed(true),
+        mInit(false),
+        mInterface(intf),
+        mLinearPool(linearPool) {
+    if (mLinearPool) {
+        mInit = true;
+    }
+}
+
+MultiAccessUnitHelper::~MultiAccessUnitHelper() {
+    std::unique_lock<std::mutex> l(mLock);
+    mFrameHolder.clear();
+}
+
+bool MultiAccessUnitHelper::isEnabledOnPlatform() {
+    bool result = com::android::media::codec::flags::provider_->large_audio_frame();
+    if (!result) {
+        return false;
+    }
+    // TODO: remove this before launch
+    result = ::android::base::GetBoolProperty("debug.media.c2.large.audio.frame", true);
+    LOG(DEBUG) << "MultiAccessUnitHelper " << (result ? "enabled" : "disabled");
+    return result;
+}
+
+bool MultiAccessUnitHelper::tryReconfigure(const std::unique_ptr<C2Param> &param) {
+    C2LargeFrame::output *lfp = C2LargeFrame::output::From(param.get());
+    if (lfp == nullptr) {
+        return false;
+    }
+    bool isDecoder = (mInterface->kind() == C2Component::KIND_DECODER) ? true : false;
+    if (!isDecoder) {
+        C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+        if (!mInterface->getMaxInputSize(&maxInputSize)) {
+            LOG(ERROR) << "Error in reconfigure: "
+                    << "Encoder failed to respond with a valid max input size";
+            return false;
+        }
+        // This assumes a worst-case compression ratio of 1:1.
+        // In no case should the encoder produce more output than
+        // what is provided to the encoder in a single call.
+        if (lfp->maxSize < maxInputSize.value) {
+            lfp->maxSize = maxInputSize.value;
+        }
+    }
+    lfp->maxSize =
+            (lfp->maxSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+                    (lfp->maxSize < 0) ? 0 : lfp->maxSize;
+    lfp->thresholdSize =
+            (lfp->thresholdSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+                    (lfp->thresholdSize < 0) ? 0 : lfp->thresholdSize;
+    C2LargeFrame::output currentConfig = mInterface->getLargeFrameParam();
+    if ((currentConfig.maxSize == lfp->maxSize)
+            && (currentConfig.thresholdSize == lfp->thresholdSize)) {
+        // no need to update
+        return false;
+    }
+    if (isDecoder) {
+        bool isOnOffTransition =
+                (currentConfig.maxSize == 0 && lfp->maxSize != 0)
+                || (currentConfig.maxSize != 0 && lfp->maxSize == 0);
+            if (isOnOffTransition && !mMultiAccessOnOffAllowed) {
+                LOG(ERROR) << "Setting new configs not allowed"
+                        << " MaxSize: " << lfp->maxSize
+                        << " ThresholdSize: " << lfp->thresholdSize;
+                return false;
+            }
+    }
+    std::vector<C2Param*> config{lfp};
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    if (C2_OK != mInterface->config(config, C2_MAY_BLOCK, &failures)) {
+        LOG(ERROR) << "Dynamic config not applied for"
+                << " MaxSize: " << lfp->maxSize
+                << " ThresholdSize: " << lfp->thresholdSize;
+        return false;
+    }
+    LOG(DEBUG) << "Updated from param maxSize "
+            << lfp->maxSize
+            << " ThresholdSize " << lfp->thresholdSize;
+    return true;
+}
+
+std::shared_ptr<MultiAccessUnitInterface> MultiAccessUnitHelper::getInterface() {
+    return mInterface;
+}
+
+bool MultiAccessUnitHelper::getStatus() {
+    return mInit;
+}
+
+void MultiAccessUnitHelper::reset() {
+    std::lock_guard<std::mutex> l(mLock);
+    mFrameHolder.clear();
+    mMultiAccessOnOffAllowed = true;
+}
+
+c2_status_t MultiAccessUnitHelper::error(
+        std::list<std::unique_ptr<C2Work>> * const worklist) {
+    if (worklist == nullptr) {
+        LOG(ERROR) << "Provided null worklist for error()";
+        mFrameHolder.clear();
+        return C2_OK;
+    }
+    std::unique_lock<std::mutex> l(mLock);
+    for (auto frame = mFrameHolder.begin(); frame != mFrameHolder.end(); frame++) {
+        if (frame->mLargeWork) {
+            finalizeWork(*frame, 0, true);
+            worklist->push_back(std::move(frame->mLargeWork));
+            frame->reset();
+        }
+    }
+    mFrameHolder.clear();
+    mMultiAccessOnOffAllowed = true;
+    return C2_OK;
+}
+
+c2_status_t MultiAccessUnitHelper::flush(
+        std::list<std::unique_ptr<C2Work>>* const c2flushedWorks) {
+    c2_status_t c2res = C2_OK;
+    std::lock_guard<std::mutex> l(mLock);
+    for (auto iterWork = c2flushedWorks->begin() ; iterWork != c2flushedWorks->end(); ) {
+        bool foundFlushedFrame = false;
+        std::list<MultiAccessUnitInfo>::iterator frame =
+                mFrameHolder.begin();
+        while (frame != mFrameHolder.end() && !foundFlushedFrame) {
+            auto it = frame->mComponentFrameIds.find(
+                    (*iterWork)->input.ordinal.frameIndex.peekull());
+            if (it != frame->mComponentFrameIds.end()) {
+                LOG(DEBUG) << "Multi access-unit flush "
+                        << (*iterWork)->input.ordinal.frameIndex.peekull()
+                        << " with " << frame->inOrdinal.frameIndex.peekull();
+                (*iterWork)->input.ordinal.frameIndex = frame->inOrdinal.frameIndex;
+                frame = mFrameHolder.erase(frame);
+                foundFlushedFrame = true;
+            } else {
+                ++frame;
+            }
+        }
+        if (!foundFlushedFrame) {
+            iterWork = c2flushedWorks->erase(iterWork);
+        } else {
+            ++iterWork;
+        }
+    }
+    return c2res;
+}
+
+c2_status_t MultiAccessUnitHelper::scatter(
+        std::list<std::unique_ptr<C2Work>> &largeWork,
+        std::list<std::list<std::unique_ptr<C2Work>>>* const processedWork) {
+    LOG(DEBUG) << "Multiple access-unit: scatter";
+    if (processedWork == nullptr) {
+        LOG(ERROR) << "MultiAccessUnitHelper provided with no work list";
+        return C2_CORRUPTED;
+    }
+    for (std::unique_ptr<C2Work>& w : largeWork) {
+        std::list<std::unique_ptr<C2Work>> sliceWork;
+        C2WorkOrdinalStruct inputOrdinal = w->input.ordinal;
+        // To hold correspondence and processing bits b/w input and output
+        MultiAccessUnitInfo frameInfo(inputOrdinal);
+        std::set<uint64_t>& frameSet = frameInfo.mComponentFrameIds;
+        uint64_t newFrameIdx = mFrameIndex++;
+        // TODO: Do not split buffers if the component inherently supports multiple frames.
+        // If that is the case, only replace the frame index.
+        auto cloneInputWork = [&frameInfo, &newFrameIdx, this]
+                (std::unique_ptr<C2Work>& inWork, uint32_t flags) -> std::unique_ptr<C2Work> {
+            std::unique_ptr<C2Work> newWork(new C2Work);
+            newWork->input.flags = (C2FrameData::flags_t)flags;
+            newWork->input.ordinal = inWork->input.ordinal;
+            newWork->input.ordinal.frameIndex = newFrameIdx;
+            if (!inWork->input.configUpdate.empty()) {
+                for (std::unique_ptr<C2Param>& param : inWork->input.configUpdate) {
+                    if (param->index() == C2LargeFrame::output::PARAM_TYPE) {
+                        if (tryReconfigure(param)) {
+                            frameInfo.mConfigUpdate.push_back(std::move(param));
+                        }
+                    } else {
+                        newWork->input.configUpdate.push_back(std::move(param));
+                    }
+                }
+                inWork->input.configUpdate.clear();
+            }
+            newWork->input.infoBuffers = (inWork->input.infoBuffers);
+            if (!inWork->worklets.empty() && inWork->worklets.front() != nullptr) {
+                newWork->worklets.emplace_back(new C2Worklet);
+                newWork->worklets.front()->component = inWork->worklets.front()->component;
+                std::vector<std::unique_ptr<C2Tuning>> tunings;
+                for (std::unique_ptr<C2Tuning>& tuning : inWork->worklets.front()->tunings) {
+                    tunings.push_back(
+                            std::unique_ptr<C2Tuning>(
+                                    static_cast<C2Tuning*>(
+                                            C2Param::Copy(*(tuning.get())).release())));
+                }
+                newWork->worklets.front()->tunings = std::move(tunings);
+            }
+            return newWork;
+        };
+        if (w->input.buffers.empty()
+                || (w->input.buffers.front() == nullptr)
+                || (!w->input.buffers.front()->hasInfo(
+                        C2AccessUnitInfos::input::PARAM_TYPE))) {
+            LOG(DEBUG) << "Empty or MultiAU info buffer scatter frames with frameIndex "
+                    << inputOrdinal.frameIndex.peekull()
+                    << ") -> newFrameIndex " << newFrameIdx
+                    <<" : input ts " << inputOrdinal.timestamp.peekull();
+            sliceWork.push_back(cloneInputWork(w, w->input.flags));
+            if (!w->input.buffers.empty() && w->input.buffers.front() != nullptr) {
+                sliceWork.back()->input.buffers = std::move(w->input.buffers);
+            }
+            frameSet.insert(newFrameIdx);
+            processedWork->push_back(std::move(sliceWork));
+        }  else {
+            const std::vector<std::shared_ptr<C2Buffer>>& inBuffers = w->input.buffers;
+            if (inBuffers.front()->data().linearBlocks().size() == 0) {
+                LOG(ERROR) << "ERROR: Work has Large frame info but has no linear blocks.";
+                return C2_CORRUPTED;
+            }
+            frameInfo.mInputC2Ref = inBuffers;
+            const std::vector<C2ConstLinearBlock>& multiAU =
+                    inBuffers.front()->data().linearBlocks();
+            std::shared_ptr<const C2AccessUnitInfos::input> auInfo =
+                    std::static_pointer_cast<const C2AccessUnitInfos::input>(
+                    w->input.buffers.front()->getInfo(C2AccessUnitInfos::input::PARAM_TYPE));
+            uint32_t offset = 0; uint32_t multiAUSize = multiAU.front().size();
+            bool sendEos = false;
+            for (int idx = 0; idx < auInfo->flexCount(); ++idx) {
+                std::vector<C2ConstLinearBlock> au;
+                const C2AccessUnitInfosStruct &info = auInfo->m.values[idx];
+                sendEos |= (info.flags & C2FrameData::FLAG_END_OF_STREAM);
+                std::unique_ptr<C2Work> newWork = cloneInputWork(w, info.flags);
+                frameSet.insert(newFrameIdx);
+                newFrameIdx = mFrameIndex++;
+                newWork->input.ordinal.timestamp = info.timestamp;
+                au.push_back(multiAU.front().subBlock(offset, info.size));
+                if ((offset + info.size) > multiAUSize) {
+                    LOG(ERROR) << "ERROR: access-unit offset > buffer size"
+                            << " current offset " << (offset + info.size)
+                            << " buffer size " << multiAUSize;
+                    return C2_CORRUPTED;
+                }
+                newWork->input.buffers.push_back(
+                        std::shared_ptr<C2Buffer>(new C2MultiAccessUnitBuffer(au)));
+                LOG(DEBUG) << "Frame scatter queuing frames WITH info in ordinal "
+                        << inputOrdinal.frameIndex.peekull()
+                        << " info.size " << info.size
+                        << " : TS " << newWork->input.ordinal.timestamp.peekull()
+                        << " with index " << newFrameIdx - 1;
+                // add to worklist
+                sliceWork.push_back(std::move(newWork));
+                processedWork->push_back(std::move(sliceWork));
+                offset += info.size;
+            }
+            mFrameIndex--;
+            if (!sendEos && (w->input.flags & C2FrameData::FLAG_END_OF_STREAM)) {
+                if (!processedWork->empty()) {
+                    std::list<std::unique_ptr<C2Work>> &sliceWork = processedWork->back();
+                    if (!sliceWork.empty()) {
+                        std::unique_ptr<C2Work> &work = sliceWork.back();
+                        if (work) {
+                            work->input.flags = C2FrameData::FLAG_END_OF_STREAM;
+                        }
+                    }
+                }
+            }
+        }
+        if (!processedWork->empty()) {
+            C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
+            frameInfo.mLargeFrameTuning = multiAccessParams;
+            std::lock_guard<std::mutex> l(mLock);
+            mFrameHolder.push_back(std::move(frameInfo));
+            mMultiAccessOnOffAllowed = false;
+        }
+    }
+    return C2_OK;
+}
+
+c2_status_t MultiAccessUnitHelper::gather(
+        std::list<std::unique_ptr<C2Work>> &c2workItems,
+        std::list<std::unique_ptr<C2Work>>* const processedWork) {
+    LOG(DEBUG) << "Multi access-unit gather process";
+    if (processedWork == nullptr) {
+        LOG(ERROR) << "Nothing provided for processed work";
+        return C2_CORRUPTED;
+    }
+    auto addOutWork = [&processedWork](std::unique_ptr<C2Work>& work) {
+        processedWork->push_back(std::move(work));
+    };
+    {
+        std::lock_guard<std::mutex> l(mLock);
+        for (auto& work : c2workItems) {
+            LOG(DEBUG) << "FrameHolder Size: " << mFrameHolder.size();
+            uint64_t thisFrameIndex = work->input.ordinal.frameIndex.peekull();
+            bool removeEntry = work->worklets.empty()
+                    || !work->worklets.front()
+                    || (work->worklets.front()->output.flags
+                        & C2FrameData::FLAG_INCOMPLETE) == 0;
+            bool foundFrame = false;
+            std::list<MultiAccessUnitInfo>::iterator frame =
+                    mFrameHolder.begin();
+            while (!foundFrame && frame != mFrameHolder.end()) {
+                c2_status_t res = C2_OK;
+                auto it = frame->mComponentFrameIds.find(thisFrameIndex);
+                if (it != frame->mComponentFrameIds.end()) {
+                    foundFrame = true;
+                    LOG(DEBUG) << "onWorkDone (frameIndex " << thisFrameIndex
+                            << " worklstsSze " << work->worklets.size()
+                            << ") -> frameIndex " << frame->inOrdinal.frameIndex.peekull();
+                    if (work->result != C2_OK
+                            || work->worklets.empty()
+                            || !work->worklets.front()
+                            || frame->mLargeFrameTuning.maxSize == 0) {
+                        if (removeEntry) {
+                            frame->mComponentFrameIds.erase(it);
+                            removeEntry = false;
+                        }
+                        if (frame->mLargeWork) {
+                            finalizeWork(*frame);
+                            addOutWork(frame->mLargeWork);
+                            frame->reset();
+                        }
+                        c2_status_t workResult = work->result;
+                        frame->mLargeWork = std::move(work);
+                        frame->mLargeWork->input.ordinal.frameIndex =
+                                frame->inOrdinal.frameIndex;
+                        finalizeWork(*frame);
+                        addOutWork(frame->mLargeWork);
+                        frame->reset();
+                        if (workResult != C2_OK) {
+                            frame->mComponentFrameIds.clear();
+                            removeEntry = false;
+                        }
+                    } else if (C2_OK != (res = processWorklets(*frame, work, addOutWork))) {
+                        // Upon an error while processing worklets, we return the work with
+                        // its result set to the error. This indicates the error to the
+                        // framework so that it can do whatever is necessary to handle it.
+                        LOG(DEBUG) << "Error while processing worklets";
+                        if (frame->mLargeWork == nullptr) {
+                            frame->mLargeWork.reset(new C2Work);
+                            frame->mLargeWork->input.ordinal = frame->inOrdinal;
+                            frame->mLargeWork->input.ordinal.frameIndex =
+                                    frame->inOrdinal.frameIndex;
+                        }
+                        frame->mLargeWork->result = res;
+                        finalizeWork(*frame);
+                        addOutWork(frame->mLargeWork);
+                        frame->reset();
+                        frame->mComponentFrameIds.clear();
+                        removeEntry = false;
+                    }
+                    if (removeEntry) {
+                        LOG(DEBUG) << "Removing entry: " << thisFrameIndex
+                                << " -> " << frame->inOrdinal.frameIndex.peekull();
+                        frame->mComponentFrameIds.erase(it);
+                    }
+                    // This takes care of the last bytes and decides whether to send
+                    // the work with FLAG_INCOMPLETE or not.
+                    if ((frame->mWview
+                            && (frame->mWview->offset() >= frame->mLargeFrameTuning.thresholdSize))
+                            || frame->mComponentFrameIds.empty()) {
+                        if (frame->mLargeWork) {
+                            frame->mLargeWork->result = C2_OK;
+                            finalizeWork(*frame);
+                            addOutWork(frame->mLargeWork);
+                            frame->reset();
+                        }
+                    }
+                    if (frame->mComponentFrameIds.empty()) {
+                        LOG(DEBUG) << "This frame is finished ID " << thisFrameIndex;
+                        frame = mFrameHolder.erase(frame);
+                        continue;
+                    }
+                } else {
+                    LOG(DEBUG) << "Received an out-of-order output " << thisFrameIndex
+                            << " expected: " <<mFrameHolder.front().inOrdinal.frameIndex.peekull();
+                }
+                frame++;
+            }
+            if (!foundFrame) {
+                LOG(ERROR) <<" Error: Frame Holder reports no frame " << thisFrameIndex;
+            }
+        }
+    }
+    return C2_OK;
+}
+
+c2_status_t MultiAccessUnitHelper::createLinearBlock(MultiAccessUnitInfo &frame) {
+    if (!mInit) {
+        LOG(ERROR) << "Large buffer allocator failed";
+        return C2_NO_MEMORY;
+    }
+    C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+    uint32_t maxOutSize = frame.mLargeFrameTuning.maxSize;
+    c2_status_t err = mLinearPool->fetchLinearBlock(maxOutSize, usage, &frame.mBlock);
+    if (err != C2_OK) {
+        LOG(ERROR) << "Error allocating Multi access-unit Buffer";
+        return err;
+    }
+    LOG(DEBUG) << "Allocated block with offset : " << frame.mBlock->offset()
+            << " size " << frame.mBlock->size() << " Capacity " << frame.mBlock->capacity();
+    frame.mWview = std::make_shared<C2WriteView>(frame.mBlock->map().get());
+    LOG(DEBUG) << "Allocated buffer : requested size : " <<
+            frame.mLargeFrameTuning.maxSize
+            << " alloc size " << frame.mWview->size();
+    return C2_OK;
+}
+
+/*
+ * For every work from the component, we try to do aggregation of work here.
+*/
+c2_status_t MultiAccessUnitHelper::processWorklets(MultiAccessUnitInfo &frame,
+        std::unique_ptr<C2Work>& work,
+        const std::function <void(std::unique_ptr<C2Work>&)>& addWork) {
+    // This will allocate work, worklet, c2Block
+    auto allocateWork = [&](MultiAccessUnitInfo &frame,
+            bool allocateWorket = false,
+            bool allocateBuffer = false) {
+        c2_status_t ret = C2_OK;
+        if (frame.mLargeWork == nullptr) {
+            frame.mLargeWork.reset(new C2Work);
+            frame.mLargeWork->result = C2_OK;
+            frame.mLargeWork->input.flags = (C2FrameData::flags_t)0;
+            frame.mLargeWork->input.ordinal = frame.inOrdinal;
+            frame.mLargeWork->input.ordinal.frameIndex = frame.inOrdinal.frameIndex;
+        }
+        if (allocateWorket) {
+            if (frame.mLargeWork->worklets.size() == 0) {
+                frame.mLargeWork->worklets.emplace_back(new C2Worklet);
+                frame.mLargeWork->worklets.back()->output.flags = (C2FrameData::flags_t)0;
+            }
+        }
+        if (allocateBuffer) {
+            if (frame.mWview == nullptr) {
+                ret = createLinearBlock(frame);
+            }
+        }
+        return ret;
+    };
+    // we will only have one worklet.
+    bool foundEndOfStream = false;
+    for (auto worklet = work->worklets.begin();
+             worklet != work->worklets.end() && (*worklet) != nullptr; ++worklet) {
+        uint32_t flagsForNoCopy = C2FrameData::FLAG_DROP_FRAME
+                | C2FrameData::FLAG_DISCARD_FRAME
+                | C2FrameData::FLAG_CORRUPT;
+        if ((*worklet)->output.flags & flagsForNoCopy) {
+            if (frame.mLargeWork) {
+                finalizeWork(frame);
+                addWork(frame.mLargeWork);
+                frame.reset();
+            }
+            frame.mLargeWork = std::move(work);
+            frame.mLargeWork->input.ordinal.frameIndex = frame.inOrdinal.frameIndex;
+            finalizeWork(frame, (*worklet)->output.flags, true);
+            addWork(frame.mLargeWork);
+            frame.reset();
+            return C2_OK;
+        }
+        int64_t sampleTimeUs = 0;
+        uint32_t frameSize = 0;
+        uint32_t sampleRate = 0;
+        uint32_t channelCount = 0;
+        if (mInterface->getDecoderSampleRateAndChannelCount(&sampleRate, &channelCount)) {
+            sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
+            frameSize = channelCount * 2;
+            if (mInterface->kind() == C2Component::KIND_DECODER) {
+                frame.mLargeFrameTuning.maxSize =
+                        (frame.mLargeFrameTuning.maxSize / frameSize) * frameSize;
+                frame.mLargeFrameTuning.thresholdSize =
+                        (frame.mLargeFrameTuning.thresholdSize / frameSize) * frameSize;
+            }
+        }
+        c2_status_t c2ret = allocateWork(frame, true);
+        if (c2ret != C2_OK) {
+            return c2ret;
+        }
+        uint32_t flags = work->input.flags;
+        flags |= frame.mLargeWork->input.flags;
+        frame.mLargeWork->input.flags = (C2FrameData::flags_t)flags;
+        C2FrameData& outputFramedata = frame.mLargeWork->worklets.front()->output;
+        if (!(*worklet)->output.configUpdate.empty()) {
+            for (auto& configUpdate : (*worklet)->output.configUpdate) {
+                outputFramedata.configUpdate.push_back(std::move(configUpdate));
+            }
+            (*worklet)->output.configUpdate.clear();
+        }
+        outputFramedata.infoBuffers.insert(outputFramedata.infoBuffers.begin(),
+                (*worklet)->output.infoBuffers.begin(),
+                (*worklet)->output.infoBuffers.end());
+
+        LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
+                << " threshold " << frame.mLargeFrameTuning.thresholdSize;
+        LOG(DEBUG) << "This worklet has " << (*worklet)->output.buffers.size() << " buffers"
+                << " ts: " << (*worklet)->output.ordinal.timestamp.peekull();
+        int64_t workletTimestamp = (*worklet)->output.ordinal.timestamp.peekull();
+        int64_t timestamp = workletTimestamp;
+        uint32_t flagsForCopy =  ((*worklet)->output.flags) & C2FrameData::FLAG_CODEC_CONFIG;
+        for (int bufIdx = 0; bufIdx < (*worklet)->output.buffers.size(); ++bufIdx) {
+            std::shared_ptr<C2Buffer>& buffer = (*worklet)->output.buffers[bufIdx];
+            if (!buffer || buffer->data().linearBlocks().empty()) {
+                continue;
+            }
+            const std::vector<C2ConstLinearBlock>& blocks = buffer->data().linearBlocks();
+            if (blocks.size() > 0) {
+                uint32_t inputOffset = 0;
+                uint32_t inputSize = blocks.front().size();
+                frame.mInfos.insert(frame.mInfos.end(),
+                        buffer->info().begin(), buffer->info().end());
+                if (frameSize != 0 && (mInterface->kind() == C2Component::KIND_DECODER)) {
+                    // For decoders we only split on multiples of the PCM frame size
+                    // (channelCount * 2 bytes for 16-bit samples)
+                    inputSize -= (inputSize % frameSize);
+                }
+                while (inputOffset < inputSize) {
+                    if ((frame.mWview != nullptr)
+                            && (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize)) {
+                        frame.mLargeWork->result = C2_OK;
+                        finalizeWork(frame, flagsForCopy);
+                        addWork(frame.mLargeWork);
+                        frame.reset();
+                    }
+                    if (mInterface->kind() == C2Component::KIND_ENCODER) {
+                        if (inputSize > frame.mLargeFrameTuning.maxSize) {
+                            LOG(WARNING) << "WARNING Encoder:"
+                                    << " Output buffer too small for configuration"
+                                    << " configured max size " << frame.mLargeFrameTuning.maxSize
+                                    << " access unit size " << inputSize;
+                            if (frame.mLargeWork && (frame.mWview && frame.mWview->offset() > 0)) {
+                                frame.mLargeWork->result = C2_OK;
+                                finalizeWork(frame, flagsForCopy);
+                                addWork(frame.mLargeWork);
+                                frame.reset();
+                            }
+                            frame.mLargeFrameTuning.maxSize = inputSize;
+                        } else if ((frame.mWview != nullptr)
+                                && (inputSize > frame.mWview->size())) {
+                            LOG(DEBUG) << "Enc: Large frame hitting bufer limit, current size "
+                                << frame.mWview->offset();
+                            if (frame.mWview->offset() > 0) {
+                                frame.mLargeWork->result = C2_OK;
+                                finalizeWork(frame, flagsForCopy);
+                                addWork(frame.mLargeWork);
+                                frame.reset();
+                            }
+                        }
+                    }
+                    allocateWork(frame, true, true);
+                    uint32_t flags = work->input.flags;
+                    flags |= frame.mLargeWork->input.flags;
+                    frame.mLargeWork->input.flags = (C2FrameData::flags_t)flags;
+                    C2ReadView rView = blocks.front().map().get();
+                    if (rView.error()) {
+                        LOG(ERROR) << "Buffer read view error";
+                        frame.mLargeWork->result = rView.error();
+                        frame.mLargeWork->worklets.clear();
+                        finalizeWork(frame, 0, true);
+                        addWork(frame.mLargeWork);
+                        frame.reset();
+                        return C2_NO_MEMORY;
+                    }
+                    uint32_t toCopy = 0;
+                    if (mInterface->kind() == C2Component::KIND_ENCODER) {
+                        toCopy = inputSize;
+                    } else {
+                        toCopy = c2_min(frame.mWview->size(), (inputSize - inputOffset));
+                        timestamp = workletTimestamp + inputOffset * sampleTimeUs;
+                        LOG(DEBUG) << "ts " << timestamp
+                                << " copiedOutput " << inputOffset
+                                << " sampleTimeUs " << sampleTimeUs;
+                    }
+                    LOG(DEBUG) << " Copy size " << toCopy
+                            << " ts " << timestamp;
+                    memcpy(frame.mWview->data(), rView.data() + inputOffset, toCopy);
+                    frame.mWview->setOffset(frame.mWview->offset() + toCopy);
+                    inputOffset += toCopy;
+                    mergeAccessUnitInfo(frame, flagsForCopy, toCopy, timestamp);
+                }
+            } else {
+                frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(buffer));
+                LOG(DEBUG) << "Copying worklets without linear buffer";
+            }
+        }
+        uint32_t flagsForCsdOrEnd = (*worklet)->output.flags
+                & (C2FrameData::FLAG_END_OF_STREAM | C2FrameData::FLAG_CODEC_CONFIG);
+        if (flagsForCsdOrEnd) {
+            LOG(DEBUG) << "Output worklet has CSD/EOS data";
+            frame.mLargeWork->result = C2_OK;
+            // we can assign timestamp as this will be evaluated in finalizeWork
+            frame.mLargeWork->worklets.front()->output.ordinal.timestamp = timestamp;
+            finalizeWork(frame, flagsForCsdOrEnd, true);
+            addWork(frame.mLargeWork);
+            frame.reset();
+        }
+    }
+    return C2_OK;
+}
+
+c2_status_t MultiAccessUnitHelper::finalizeWork(
+        MultiAccessUnitInfo& frame, uint32_t inFlags, bool forceComplete) {
+    if (frame.mLargeWork == nullptr) {
+        return C2_OK;
+    }
+    // prepare the input ordinal
+    frame.mLargeWork->input.ordinal = frame.inOrdinal;
+    // remove this
+    int64_t timeStampUs = frame.inOrdinal.timestamp.peekull();
+    if (!frame.mAccessUnitInfos.empty()) {
+        timeStampUs = frame.mAccessUnitInfos.front().timestamp;
+    } else if (!frame.mLargeWork->worklets.empty()) {
+        std::unique_ptr<C2Worklet> &worklet = frame.mLargeWork->worklets.front();
+        if (worklet) {
+            timeStampUs = worklet->output.ordinal.timestamp.peekull();
+        }
+    }
+    LOG(DEBUG) << "Finalizing work with input Idx "
+            << frame.mLargeWork->input.ordinal.frameIndex.peekull()
+            << " timestamp " << timeStampUs
+            << " inFlags " << inFlags;
+    uint32_t finalFlags = 0;
+    if ((!forceComplete)
+            && (frame.mLargeWork->result == C2_OK)
+            && (!frame.mComponentFrameIds.empty())) {
+        finalFlags |= C2FrameData::FLAG_INCOMPLETE;
+    }
+    if (frame.mLargeWork->result == C2_OK) {
+        finalFlags |= inFlags;
+    }
+    // update worklet if present
+    if (!frame.mLargeWork->worklets.empty() &&
+            frame.mLargeWork->worklets.front() != nullptr) {
+        frame.mLargeWork->workletsProcessed = 1;
+        C2FrameData& outFrameData = frame.mLargeWork->worklets.front()->output;
+        outFrameData.ordinal.frameIndex = frame.inOrdinal.frameIndex.peekull();
+        outFrameData.ordinal.timestamp = timeStampUs;
+        finalFlags |= frame.mLargeWork->worklets.front()->output.flags;
+        outFrameData.flags = (C2FrameData::flags_t)finalFlags;
+        // update buffers
+        if (frame.mBlock && (frame.mWview->offset() > 0)) {
+            size_t size = frame.mWview->offset();
+            LOG(DEBUG) << "Finalize : Block: Large frame size set as " << size
+                    << " timestamp as " << timeStampUs
+                    << "frameIndex " << outFrameData.ordinal.frameIndex.peekull();
+            frame.mWview->setOffset(0);
+            std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateLinearBuffer(
+                    frame.mBlock->share(0, size, ::C2Fence()));
+            frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(c2Buffer));
+        }
+        if (frame.mLargeWork->worklets.front()->output.buffers.size() > 0) {
+            std::shared_ptr<C2Buffer>& c2Buffer =
+                frame.mLargeWork->worklets.front()->output.buffers.front();
+            if (c2Buffer != nullptr) {
+                if (frame.mAccessUnitInfos.size() > 0) {
+                    if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
+                        frame.mAccessUnitInfos.back().flags |= C2FrameData::FLAG_END_OF_STREAM;
+                    }
+                    std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
+                            C2AccessUnitInfos::output::AllocShared(
+                                    frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
+                    frame.mInfos.push_back(largeFrame);
+                    frame.mAccessUnitInfos.clear();
+                }
+                for (auto &info : frame.mInfos) {
+                    c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
+                }
+            }
+        }
+        if (frame.mConfigUpdate.size() > 0) {
+            outFrameData.configUpdate.insert(
+                    outFrameData.configUpdate.end(),
+                    make_move_iterator(frame.mConfigUpdate.begin()),
+                    make_move_iterator(frame.mConfigUpdate.end()));
+        }
+    }
+    frame.mConfigUpdate.clear();
+    frame.mInfos.clear();
+    frame.mBlock.reset();
+    frame.mWview.reset();
+
+    LOG(DEBUG) << "Multi access-unitflag setting as " << finalFlags;
+    return C2_OK;
+}
+
+void MultiAccessUnitHelper::mergeAccessUnitInfo(
+        MultiAccessUnitInfo &frame,
+        uint32_t flags_,
+        uint32_t size,
+        int64_t timestamp) {
+    // Remove flags that are not part of Access unit info
+    uint32_t flags = flags_ & ~(C2FrameData::FLAG_INCOMPLETE
+            | C2FrameData::FLAG_DISCARD_FRAME
+            | C2FrameData::FLAG_CORRUPT
+            | C2FrameData::FLAG_CORRECTED);
+    if (frame.mAccessUnitInfos.empty()) {
+        frame.mAccessUnitInfos.emplace_back(flags, size, timestamp);
+        return;
+    }
+    if ((mInterface->kind() == C2Component::KIND_DECODER) &&
+            (frame.mAccessUnitInfos.back().flags == flags)) {
+        // merge access units here
+        C2AccessUnitInfosStruct &s = frame.mAccessUnitInfos.back();
+        s.size += size; // don't have to update timestamp
+    } else {
+        frame.mAccessUnitInfos.emplace_back(flags, size, timestamp);
+    }
+}
+
+void MultiAccessUnitHelper::MultiAccessUnitInfo::reset() {
+    mBlock.reset();
+    mWview.reset();
+    mInfos.clear();
+    mConfigUpdate.clear();
+    mAccessUnitInfos.clear();
+    mLargeWork.reset();
+}
+
+}  // namespace android
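
For a concrete feel of the decoder-side arithmetic in processWorklets() above, here is a minimal, self-contained sketch with assumed example values (48 kHz stereo 16-bit PCM); it is not part of the patch:

    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint32_t sampleRate = 48000, channelCount = 2;   // assumed example stream
        const uint32_t frameSize = channelCount * 2;           // bytes per 16-bit PCM frame
        // Integer microseconds per output byte, as computed in processWorklets().
        const int64_t sampleTimeUs = 1000000u / (sampleRate * channelCount * 2); // -> 5

        uint32_t maxSize = 9999;                                // configured C2LargeFrame maxSize
        maxSize = (maxSize / frameSize) * frameSize;            // aligned down to whole frames -> 9996

        const int64_t workletTsUs = 1000000;                    // timestamp of the source worklet
        const uint32_t inputOffset = 1920;                      // bytes already copied into the large frame
        const int64_t splitTsUs = workletTsUs + inputOffset * sampleTimeUs; // -> 1009600

        std::printf("aligned maxSize=%u, split ts=%lld us\n",
                    maxSize, static_cast<long long>(splitTsUs));
        return 0;
    }

Because sampleTimeUs uses integer division, split timestamps are an approximation; at 48 kHz stereo the exact per-byte time is about 5.21 us.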
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
new file mode 100644
index 0000000..070a1f5
--- /dev/null
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -0,0 +1,236 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_COMMON_MULTI_ACCESSUNIT_HELPER_H
+#define CODEC2_COMMON_MULTI_ACCESSUNIT_HELPER_H
+
+#include <hidl/Status.h>
+#include <hwbinder/IBinder.h>
+
+#include <C2Config.h>
+#include <util/C2InterfaceHelper.h>
+#include <C2Buffer.h>
+#include <C2.h>
+
+#include <set>
+#include <memory>
+#include <mutex>
+
+namespace android {
+
+struct MultiAccessUnitHelper;
+
+struct MultiAccessUnitInterface : public C2InterfaceHelper {
+    explicit MultiAccessUnitInterface(
+            const std::shared_ptr<C2ComponentInterface>& interface,
+            std::shared_ptr<C2ReflectorHelper> helper);
+
+    bool isParamSupported(C2Param::Index index);
+    C2LargeFrame::output getLargeFrameParam() const;
+    C2Component::kind_t kind() const;
+    bool isValidField(const C2ParamField &field) const;
+
+protected:
+    bool getDecoderSampleRateAndChannelCount(
+            uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
+    bool getMaxInputSize(C2StreamMaxBufferSizeInfo::input* const maxInputSize) const;
+    const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
+    std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
+    C2ComponentKindSetting mKind;
+    std::set<C2Param::Index> mSupportedParamIndexSet;
+    std::vector<C2ParamField> mParamFields;
+
+    friend struct MultiAccessUnitHelper;
+};
+
+struct MultiAccessUnitHelper {
+public:
+    MultiAccessUnitHelper(
+            const std::shared_ptr<MultiAccessUnitInterface>& intf,
+            std::shared_ptr<C2BlockPool> &linearPool);
+
+    virtual ~MultiAccessUnitHelper();
+
+    static bool isEnabledOnPlatform();
+
+    /*
+     * Scatters the incoming linear buffer into access-unit sized buffers
+     * based on the access-unit info.
+     */
+    c2_status_t scatter(
+            std::list<std::unique_ptr<C2Work>> &c2workItems,
+            std::list<std::list<std::unique_ptr<C2Work>>> * const processedWork);
+
+    /*
+     * Gathers different access-units into a single buffer based on the scatter list
+     * and the configured max and threshold sizes. This also generates the associated
+     * access-unit information and attaches it to the final result.
+     */
+    c2_status_t gather(
+            std::list<std::unique_ptr<C2Work>> &c2workItems,
+            std::list<std::unique_ptr<C2Work>> * const processedWork);
+
+    /*
+     * Flushes the codec and generates the list of flushed buffers.
+     */
+    c2_status_t flush(
+            std::list<std::unique_ptr<C2Work>> * const c2flushedWorks);
+
+    /*
+     * Collects all the pending work still under generation into c2workItems.
+     */
+    c2_status_t error(std::list<std::unique_ptr<C2Work>> * const c2workItems);
+
+    /*
+     * Get the interface object of this handler.
+     */
+    std::shared_ptr<MultiAccessUnitInterface> getInterface();
+
+    /*
+     * Gets the status of the object. This really is to make sure that
+     * all the allocators are configured properly within the handler.
+     */
+    bool getStatus();
+
+    /*
+     * Resets the structures inside the handler.
+     */
+    void reset();
+
+protected:
+
+    struct MultiAccessUnitInfo {
+        /*
+         * From the input
+         * Ordinal of the input frame
+         */
+        C2WorkOrdinalStruct inOrdinal;
+
+        /*
+         * Frame indexes of the scattered buffers
+         */
+        std::set<uint64_t> mComponentFrameIds;
+
+        /*
+         * For the output
+         * Current output block.
+         */
+        std::shared_ptr<C2LinearBlock> mBlock;
+
+        /*
+         * Write view of current block
+         */
+        std::shared_ptr<C2WriteView> mWview;
+
+        /*
+         * C2Info related to the current mBlock
+         */
+        std::vector<std::shared_ptr<const C2Info>> mInfos;
+
+        /*
+         * Vector for holding config updates from the wrapper
+         */
+        std::vector<std::unique_ptr<C2Param>> mConfigUpdate;
+
+        /*
+         * C2AccessUnitInfos for the current buffer
+         */
+        std::vector<C2AccessUnitInfosStruct> mAccessUnitInfos;
+
+        /*
+         * Current tuning used to process this input work
+         */
+        C2LargeFrame::output mLargeFrameTuning;
+
+        /*
+         * Current output C2Work being processed
+         */
+        std::unique_ptr<C2Work> mLargeWork;
+
+        /*
+         * For holding a reference to the incoming buffer
+         */
+        std::vector<std::shared_ptr<C2Buffer>> mInputC2Ref;
+
+        MultiAccessUnitInfo(C2WorkOrdinalStruct ordinal):inOrdinal(ordinal) {
+
+        }
+
+        /*
+         * Resets this frame
+         */
+        void reset();
+    };
+
+    /*
+     * Reconfigure helper
+     */
+    bool tryReconfigure(const std::unique_ptr<C2Param> &p);
+
+    /*
+     * Creates a linear block to be used with work
+     */
+    c2_status_t createLinearBlock(MultiAccessUnitInfo &frame);
+
+    /*
+     * Processes worklets from the component
+     */
+    c2_status_t processWorklets(MultiAccessUnitInfo &frame,
+                std::unique_ptr<C2Work> &work,
+                const std::function <void(std::unique_ptr<C2Work>&)> &addWork);
+
+    /*
+     * Finalizes the work to be sent out.
+     */
+    c2_status_t finalizeWork(MultiAccessUnitInfo &frame,
+            uint32_t flags = 0, bool forceComplete = false);
+
+    /*
+     * Merges different access unit infos if possible
+     */
+    void mergeAccessUnitInfo(MultiAccessUnitInfo &frame,
+            uint32_t flags,
+            uint32_t size,
+            int64_t timestamp);
+
+    // Flag to allow dynamic on/off settings on this helper.
+    // Once enabled and buffers are in transit, it is not possible
+    // to turn this module off by setting the max output value
+    // to 0. This is because the skip-cut buffer expects the
+    // metadata to always be present along with a valid buffer.
+    // This flag is used to monitor that state of this module.
+    bool mMultiAccessOnOffAllowed;
+
+    bool mInit;
+
+    // Interface of this module
+    std::shared_ptr<MultiAccessUnitInterface> mInterface;
+    // Local pool id used for output buffer allocation
+    C2BlockPool::local_id_t mBlockPoolId;
+    // C2Blockpool for output buffer allocation
+    std::shared_ptr<C2BlockPool> mLinearPool;
+    // FrameIndex for the current outgoing work
+    std::atomic_uint64_t mFrameIndex;
+    // Mutex to protect mFrameHolder
+    std::mutex mLock;
+    // List of Infos that contains the input and
+    // output work and buffer objects
+    std::list<MultiAccessUnitInfo> mFrameHolder;
+};
+
+}  // namespace android
+
+#endif  // CODEC2_COMMON_MULTI_ACCESSUNIT_HELPER_H
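
A sketch of how this helper is wired by a component wrapper, mirroring what the HIDL Component does later in this patch (scatter on queue(), gather on onWorkDone()). All names other than the helper and Codec2 APIs are assumptions for illustration:

    #include <codec2/common/MultiAccessUnitHelper.h>

    #include <C2Component.h>
    #include <C2Work.h>

    #include <functional>
    #include <list>
    #include <memory>

    // Queue path: split one large input work into per-access-unit works and
    // queue each resulting worklist to the component.
    c2_status_t queueWithLargeFrames(
            const std::shared_ptr<C2Component> &component,
            const std::shared_ptr<android::MultiAccessUnitHelper> &helper,
            std::list<std::unique_ptr<C2Work>> &inputWorks) {
        std::list<std::list<std::unique_ptr<C2Work>>> scattered;
        c2_status_t err = helper->scatter(inputWorks, &scattered);
        if (err != C2_OK) {
            return err;
        }
        for (auto &worklist : scattered) {
            err = component->queue_nb(&worklist);
            if (err != C2_OK) {
                break;
            }
        }
        return err;
    }

    // Output path: stitch per-access-unit outputs back into large frames
    // before forwarding them to the framework listener.
    void onWorkDoneWithLargeFrames(
            const std::shared_ptr<android::MultiAccessUnitHelper> &helper,
            std::list<std::unique_ptr<C2Work>> doneWorks,
            const std::function<void(std::list<std::unique_ptr<C2Work>>)> &forward) {
        std::list<std::unique_ptr<C2Work>> gathered;
        helper->gather(doneWorks, &gathered);
        if (!gathered.empty()) {
            forward(std::move(gathered));
        }
    }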
diff --git a/media/codec2/hal/hidl/1.0/utils/Android.bp b/media/codec2/hal/hidl/1.0/utils/Android.bp
index 2f2ecd1..9646a0b 100644
--- a/media/codec2/hal/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hal/hidl/1.0/utils/Android.bp
@@ -52,7 +52,6 @@
     ],
 }
 
-
 // DO NOT DEPEND ON THIS DIRECTLY
 // use libcodec2-hidl-defaults instead
 cc_library {
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index 0aeed08..62f0e25 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -135,6 +135,52 @@
     wp<IComponentListener> mListener;
 };
 
+// Component listener for handling multiple access-units
+struct MultiAccessUnitListener : public Component::Listener {
+    MultiAccessUnitListener(const sp<Component> &component,
+            const std::shared_ptr<MultiAccessUnitHelper> &handler):
+        Listener(component), mHandler(handler) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> c2component,
+            uint32_t errorCode) override {
+        if (mHandler) {
+            std::list<std::unique_ptr<C2Work>> worklist;
+            mHandler->error(&worklist);
+            if (!worklist.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(worklist));
+            }
+        }
+        Listener::onError_nb(c2component, errorCode);
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        Listener::onTripped_nb(c2component,
+                c2settingResult);
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
+        if (mHandler) {
+            std::list<std::unique_ptr<C2Work>> processedWork;
+            mHandler->gather(c2workItems, &processedWork);
+            if (!processedWork.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(processedWork));
+            }
+        } else {
+            Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+        }
+    }
+
+    protected:
+        std::shared_ptr<MultiAccessUnitHelper> mHandler;
+};
+
 // Component::Sink
 struct Component::Sink : public IInputSink {
     std::shared_ptr<Component> mComponent;
@@ -208,13 +254,14 @@
         const sp<::android::hardware::media::bufferpool::V2_0::
         IClientManager>& clientPoolManager)
       : mComponent{component},
-        mInterface{new ComponentInterface(component->intf(),
-                                          store->getParameterCache())},
         mListener{listener},
         mStore{store},
         mBufferPoolSender{clientPoolManager} {
     // Retrieve supported parameters from store
     // TODO: We could cache this per component/interface type
+    mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+    mInterface = new ComponentInterface(
+            component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
     mInit = mInterface->status();
 }
 
@@ -240,7 +287,6 @@
 // Methods from ::android::hardware::media::c2::V1_0::IComponent
 Return<Status> Component::queue(const WorkBundle& workBundle) {
     std::list<std::unique_ptr<C2Work>> c2works;
-
     if (!objcpy(&c2works, workBundle)) {
         return Status::CORRUPTED;
     }
@@ -252,7 +298,19 @@
                     registerFrameData(mListener, work->input);
         }
     }
-
+    c2_status_t err = C2_OK;
+    if (mMultiAccessUnitHelper) {
+        std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+        mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+        for (auto &c2worklist : c2worklists) {
+            err = mComponent->queue_nb(&c2worklist);
+            if (err != C2_OK) {
+                LOG(ERROR) << "Error Queuing to component.";
+                break;
+            }
+        }
+        return static_cast<Status>(err);
+    }
     return static_cast<Status>(mComponent->queue_nb(&c2works));
 }
 
@@ -261,6 +319,9 @@
     c2_status_t c2res = mComponent->flush_sm(
             C2Component::FLUSH_COMPONENT,
             &c2flushedWorks);
+    if (mMultiAccessUnitHelper) {
+        c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+    }
 
     // Unregister input buffers.
     for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
@@ -460,7 +521,18 @@
 
 Return<Status> Component::stop() {
     InputBufferManager::unregisterFrameData(mListener);
-    return static_cast<Status>(mComponent->stop());
+    Status status = static_cast<Status>(mComponent->stop());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->clearDeferredBlocks();
+            }
+        }
+    }
+    return status;
 }
 
 Return<Status> Component::reset() {
@@ -469,6 +541,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -479,6 +554,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -501,8 +579,26 @@
 }
 
 void Component::initListener(const sp<Component>& self) {
-    std::shared_ptr<C2Component::Listener> c2listener =
+    std::shared_ptr<C2Component::Listener> c2listener;
+    if (mMultiAccessUnitIntf) {
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2BlockPool> linearPool;
+        std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+        if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+            ::android::C2PlatformAllocatorDesc desc;
+            desc.allocatorId = allocator->getId();
+            if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+                if (linearPool) {
+                    mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+                            mMultiAccessUnitIntf, linearPool);
+                }
+            }
+        }
+    }
+    c2listener = mMultiAccessUnitHelper ?
+            std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
             std::make_shared<Listener>(self);
+
     c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
     if (res != C2_OK) {
         mInit = res;
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
index 12078e0..08f1ae2 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
@@ -25,7 +25,6 @@
 #include <hidl/HidlBinderSupport.h>
 #include <utils/Timers.h>
 
-#include <C2BqBufferPriv.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
@@ -45,9 +44,10 @@
 
 // Implementation of ConfigurableC2Intf based on C2ComponentInterface
 struct CompIntf : public ConfigurableC2Intf {
-    CompIntf(const std::shared_ptr<C2ComponentInterface>& intf) :
+    CompIntf(const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf):
         ConfigurableC2Intf{intf->getName(), intf->getId()},
-        mIntf{intf} {
+        mIntf{intf}, mMultiAccessUnitIntf{multiAccessUnitIntf} {
     }
 
     virtual c2_status_t config(
@@ -55,7 +55,54 @@
             c2_blocking_t mayBlock,
             std::vector<std::unique_ptr<C2SettingResult>>* const failures
             ) override {
-        return mIntf->config_vb(params, mayBlock, failures);
+        std::vector<C2Param*> paramsToIntf;
+        std::vector<C2Param*> paramsToLargeFrameIntf;
+        c2_status_t err = C2_OK;
+        if (mMultiAccessUnitIntf == nullptr) {
+            err = mIntf->config_vb(params, mayBlock, failures);
+            return err;
+        }
+        for (auto &p : params) {
+            if (mMultiAccessUnitIntf->isParamSupported(p->index())) {
+                paramsToLargeFrameIntf.push_back(p);
+            } else {
+                paramsToIntf.push_back(p);
+            }
+        }
+        c2_status_t err1 = C2_OK;
+        if (paramsToIntf.size() > 0) {
+            err1 = mIntf->config_vb(paramsToIntf, mayBlock, failures);
+        }
+        if (err1 != C2_OK) {
+            LOG(ERROR) << "We have a failed config";
+        }
+        c2_status_t err2 = C2_OK;
+        if (paramsToLargeFrameIntf.size() > 0) {
+            C2ComponentKindSetting kind;
+            C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+            c2_status_t err = mIntf->query_vb(
+                    {&kind, &maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+            if ((err == C2_OK) && (kind.value == C2Component::KIND_ENCODER)) {
+                for (int i = 0 ; i < paramsToLargeFrameIntf.size(); i++) {
+                    if (paramsToLargeFrameIntf[i]->index() ==
+                            C2LargeFrame::output::PARAM_TYPE) {
+                        C2LargeFrame::output *lfp = C2LargeFrame::output::From(
+                                    paramsToLargeFrameIntf[i]);
+                        // This assumes a worst-case compression ratio of 1:1.
+                        // In no case should the encoder produce more output than
+                        // what is provided to the encoder in a single call.
+                        if (lfp && (lfp->maxSize < maxInputSize.value)) {
+                            lfp->maxSize = maxInputSize.value;
+                        }
+                        break;
+                    }
+                }
+            }
+            err2 = mMultiAccessUnitIntf->config(
+                    paramsToLargeFrameIntf, mayBlock, failures);
+        }
+        // TODO: correct failure vector
+        return err1 != C2_OK ? err1 : err2;
     }
 
     virtual c2_status_t query(
@@ -63,33 +110,100 @@
             c2_blocking_t mayBlock,
             std::vector<std::unique_ptr<C2Param>>* const params
             ) const override {
-        return mIntf->query_vb({}, indices, mayBlock, params);
+        c2_status_t err = C2_OK;
+        if (mMultiAccessUnitIntf == nullptr) {
+            err = mIntf->query_vb({}, indices, mayBlock, params);
+            return err;
+        }
+        std::vector<C2Param::Index> paramsToIntf;
+        std::vector<C2Param::Index> paramsToLargeFrameIntf;
+        for (auto &i : indices) {
+            if (mMultiAccessUnitIntf->isParamSupported(i)) {
+                paramsToLargeFrameIntf.push_back(i);
+            } else {
+                paramsToIntf.push_back(i);
+            }
+        }
+        c2_status_t err1 = C2_OK;
+        if (paramsToIntf.size() > 0) {
+            err1 = mIntf->query_vb({}, paramsToIntf, mayBlock, params);
+        }
+        c2_status_t err2 = C2_OK;
+        if (paramsToLargeFrameIntf.size() > 0) {
+            err2 = mMultiAccessUnitIntf->query(
+                    {}, paramsToLargeFrameIntf, mayBlock, params);
+        }
+        // TODO: correct failure vector
+        return err1 != C2_OK ? err1 : err2;
     }
 
     virtual c2_status_t querySupportedParams(
             std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
             ) const override {
-        return mIntf->querySupportedParams_nb(params);
+        c2_status_t err = mIntf->querySupportedParams_nb(params);
+        if (mMultiAccessUnitIntf != nullptr) {
+            err = mMultiAccessUnitIntf->querySupportedParams(params);
+        }
+        return err;
     }
 
     virtual c2_status_t querySupportedValues(
             std::vector<C2FieldSupportedValuesQuery>& fields,
             c2_blocking_t mayBlock) const override {
-        return mIntf->querySupportedValues_vb(fields, mayBlock);
+        if (mMultiAccessUnitIntf == nullptr) {
+            return mIntf->querySupportedValues_vb(fields, mayBlock);
+        }
+        std::vector<C2FieldSupportedValuesQuery> dup = fields;
+        std::vector<C2FieldSupportedValuesQuery> queryArray[2];
+        std::map<C2ParamField, std::pair<uint32_t, size_t>> queryMap;
+        c2_status_t err = C2_OK;
+        for (size_t i = 0; i < fields.size(); i++) {
+            const C2ParamField &field = fields[i].field();
+            uint32_t queryArrayIdx = 1;
+            if (mMultiAccessUnitIntf->isValidField(field)) {
+                queryArrayIdx = 0;
+            }
+            queryMap[field] = std::make_pair(
+                    queryArrayIdx, queryArray[queryArrayIdx].size());
+            queryArray[queryArrayIdx].push_back(fields[i]);
+        }
+        if (queryArray[0].size() > 0) {
+            err = mMultiAccessUnitIntf->querySupportedValues(queryArray[0], mayBlock);
+        }
+        if (queryArray[1].size() > 0) {
+            err = mIntf->querySupportedValues_vb(queryArray[1], mayBlock);
+        }
+        for (size_t i = 0; i < dup.size(); i++) {
+            auto it = queryMap.find(dup[i].field());
+            if (it != queryMap.end()) {
+                std::pair<uint32_t, size_t> queryid = it->second;
+                fields[i] = queryArray[queryid.first][queryid.second];
+            }
+        }
+        return err;
     }
 
 protected:
     std::shared_ptr<C2ComponentInterface> mIntf;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
 };
 
 } // unnamed namespace
 
+
 // ComponentInterface
 ComponentInterface::ComponentInterface(
         const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<ParameterCache>& cache):ComponentInterface(intf, nullptr, cache) {
+}
+
+ComponentInterface::ComponentInterface(
+        const std::shared_ptr<C2ComponentInterface>& intf,
+        const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf,
         const std::shared_ptr<ParameterCache>& cache)
       : mInterface{intf},
-        mConfigurable{new CachedConfigurable(std::make_unique<CompIntf>(intf))} {
+        mConfigurable{new CachedConfigurable(
+                std::make_unique<CompIntf>(intf, multiAccessUnitIntf))} {
     mInit = mConfigurable->init(cache);
 }
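The CompIntf::config() change above routes each incoming setting to whichever interface claims it: params that MultiAccessUnitInterface::isParamSupported() recognizes go to the large-frame interface, everything else goes to the wrapped component interface, and the first non-OK status is returned. A minimal standalone sketch of that split-and-route pattern follows; the Param struct, the isLargeFrameParam predicate, and the apply callbacks are hypothetical stand-ins, not Codec2 types.

#include <cstdint>
#include <functional>
#include <iostream>
#include <vector>

// Hypothetical stand-in for a C2 setting: just an index and a value.
struct Param {
    uint32_t index;
    int32_t value;
};

// Status codes loosely mirroring c2_status_t (0 == OK).
using Status = int;

// Route each param to one of two handlers and return the first failure,
// mirroring how CompIntf::config() splits params between mIntf and
// mMultiAccessUnitIntf.
Status configSplit(const std::vector<Param>& params,
                   const std::function<bool(const Param&)>& isLargeFrameParam,
                   const std::function<Status(const std::vector<Param>&)>& applyToComponent,
                   const std::function<Status(const std::vector<Param>&)>& applyToLargeFrame) {
    std::vector<Param> toComponent;
    std::vector<Param> toLargeFrame;
    for (const Param& p : params) {
        (isLargeFrameParam(p) ? toLargeFrame : toComponent).push_back(p);
    }
    Status err1 = toComponent.empty() ? 0 : applyToComponent(toComponent);
    Status err2 = toLargeFrame.empty() ? 0 : applyToLargeFrame(toLargeFrame);
    return err1 != 0 ? err1 : err2;
}

int main() {
    std::vector<Param> params{{0x100, 7}, {0x200, 42}};
    Status s = configSplit(
            params,
            [](const Param& p) { return p.index == 0x200; },  // "large frame" params
            [](const std::vector<Param>& v) { std::cout << v.size() << " to component\n"; return 0; },
            [](const std::vector<Param>& v) { std::cout << v.size() << " to large-frame intf\n"; return 0; });
    return s;
}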
 
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
index 1c0d5b0..1ba1889 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
@@ -139,7 +139,15 @@
     SetPreferredCodec2ComponentStore(store);
 
     // Retrieve struct descriptors
-    mParamReflector = mStore->getParamReflector();
+    mParamReflectors.push_back(mStore->getParamReflector());
+#ifndef __ANDROID_APEX__
+    std::shared_ptr<C2ParamReflector> paramReflector =
+        GetFilterWrapper()->getParamReflector();
+    if (paramReflector != nullptr) {
+        ALOGD("[%s] added param reflector from filter wrapper", mStore->getName().c_str());
+        mParamReflectors.push_back(paramReflector);
+    }
+#endif
 
     // Retrieve supported parameters from store
     using namespace std::placeholders;
@@ -168,8 +176,7 @@
         std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
         auto it = mStructDescriptors.find(coreIndex);
         if (it == mStructDescriptors.end()) {
-            std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+            std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
             if (!structDesc) {
                 // All supported params must be described
                 res = C2_BAD_INDEX;
@@ -194,6 +201,37 @@
 }
 #endif
 
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+        const std::shared_ptr<C2ComponentInterface> &c2interface) {
+    std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+    if (c2interface == nullptr) {
+        return nullptr;
+    }
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        c2_status_t err = C2_OK;
+        C2ComponentDomainSetting domain;
+        std::vector<std::unique_ptr<C2Param>> heapParams;
+        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+            std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+            bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+                if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+                    isComponentSupportsLargeAudioFrame = true;
+                    break;
+                }
+            }
+            if (!isComponentSupportsLargeAudioFrame) {
+                multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+                        c2interface,
+                        std::static_pointer_cast<C2ReflectorHelper>(mParamReflectors[0]));
+            }
+        }
+    }
+    return multiAccessUnitIntf;
+}
+
 // Methods from ::android::hardware::media::c2::V1_0::IComponentStore
 Return<void> ComponentStore::createComponent(
         const hidl_string& name,
@@ -242,7 +280,9 @@
         c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
 #endif
         onInterfaceLoaded(c2interface);
-        interface = new ComponentInterface(c2interface, mParameterCache);
+        std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+                tryCreateMultiAccessUnitInterface(c2interface);
+        interface = new ComponentInterface(c2interface, multiAccessUnitIntf, mParameterCache);
     }
     _hidl_cb(static_cast<Status>(res), interface);
     return Void();
@@ -307,8 +347,7 @@
         if (item == mStructDescriptors.end()) {
             // not in the cache, and not known to be unsupported, query local reflector
             if (!mUnsupportedStructDescriptors.count(coreIndex)) {
-                std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+                std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
                 if (!structDesc) {
                     mUnsupportedStructDescriptors.emplace(coreIndex);
                 } else {
@@ -354,6 +393,16 @@
     return mConfigurable;
 }
 
+std::shared_ptr<C2StructDescriptor> ComponentStore::describe(const C2Param::CoreIndex &index) {
+    for (const std::shared_ptr<C2ParamReflector> &reflector : mParamReflectors) {
+        std::shared_ptr<C2StructDescriptor> desc = reflector->describe(index);
+        if (desc) {
+            return desc;
+        }
+    }
+    return nullptr;
+}
+
 // Called from createComponent() after a successful creation of `component`.
 void ComponentStore::reportComponentBirth(Component* component) {
     ComponentStatus componentStatus;
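ComponentStore now keeps a vector of param reflectors (the store's own, plus the filter wrapper's on non-APEX builds), and the new describe() helper returns the first descriptor any of them can provide; tryCreateMultiAccessUnitInterface() likewise only wraps audio components that do not already advertise C2_PARAMKEY_OUTPUT_LARGE_FRAME, and only when the platform feature is enabled. Below is a minimal sketch of the first-match fallback over several reflectors, using hypothetical Descriptor/Reflector stand-ins rather than C2StructDescriptor and C2ParamReflector.

#include <cstdint>
#include <memory>
#include <string>
#include <vector>

// Hypothetical stand-ins for C2StructDescriptor / C2ParamReflector.
struct Descriptor { std::string name; };
struct Reflector {
    virtual ~Reflector() = default;
    virtual std::shared_ptr<Descriptor> describe(uint32_t coreIndex) const = 0;
};

// First reflector that knows the index wins; nullptr if none do.
std::shared_ptr<Descriptor> describeFrom(
        const std::vector<std::shared_ptr<Reflector>>& reflectors, uint32_t coreIndex) {
    for (const auto& reflector : reflectors) {
        if (auto desc = reflector->describe(coreIndex)) {
            return desc;
        }
    }
    return nullptr;
}

struct StoreReflector : Reflector {
    std::shared_ptr<Descriptor> describe(uint32_t coreIndex) const override {
        return coreIndex == 1 ? std::make_shared<Descriptor>(Descriptor{"store-param"}) : nullptr;
    }
};
struct FilterReflector : Reflector {
    std::shared_ptr<Descriptor> describe(uint32_t coreIndex) const override {
        return coreIndex == 2 ? std::make_shared<Descriptor>(Descriptor{"filter-param"}) : nullptr;
    }
};

int main() {
    std::vector<std::shared_ptr<Reflector>> reflectors{
            std::make_shared<StoreReflector>(), std::make_shared<FilterReflector>()};
    auto desc = describeFrom(reflectors, 2);
    return (desc && desc->name == "filter-param") ? 0 : 1;
}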
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
index 3f55618..aed94ec 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
@@ -30,6 +30,8 @@
 #include <hidl/Status.h>
 #include <hwbinder/IBinder.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
 #include <C2.h>
@@ -54,6 +56,8 @@
 using ::android::hardware::IBinder;
 using ::android::sp;
 using ::android::wp;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 struct ComponentStore;
 
@@ -113,6 +117,8 @@
     std::shared_ptr<C2Component> mComponent;
     sp<ComponentInterface> mInterface;
     sp<IComponentListener> mListener;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+    std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
     sp<ComponentStore> mStore;
     ::android::hardware::media::c2::V1_0::utils::DefaultBufferPoolSender
             mBufferPoolSender;
@@ -135,6 +141,8 @@
 
     struct Listener;
 
+    friend struct MultiAccessUnitListener;
+
     using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
     sp<HwDeathRecipient> mDeathRecipient;
     bool mClientDied{false};
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
index 9102f92..2995faf 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
@@ -23,10 +23,15 @@
 #include <android/hardware/media/c2/1.0/IComponentInterface.h>
 #include <hidl/Status.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
+#include <C2Config.h>
+#include <util/C2InterfaceHelper.h>
 #include <C2.h>
 
+#include <set>
 #include <memory>
 
 namespace android {
@@ -39,6 +44,7 @@
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 using ::android::sp;
+using ::android::MultiAccessUnitInterface;
 
 struct ComponentStore;
 
@@ -46,6 +52,11 @@
     ComponentInterface(
             const std::shared_ptr<C2ComponentInterface>& interface,
             const std::shared_ptr<ParameterCache>& cache);
+
+    ComponentInterface(
+            const std::shared_ptr<C2ComponentInterface>& interface,
+            const std::shared_ptr<MultiAccessUnitInterface>& largeBufferIntf,
+            const std::shared_ptr<ParameterCache>& cache);
     c2_status_t status() const;
     virtual Return<sp<IConfigurable>> getConfigurable() override;
 
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
index 27e2a05..44b8ec1 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
@@ -78,6 +78,9 @@
 
     static std::shared_ptr<FilterWrapper> GetFilterWrapper();
 
+    std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+            const std::shared_ptr<C2ComponentInterface> &c2interface);
+
     // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
     virtual Return<void> createComponent(
             const hidl_string& name,
@@ -114,9 +117,12 @@
     // Does bookkeeping for an interface that has been loaded.
     void onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf);
 
+    // Describe a param struct using the first matching reflector in mParamReflectors.
+    std::shared_ptr<C2StructDescriptor> describe(const C2Param::CoreIndex &index);
+
     c2_status_t mInit;
     std::shared_ptr<C2ComponentStore> mStore;
-    std::shared_ptr<C2ParamReflector> mParamReflector;
+    std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
 
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
index 2054fe6..ccdde5e 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
@@ -15,6 +15,7 @@
 //
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
index 624aad2..2b1bca0 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
index 0f07077..564de47 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index 92b0bf5..2da6501 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -17,7 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "media_c2_hidl_test_common"
 #include <stdio.h>
-
+#include <numeric>
 #include "media_c2_hidl_test_common.h"
 
 #include <android/hardware/media/c2/1.0/IComponentStore.h>
@@ -221,6 +221,32 @@
     return parameters;
 }
 
+constexpr static std::initializer_list<std::pair<uint32_t, uint32_t>> flagList{
+        {(1 << VTS_BIT_FLAG_SYNC_FRAME), 0},
+        {(1 << VTS_BIT_FLAG_CSD_FRAME), C2FrameData::FLAG_CODEC_CONFIG},
+};
+
+/*
+ * Converts between VTS flags and C2 flags in either direction,
+ * based on the mapping entries in flagList above.
+ * @param flags a set of VTS flags or C2 flags
+ * @param toC2 if true, converts VTS flags to C2 flags;
+ *             if false, converts C2 flags to VTS flags
+ */
+static uint32_t convertFlags(uint32_t flags, bool toC2) {
+    return std::transform_reduce(
+            flagList.begin(), flagList.end(),
+            0u,
+            std::bit_or{},
+            [flags, toC2](const std::pair<uint32_t, uint32_t> &entry) {
+                if (toC2) {
+                    return (flags & entry.first) ? entry.second : 0;
+                } else {
+                    return (flags & entry.second) ? entry.first : 0;
+                }
+            });
+}
+
 // Populate Info vector and return number of CSDs
 int32_t populateInfoVector(std::string info, android::Vector<FrameInfo>* frameInfo,
                            bool timestampDevTest, std::list<uint64_t>* timestampUslist) {
@@ -258,9 +284,9 @@
                 eleInfo >> bytesCount;
                 eleInfo >> flags;
                 eleInfo >> timestamp;
-                vtsFlags = mapInfoFlagstoVtsFlags(flags);
+                uint32_t c2Flags = convertFlags(flags, true);
                 frameInfo->editItemAt(frameInfo->size() - 1).largeFrameInfo.push_back(
-                        {vtsFlags, static_cast<uint32_t>(bytesCount), timestamp});
+                        {c2Flags, static_cast<uint32_t>(bytesCount), timestamp});
             }
         }
     }
@@ -298,5 +324,8 @@
     else if (infoFlags == 0x10) return (1 << VTS_BIT_FLAG_NO_SHOW_FRAME);
     else if (infoFlags == 0x20) return (1 << VTS_BIT_FLAG_CSD_FRAME);
     else if (infoFlags == 0x40) return (1 << VTS_BIT_FLAG_LARGE_AUDIO_FRAME);
+    else if (infoFlags == 0x80) {
+        return (1 << VTS_BIT_FLAG_LARGE_AUDIO_FRAME) | (1 << VTS_BIT_FLAG_SYNC_FRAME);
+    }
     return 0xFF;
 }
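convertFlags() above folds the flagList table into a bitmask with std::transform_reduce: each entry contributes its C2 bit or its VTS bit depending on the direction, and the partial results are OR-ed together. The standalone sketch below shows the same table-driven translation with made-up flag values in place of the real VTS_BIT_FLAG_* and C2FrameData constants.

#include <array>
#include <cstdint>
#include <functional>
#include <iostream>
#include <numeric>
#include <utility>

// Hypothetical two-entry table: {testFlagBit, codecFlagBit}. The real code maps
// VTS_BIT_FLAG_* bits to C2FrameData flags; these values are illustrative only.
constexpr std::array<std::pair<uint32_t, uint32_t>, 2> kFlagMap{{
        {0x1u, 0x10u},
        {0x2u, 0x20u},
}};

static uint32_t translate(uint32_t flags, bool toCodec) {
    return std::transform_reduce(
            kFlagMap.begin(), kFlagMap.end(), 0u, std::bit_or<>{},
            [flags, toCodec](const std::pair<uint32_t, uint32_t>& e) {
                return toCodec ? ((flags & e.first) ? e.second : 0u)
                               : ((flags & e.second) ? e.first : 0u);
            });
}

int main() {
    std::cout << std::hex << translate(0x3u, true) << "\n";   // prints 30
    std::cout << std::hex << translate(0x30u, false) << "\n"; // prints 3
    return 0;
}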
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
index cc019da..0640f02 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index 275a721..36907e1 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "codec2_hidl_hal_component_test"
 
 #include <android-base/logging.h>
+#include <android/binder_process.h>
 #include <gtest/gtest.h>
 #include <hidl/GtestPrinter.h>
 
@@ -262,9 +263,6 @@
     ALOGV("mComponent->reset() timeConsumed=%" PRId64 " us", timeConsumed);
     ASSERT_EQ(err, C2_OK);
 
-    err = mComponent->start();
-    ASSERT_EQ(err, C2_OK);
-
     // Query supported params by the component
     std::vector<std::shared_ptr<C2ParamDescriptor>> params;
     startTime = getNowUs();
@@ -297,6 +295,9 @@
               timeConsumed);
     }
 
+    err = mComponent->start();
+    ASSERT_EQ(err, C2_OK);
+
     std::list<std::unique_ptr<C2Work>> workList;
     startTime = getNowUs();
     err = mComponent->queue(&workList);
@@ -382,5 +383,6 @@
     }
 
     ::testing::InitGoogleTest(&argc, argv);
+    ABinderProcess_startThreadPool();
     return RUN_ALL_TESTS();
 }
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
index 40f5201..5e52fde 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index 47ceed5..4c2ef9c 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "codec2_hidl_hal_master_test"
 
 #include <android-base/logging.h>
+#include <android-base/properties.h>
 #include <gtest/gtest.h>
 #include <hidl/GtestPrinter.h>
 #include <hidl/ServiceManagement.h>
@@ -82,6 +83,18 @@
     }
 }
 
+// @VsrTest = 3.2-001.003
+TEST_P(Codec2MasterHalTest, MustUseAidlBeyond202404) {
+    static int sVendorApiLevel = android::base::GetIntProperty("ro.vendor.api_level", 0);
+    if (sVendorApiLevel < 202404) {
+        GTEST_SKIP() << "vendor api level less than 202404: " << sVendorApiLevel;
+    }
+    ALOGV("MustUseAidlBeyond202404 Test");
+
+    EXPECT_NE(mClient->getAidlBase(), nullptr) << "android.hardware.media.c2 MUST use AIDL "
+                                               << "for chipsets launching at 202404 or above";
+}
+
 }  // anonymous namespace
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2MasterHalTest,
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info
index 291e323..ee59a8e 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info
+++ b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info
@@ -1,5 +1,5 @@
-16384 64 0 		 1 16384 1 0
-49152 64 1024000 3 16384 1 1024000 16384 1 2048000 16384 1 3072000
-32768 64 4096000 2 16384 1 4096000 16384 1 5120000
-49152 64 6144000 3 16384 1 6144000 16384 1 7168000 16384 1 8192000
-10924 64 9216000 1 10924 1 9216000
+16384 128 0 		 1 16384 1 0
+49152 128 1024000 3 16384 1 1024000 16384 1 2048000 16384 1 3072000
+32768 128 4096000 2 16384 1 4096000 16384 1 5120000
+49152 128 6144000 3 16384 1 6144000 16384 1 7168000 16384 1 8192000
+10924 128 9216000 1 10924 1 9216000
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
index ecc4f9d..d04c2f6 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index f8fd425..4a956f5 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -139,6 +139,20 @@
         mReorderDepth = -1;
         mTimestampDevTest = false;
         mMd5Offset = 0;
+        mIsTunneledCodec = false;
+
+        // For C2 codecs that support tunneling by default, the default value of
+        // C2PortTunneledModeTuning::mode should be != NONE. Otherwise VTS
+        // can assume that the codec supports regular (non-tunneled) decoding.
+        queried.clear();
+        c2err = mComponent->query(
+                {}, {C2PortTunneledModeTuning::output::PARAM_TYPE}, C2_MAY_BLOCK, &queried);
+        if (c2err == C2_OK && !queried.empty() && queried.front() != nullptr) {
+            C2TunneledModeStruct::mode_t tunneledMode =
+                    ((C2PortTunneledModeTuning::output*)queried.front().get())->m.mode;
+            mIsTunneledCodec = (tunneledMode != C2TunneledModeStruct::NONE);
+        }
+
         mMd5Enable = false;
         mRefMd5 = nullptr;
 
@@ -308,6 +322,7 @@
 
     bool mEos;
     bool mDisableTest;
+    bool mIsTunneledCodec;
     bool mMd5Enable;
     bool mTimestampDevTest;
     uint64_t mTimestampUs;
@@ -612,11 +627,14 @@
 
     bool signalEOS = std::get<3>(GetParam());
     surfaceMode_t surfMode = std::get<4>(GetParam());
-    mTimestampDevTest = true;
+    // Disable timestamp checks, as tunneled codecs don't populate
+    // output buffers in C2Work.
+    mTimestampDevTest = !mIsTunneledCodec;
 
     android::Vector<FrameInfo> Info;
 
-    mMd5Enable = true;
+    // Disable MD5 checks, as tunneled codecs don't populate output buffers in C2Work.
+    mMd5Enable = !mIsTunneledCodec;
     if (!mChksumFile.compare(sResourceDir)) mMd5Enable = false;
 
     uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
@@ -712,7 +730,9 @@
     typedef std::unique_lock<std::mutex> ULock;
     ASSERT_EQ(mComponent->start(), C2_OK);
 
-    mTimestampDevTest = true;
+    // Disable timestamp checks, as tunneled codecs don't populate
+    // output buffers in C2Work.
+    mTimestampDevTest = !mIsTunneledCodec;
     uint32_t timestampOffset = 0;
     uint32_t offset = 0;
     android::Vector<FrameInfo> Info;
diff --git a/media/codec2/hal/hidl/1.1/utils/Android.bp b/media/codec2/hal/hidl/1.1/utils/Android.bp
index 4f86511..d8b5db5 100644
--- a/media/codec2/hal/hidl/1.1/utils/Android.bp
+++ b/media/codec2/hal/hidl/1.1/utils/Android.bp
@@ -54,7 +54,6 @@
     ],
 }
 
-
 // DO NOT DEPEND ON THIS DIRECTLY
 // use libcodec2-hidl-defaults instead
 cc_library {
@@ -66,7 +65,6 @@
         "com.android.media.swcodec",
     ],
 
-
     defaults: ["hidl_defaults"],
 
     srcs: [
@@ -97,6 +95,7 @@
         "android.hardware.media.omx@1.0",
         "libbase",
         "libcodec2",
+        "libcodec2_hal_common",
         "libcodec2_hidl@1.0",
         "libcodec2_hidl_plugin_stub",
         "libcodec2_vndk",
@@ -173,4 +172,3 @@
         "libhidlbase",
     ],
 }
-
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index d0f4f19..7f2c4dd 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -29,6 +29,8 @@
 #include <hidl/HidlBinderSupport.h>
 #include <utils/Timers.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2BqBufferPriv.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
@@ -44,6 +46,8 @@
 namespace utils {
 
 using namespace ::android;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 // ComponentListener wrapper
 struct Component::Listener : public C2Component::Listener {
@@ -135,6 +139,52 @@
     wp<IComponentListener> mListener;
 };
 
+// Component listener for handling multiple access units
+struct MultiAccessUnitListener : public Component::Listener {
+    MultiAccessUnitListener(const sp<Component> &component,
+            const std::shared_ptr<MultiAccessUnitHelper> &helper):
+        Listener(component), mHelper(helper) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> c2component,
+            uint32_t errorCode) override {
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> worklist;
+            mHelper->error(&worklist);
+            if (!worklist.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(worklist));
+            }
+        }
+        Listener::onError_nb(c2component, errorCode);
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        Listener::onTripped_nb(c2component,
+                c2settingResult);
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> processedWork;
+            mHelper->gather(c2workItems, &processedWork);
+            if (!processedWork.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(processedWork));
+            }
+        } else {
+            Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+        }
+    }
+
+protected:
+    std::shared_ptr<MultiAccessUnitHelper> mHelper;
+};
+
 // Component::Sink
 struct Component::Sink : public IInputSink {
     std::shared_ptr<Component> mComponent;
@@ -208,13 +258,14 @@
         const sp<::android::hardware::media::bufferpool::V2_0::
         IClientManager>& clientPoolManager)
       : mComponent{component},
-        mInterface{new ComponentInterface(component->intf(),
-                                          store->getParameterCache())},
         mListener{listener},
         mStore{store},
         mBufferPoolSender{clientPoolManager} {
     // Retrieve supported parameters from store
     // TODO: We could cache this per component/interface type
+    mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+    mInterface = new ComponentInterface(
+            component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
     mInit = mInterface->status();
 }
 
@@ -252,6 +303,19 @@
                     registerFrameData(mListener, work->input);
         }
     }
+    c2_status_t err = C2_OK;
+    if (mMultiAccessUnitHelper) {
+        std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+        mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+        for (auto &c2worklist : c2worklists) {
+            err = mComponent->queue_nb(&c2worklist);
+            if (err != C2_OK) {
+                LOG(ERROR) << "Error queuing work to the component.";
+                break;
+            }
+        }
+        return static_cast<Status>(err);
+    }
 
     return static_cast<Status>(mComponent->queue_nb(&c2works));
 }
@@ -261,6 +325,9 @@
     c2_status_t c2res = mComponent->flush_sm(
             C2Component::FLUSH_COMPONENT,
             &c2flushedWorks);
+    if (mMultiAccessUnitHelper) {
+        c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+    }
 
     // Unregister input buffers.
     for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
@@ -460,7 +527,18 @@
 
 Return<Status> Component::stop() {
     InputBufferManager::unregisterFrameData(mListener);
-    return static_cast<Status>(mComponent->stop());
+    Status status = static_cast<Status>(mComponent->stop());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->clearDeferredBlocks();
+            }
+        }
+    }
+    return status;
 }
 
 Return<Status> Component::reset() {
@@ -469,6 +547,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -479,6 +560,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -508,7 +592,24 @@
 }
 
 void Component::initListener(const sp<Component>& self) {
-    std::shared_ptr<C2Component::Listener> c2listener =
+    std::shared_ptr<C2Component::Listener> c2listener;
+    if (mMultiAccessUnitIntf) {
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2BlockPool> linearPool;
+        std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+        if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+            ::android::C2PlatformAllocatorDesc desc;
+            desc.allocatorId = allocator->getId();
+            if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+                if (linearPool) {
+                    mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+                            mMultiAccessUnitIntf, linearPool);
+                }
+            }
+        }
+    }
+    c2listener = mMultiAccessUnitHelper ?
+            std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
             std::make_shared<Listener>(self);
     c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
     if (res != C2_OK) {
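With a multi-access-unit helper in place, Component::queue() scatters each submitted work item into per-access-unit work lists before queuing them to the codec, and MultiAccessUnitListener::onWorkDone_nb() gathers the finished pieces back into a single work item before it reaches the framework listener. The toy round trip below illustrates that flow; Work, ToyHelper, scatter() and gather() are simplified stand-ins for C2Work and MultiAccessUnitHelper, not the real API.

#include <iostream>
#include <list>
#include <memory>
#include <utility>
#include <vector>

// Hypothetical stand-in for C2Work: one "large" unit carries several payloads.
struct Work {
    std::vector<int> payloads;
};

// Toy helper: scatter() splits one large work item into single-payload items,
// gather() stitches completed items back into one, loosely like what
// MultiAccessUnitHelper does for multi-access-unit audio frames.
struct ToyHelper {
    void scatter(std::list<std::unique_ptr<Work>>& in,
                 std::list<std::list<std::unique_ptr<Work>>>* out) {
        for (auto& w : in) {
            std::list<std::unique_ptr<Work>> pieces;
            for (int p : w->payloads) {
                pieces.push_back(std::make_unique<Work>(Work{{p}}));
            }
            out->push_back(std::move(pieces));
        }
    }
    void gather(std::list<std::unique_ptr<Work>>& done,
                std::list<std::unique_ptr<Work>>* merged) {
        auto big = std::make_unique<Work>();
        for (auto& w : done) {
            big->payloads.insert(big->payloads.end(), w->payloads.begin(), w->payloads.end());
        }
        merged->push_back(std::move(big));
    }
};

int main() {
    ToyHelper helper;
    std::list<std::unique_ptr<Work>> submitted;
    submitted.push_back(std::make_unique<Work>(Work{{1, 2, 3}}));

    std::list<std::list<std::unique_ptr<Work>>> perUnit;
    helper.scatter(submitted, &perUnit);            // queue(): one item becomes three

    std::list<std::unique_ptr<Work>> finished;
    for (auto& batch : perUnit) {                   // pretend the codec finished each piece
        for (auto& w : batch) finished.push_back(std::move(w));
    }

    std::list<std::unique_ptr<Work>> reported;
    helper.gather(finished, &reported);             // onWorkDone_nb(): merged back into one
    std::cout << reported.front()->payloads.size() << " payloads reported\n"; // prints 3
    return 0;
}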
diff --git a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
index d47abdd..1b86958 100644
--- a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
@@ -139,7 +139,15 @@
     SetPreferredCodec2ComponentStore(store);
 
     // Retrieve struct descriptors
-    mParamReflector = mStore->getParamReflector();
+    mParamReflectors.push_back(mStore->getParamReflector());
+#ifndef __ANDROID_APEX__
+    std::shared_ptr<C2ParamReflector> paramReflector =
+        GetFilterWrapper()->getParamReflector();
+    if (paramReflector != nullptr) {
+        ALOGD("[%s] added param reflector from filter wrapper", mStore->getName().c_str());
+        mParamReflectors.push_back(paramReflector);
+    }
+#endif
 
     // Retrieve supported parameters from store
     using namespace std::placeholders;
@@ -168,8 +176,7 @@
         std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
         auto it = mStructDescriptors.find(coreIndex);
         if (it == mStructDescriptors.end()) {
-            std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+            std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
             if (!structDesc) {
                 // All supported params must be described
                 res = C2_BAD_INDEX;
@@ -194,6 +201,38 @@
 }
 #endif
 
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+        const std::shared_ptr<C2ComponentInterface> &c2interface) {
+    std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+    if (c2interface == nullptr) {
+        return nullptr;
+    }
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        c2_status_t err = C2_OK;
+        C2ComponentDomainSetting domain;
+        std::vector<std::unique_ptr<C2Param>> heapParams;
+        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+            std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+            bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+                if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+                    isComponentSupportsLargeAudioFrame = true;
+                    break;
+                }
+            }
+
+            if (!isComponentSupportsLargeAudioFrame) {
+                multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+                        c2interface,
+                        std::static_pointer_cast<C2ReflectorHelper>(mParamReflectors[0]));
+            }
+        }
+    }
+    return multiAccessUnitIntf;
+}
+
 // Methods from ::android::hardware::media::c2::V1_0::IComponentStore
 Return<void> ComponentStore::createComponent(
         const hidl_string& name,
@@ -241,7 +280,10 @@
         c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
 #endif
         onInterfaceLoaded(c2interface);
-        interface = new ComponentInterface(c2interface, mParameterCache);
+        std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+                tryCreateMultiAccessUnitInterface(c2interface);
+        interface = new ComponentInterface(
+                c2interface, multiAccessUnitIntf, mParameterCache);
     }
     _hidl_cb(static_cast<Status>(res), interface);
     return Void();
@@ -306,8 +348,7 @@
         if (item == mStructDescriptors.end()) {
             // not in the cache, and not known to be unsupported, query local reflector
             if (!mUnsupportedStructDescriptors.count(coreIndex)) {
-                std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+                std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
                 if (!structDesc) {
                     mUnsupportedStructDescriptors.emplace(coreIndex);
                 } else {
@@ -389,6 +430,16 @@
     return Void();
 }
 
+std::shared_ptr<C2StructDescriptor> ComponentStore::describe(const C2Param::CoreIndex &index) {
+    for (const std::shared_ptr<C2ParamReflector> &reflector : mParamReflectors) {
+        std::shared_ptr<C2StructDescriptor> desc = reflector->describe(index);
+        if (desc) {
+            return desc;
+        }
+    }
+    return nullptr;
+}
+
 // Called from createComponent() after a successful creation of `component`.
 void ComponentStore::reportComponentBirth(Component* component) {
     ComponentStatus componentStatus;
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
index f16de24..8f0478f 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
@@ -29,6 +29,8 @@
 #include <hidl/Status.h>
 #include <hwbinder/IBinder.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
 #include <C2.h>
@@ -57,6 +59,8 @@
 using ::android::hardware::IBinder;
 using ::android::sp;
 using ::android::wp;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 struct ComponentStore;
 
@@ -118,6 +122,8 @@
     std::shared_ptr<C2Component> mComponent;
     sp<ComponentInterface> mInterface;
     sp<IComponentListener> mListener;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+    std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
     sp<ComponentStore> mStore;
     ::android::hardware::media::c2::V1_1::utils::DefaultBufferPoolSender
             mBufferPoolSender;
@@ -140,6 +146,8 @@
 
     struct Listener;
 
+    friend struct MultiAccessUnitListener;
+
     using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
     sp<HwDeathRecipient> mDeathRecipient;
     bool mClientDied{false};
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
index f6daee7..52d2945 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
@@ -79,6 +79,9 @@
 
     static std::shared_ptr<FilterWrapper> GetFilterWrapper();
 
+    std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+            const std::shared_ptr<C2ComponentInterface> &c2interface);
+
     // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
     virtual Return<void> createComponent(
             const hidl_string& name,
@@ -122,9 +125,12 @@
     // Does bookkeeping for an interface that has been loaded.
     void onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf);
 
+    // Describe a param struct using the first matching reflector in mParamReflectors.
+    std::shared_ptr<C2StructDescriptor> describe(const C2Param::CoreIndex &index);
+
     c2_status_t mInit;
     std::shared_ptr<C2ComponentStore> mStore;
-    std::shared_ptr<C2ParamReflector> mParamReflector;
+    std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
 
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
diff --git a/media/codec2/hal/hidl/1.2/utils/Android.bp b/media/codec2/hal/hidl/1.2/utils/Android.bp
index b92dc07..a339946 100644
--- a/media/codec2/hal/hidl/1.2/utils/Android.bp
+++ b/media/codec2/hal/hidl/1.2/utils/Android.bp
@@ -58,7 +58,6 @@
     ],
 }
 
-
 // DO NOT DEPEND ON THIS DIRECTLY
 // use libcodec2-hidl-defaults instead
 cc_library {
@@ -102,6 +101,7 @@
         "android.hardware.media.omx@1.0",
         "libbase",
         "libcodec2",
+        "libcodec2_hal_common",
         "libcodec2_hidl@1.0",
         "libcodec2_hidl@1.1",
         "libcodec2_hidl_plugin_stub",
@@ -197,4 +197,3 @@
     name: "libcodec2-hidl-client-defaults",
     defaults: ["libcodec2-hidl-client-defaults@1.2"],
 }
-
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index 036c900..7b0aa9b 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -44,6 +44,8 @@
 namespace utils {
 
 using namespace ::android;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 // ComponentListener wrapper
 struct Component::Listener : public C2Component::Listener {
@@ -135,6 +137,52 @@
     wp<IComponentListener> mListener;
 };
 
+// Component listener for handling multiple access units
+struct MultiAccessUnitListener : public Component::Listener {
+    MultiAccessUnitListener(const sp<Component> &component,
+            const std::shared_ptr<MultiAccessUnitHelper> &helper):
+        Listener(component), mHelper(helper) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> c2component,
+            uint32_t errorCode) override {
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> worklist;
+            mHelper->error(&worklist);
+            if (!worklist.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(worklist));
+            }
+        }
+        Listener::onError_nb(c2component, errorCode);
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        Listener::onTripped_nb(c2component,
+                c2settingResult);
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> c2component,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
+        if (mHelper) {
+            std::list<std::unique_ptr<C2Work>> processedWork;
+            mHelper->gather(c2workItems, &processedWork);
+            if (!processedWork.empty()) {
+                Listener::onWorkDone_nb(c2component, std::move(processedWork));
+            }
+        } else {
+            Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+        }
+    }
+
+protected:
+    std::shared_ptr<MultiAccessUnitHelper> mHelper;
+};
+
 // Component::Sink
 struct Component::Sink : public IInputSink {
     std::shared_ptr<Component> mComponent;
@@ -208,13 +256,14 @@
         const sp<::android::hardware::media::bufferpool::V2_0::
         IClientManager>& clientPoolManager)
       : mComponent{component},
-        mInterface{new ComponentInterface(component->intf(),
-                                          store->getParameterCache())},
         mListener{listener},
         mStore{store},
         mBufferPoolSender{clientPoolManager} {
     // Retrieve supported parameters from store
     // TODO: We could cache this per component/interface type
+    mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+    mInterface = new ComponentInterface(
+            component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
     mInit = mInterface->status();
 }
 
@@ -252,7 +301,19 @@
                     registerFrameData(mListener, work->input);
         }
     }
-
+    c2_status_t err = C2_OK;
+    if (mMultiAccessUnitHelper) {
+        std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+        mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+        for (auto &c2worklist : c2worklists) {
+            err = mComponent->queue_nb(&c2worklist);
+            if (err != C2_OK) {
+                LOG(ERROR) << "Error queuing work to the component.";
+                break;
+            }
+        }
+        return static_cast<Status>(err);
+    }
     return static_cast<Status>(mComponent->queue_nb(&c2works));
 }
 
@@ -261,7 +322,9 @@
     c2_status_t c2res = mComponent->flush_sm(
             C2Component::FLUSH_COMPONENT,
             &c2flushedWorks);
-
+    if (mMultiAccessUnitHelper) {
+        c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+    }
     // Unregister input buffers.
     for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
         if (work) {
@@ -460,7 +523,18 @@
 
 Return<Status> Component::stop() {
     InputBufferManager::unregisterFrameData(mListener);
-    return static_cast<Status>(mComponent->stop());
+    Status status = static_cast<Status>(mComponent->stop());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->clearDeferredBlocks();
+            }
+        }
+    }
+    return status;
 }
 
 Return<Status> Component::reset() {
@@ -469,6 +543,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -479,6 +556,9 @@
         std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
         mBlockPools.clear();
     }
+    if (mMultiAccessUnitHelper) {
+        mMultiAccessUnitHelper->reset();
+    }
     InputBufferManager::unregisterFrameData(mListener);
     return status;
 }
@@ -539,7 +619,24 @@
 }
 
 void Component::initListener(const sp<Component>& self) {
-    std::shared_ptr<C2Component::Listener> c2listener =
+    std::shared_ptr<C2Component::Listener> c2listener;
+    if (mMultiAccessUnitIntf) {
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2BlockPool> linearPool;
+        std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+        if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+            ::android::C2PlatformAllocatorDesc desc;
+            desc.allocatorId = allocator->getId();
+            if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+                if (linearPool) {
+                    mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+                            mMultiAccessUnitIntf, linearPool);
+                }
+            }
+        }
+    }
+    c2listener = mMultiAccessUnitHelper ?
+            std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
             std::make_shared<Listener>(self);
     c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
     if (res != C2_OK) {
diff --git a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
index 9fac5d5..2e0386f 100644
--- a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
@@ -139,7 +139,15 @@
     SetPreferredCodec2ComponentStore(store);
 
     // Retrieve struct descriptors
-    mParamReflector = mStore->getParamReflector();
+    mParamReflectors.push_back(mStore->getParamReflector());
+#ifndef __ANDROID_APEX__
+    std::shared_ptr<C2ParamReflector> paramReflector =
+        GetFilterWrapper()->getParamReflector();
+    if (paramReflector != nullptr) {
+        ALOGD("[%s] added param reflector from filter wrapper", mStore->getName().c_str());
+        mParamReflectors.push_back(paramReflector);
+    }
+#endif
 
     // Retrieve supported parameters from store
     using namespace std::placeholders;
@@ -168,8 +176,7 @@
         std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
         auto it = mStructDescriptors.find(coreIndex);
         if (it == mStructDescriptors.end()) {
-            std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+            std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
             if (!structDesc) {
                 // All supported params must be described
                 res = C2_BAD_INDEX;
@@ -194,6 +201,37 @@
 }
 #endif
 
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+        const std::shared_ptr<C2ComponentInterface> &c2interface) {
+    std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+    if (c2interface == nullptr) {
+        return nullptr;
+    }
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        c2_status_t err = C2_OK;
+        C2ComponentDomainSetting domain;
+        std::vector<std::unique_ptr<C2Param>> heapParams;
+        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+            std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+            bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+                if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+                    isComponentSupportsLargeAudioFrame = true;
+                    break;
+                }
+            }
+            if (!isComponentSupportsLargeAudioFrame) {
+                multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+                        c2interface,
+                        std::static_pointer_cast<C2ReflectorHelper>(mParamReflectors[0]));
+            }
+        }
+    }
+    return multiAccessUnitIntf;
+}
+
 // Methods from ::android::hardware::media::c2::V1_0::IComponentStore
 Return<void> ComponentStore::createComponent(
         const hidl_string& name,
@@ -241,7 +279,9 @@
         c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
 #endif
         onInterfaceLoaded(c2interface);
-        interface = new ComponentInterface(c2interface, mParameterCache);
+        std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+                tryCreateMultiAccessUnitInterface(c2interface);
+        interface = new ComponentInterface(c2interface, multiAccessUnitIntf, mParameterCache);
     }
     _hidl_cb(static_cast<Status>(res), interface);
     return Void();
@@ -306,8 +346,7 @@
         if (item == mStructDescriptors.end()) {
             // not in the cache, and not known to be unsupported, query local reflector
             if (!mUnsupportedStructDescriptors.count(coreIndex)) {
-                std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+                std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
                 if (!structDesc) {
                     mUnsupportedStructDescriptors.emplace(coreIndex);
                 } else {
@@ -425,6 +464,16 @@
     return Void();
 }
 
+std::shared_ptr<C2StructDescriptor> ComponentStore::describe(const C2Param::CoreIndex &index) {
+    for (const std::shared_ptr<C2ParamReflector> &reflector : mParamReflectors) {
+        std::shared_ptr<C2StructDescriptor> desc = reflector->describe(index);
+        if (desc) {
+            return desc;
+        }
+    }
+    return nullptr;
+}
+
 // Called from createComponent() after a successful creation of `component`.
 void ComponentStore::reportComponentBirth(Component* component) {
     ComponentStatus componentStatus;
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
index 6a73392..f2b77bc 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -29,6 +29,8 @@
 #include <hidl/Status.h>
 #include <hwbinder/IBinder.h>
 
+#include <codec2/common/MultiAccessUnitHelper.h>
+
 #include <C2Component.h>
 #include <C2Buffer.h>
 #include <C2.h>
@@ -57,6 +59,8 @@
 using ::android::hardware::IBinder;
 using ::android::sp;
 using ::android::wp;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
 
 struct ComponentStore;
 
@@ -123,6 +127,8 @@
     std::shared_ptr<C2Component> mComponent;
     sp<ComponentInterface> mInterface;
     sp<IComponentListener> mListener;
+    std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+    std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
     sp<ComponentStore> mStore;
     ::android::hardware::media::c2::V1_2::utils::DefaultBufferPoolSender
             mBufferPoolSender;
@@ -145,6 +151,8 @@
 
     struct Listener;
 
+    friend struct MultiAccessUnitListener;
+
     using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
     sp<HwDeathRecipient> mDeathRecipient;
     bool mClientDied{false};
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
index e95a651..1b209e2 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
@@ -79,6 +79,9 @@
 
     static std::shared_ptr<FilterWrapper> GetFilterWrapper();
 
+    std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+            const std::shared_ptr<C2ComponentInterface> &c2interface);
+
     // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
     virtual Return<void> createComponent(
             const hidl_string& name,
@@ -129,9 +132,12 @@
     // Does bookkeeping for an interface that has been loaded.
     void onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf);
 
+    // Describe a param struct using the first matching reflector in mParamReflectors.
+    std::shared_ptr<C2StructDescriptor> describe(const C2Param::CoreIndex &index);
+
     c2_status_t mInit;
     std::shared_ptr<C2ComponentStore> mStore;
-    std::shared_ptr<C2ParamReflector> mParamReflector;
+    std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
 
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
diff --git a/media/codec2/hal/plugin/FilterWrapper.cpp b/media/codec2/hal/plugin/FilterWrapper.cpp
index 197d6e7..ab6e3eb 100644
--- a/media/codec2/hal/plugin/FilterWrapper.cpp
+++ b/media/codec2/hal/plugin/FilterWrapper.cpp
@@ -49,11 +49,6 @@
             std::weak_ptr<FilterWrapper> filterWrapper)
         : mIntf(intf), mFilterWrapper(filterWrapper) {
         takeFilters(std::move(filters));
-        for (size_t i = 0; i < mFilters.size(); ++i) {
-            mControlParamTypes.insert(
-                    mFilters[i].desc.controlParams.begin(),
-                    mFilters[i].desc.controlParams.end());
-        }
     }
 
     ~WrappedDecoderInterface() override = default;
@@ -91,6 +86,12 @@
 
         // TODO: documentation
         mFilters = std::move(filters);
+        mControlParamTypes.clear();
+        for (size_t i = 0; i < mFilters.size(); ++i) {
+            mControlParamTypes.insert(
+                    mFilters[i].desc.controlParams.begin(),
+                    mFilters[i].desc.controlParams.end());
+        }
         mTypeToIndexForQuery.clear();
         mTypeToIndexForConfig.clear();
         for (size_t i = 0; i < mFilters.size(); ++i) {
@@ -1011,4 +1012,11 @@
     return mPlugin->queryParamsForPreviousComponent(intf, params);
 }
 
+std::shared_ptr<C2ParamReflector> FilterWrapper::getParamReflector() {
+    if (mInit != OK) {
+        return nullptr;
+    }
+    return mStore->getParamReflector();
+}
+
 }  // namespace android
diff --git a/media/codec2/hal/plugin/FilterWrapperStub.cpp b/media/codec2/hal/plugin/FilterWrapperStub.cpp
index 3fd5409..a21f6d0 100644
--- a/media/codec2/hal/plugin/FilterWrapperStub.cpp
+++ b/media/codec2/hal/plugin/FilterWrapperStub.cpp
@@ -57,4 +57,8 @@
     return CreateCodec2BlockPool(allocatorParam, component, pool);
 }
 
+std::shared_ptr<C2ParamReflector> FilterWrapper::getParamReflector() {
+    return nullptr;
+}
+
 }  // namespace android
diff --git a/media/codec2/hal/plugin/internal/FilterWrapper.h b/media/codec2/hal/plugin/internal/FilterWrapper.h
index dcffb5c..c27901e 100644
--- a/media/codec2/hal/plugin/internal/FilterWrapper.h
+++ b/media/codec2/hal/plugin/internal/FilterWrapper.h
@@ -104,6 +104,10 @@
             const std::shared_ptr<C2ComponentInterface> &intf,
             std::vector<std::unique_ptr<C2Param>> *params);
 
+    /**
+     * Return the param reflector of the filter plugin store.
+     */
+    std::shared_ptr<C2ParamReflector> getParamReflector();
 private:
     status_t mInit;
     std::unique_ptr<Plugin> mPlugin;
diff --git a/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
index b5383ad..47412b7 100644
--- a/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
@@ -37,6 +37,19 @@
                 kParamIndexColorAspects | C2Param::CoreIndex::IS_REQUEST_FLAG>
         C2StreamColorAspectsRequestInfo;
 
+// In practice the vendor parameters will be defined in a separate header file,
+// but for the purpose of this sample, we just define it here.
+
+// Vendor-specific type indices for filters start from this value. 0x7000 is added to
+// avoid conflicts with existing vendor type indices.
+constexpr uint32_t kTypeIndexFilterStart = C2Param::TYPE_INDEX_VENDOR_START + 0x7000;
+// Answer to the Ultimate Question of Life, the Universe, and Everything
+// (Reference to The Hitchhiker's Guide to the Galaxy by Douglas Adams)
+constexpr uint32_t kParamIndexVendorUltimateAnswer = kTypeIndexFilterStart + 0;
+typedef C2StreamParam<C2Info, C2Int32Value, kParamIndexVendorUltimateAnswer>
+        C2StreamVendorUltimateAnswerInfo;
+constexpr char C2_PARAMKEY_VENDOR_ULTIMATE_ANSWER[] = "ultimate-answer";
+
 namespace android {
 
 using namespace std::literals::chrono_literals;
@@ -49,10 +62,9 @@
         static const std::string NAME;
         static const FilterPlugin_V1::Descriptor DESCRIPTOR;
 
-        explicit Interface(c2_node_id_t id)
+        Interface(c2_node_id_t id, const std::shared_ptr<C2ReflectorHelper> &reflector)
             : mId(id),
-              mReflector(std::make_shared<C2ReflectorHelper>()),
-              mHelper(mReflector) {
+              mHelper(reflector) {
         }
         ~Interface() override = default;
         C2String getName() const override { return NAME; }
@@ -126,7 +138,6 @@
         }
     private:
         const c2_node_id_t mId;
-        std::shared_ptr<C2ReflectorHelper> mReflector;
         struct Helper : public C2InterfaceHelper {
             explicit Helper(std::shared_ptr<C2ReflectorHelper> reflector)
                 : C2InterfaceHelper(reflector) {
@@ -266,6 +277,15 @@
                         .build());
 
                 addParameter(
+                        DefineParam(mVendorUltimateAnswerInfo, C2_PARAMKEY_VENDOR_ULTIMATE_ANSWER)
+                        .withDefault(new C2StreamVendorUltimateAnswerInfo::input(0u))
+                        .withFields({
+                            C2F(mVendorUltimateAnswerInfo, value).any(),
+                        })
+                        .withSetter(VendorUltimateAnswerSetter)
+                        .build());
+
+                addParameter(
                         DefineParam(mOutputColorAspectInfo, C2_PARAMKEY_COLOR_ASPECTS)
                         .withDefault(new C2StreamColorAspectsInfo::output(0u))
                         .withFields({
@@ -336,6 +356,15 @@
                 return C2R::Ok();
             }
 
+            static C2R VendorUltimateAnswerSetter(
+                    bool mayBlock,
+                    C2P<C2StreamVendorUltimateAnswerInfo::input> &me) {
+                (void)mayBlock;
+                ALOGI("Answer to the Ultimate Question of Life, the Universe, and Everything "
+                      "set to %d", me.v.value);
+                return C2R::Ok();
+            }
+
             std::shared_ptr<C2ApiFeaturesSetting> mApiFeatures;
 
             std::shared_ptr<C2ComponentNameSetting> mName;
@@ -362,11 +391,13 @@
             std::shared_ptr<C2StreamColorAspectsInfo::input> mInputColorAspectInfo;
             std::shared_ptr<C2StreamColorAspectsInfo::output> mOutputColorAspectInfo;
             std::shared_ptr<C2StreamColorAspectsRequestInfo::output> mColorAspectRequestInfo;
+
+            std::shared_ptr<C2StreamVendorUltimateAnswerInfo::input> mVendorUltimateAnswerInfo;
         } mHelper;
     };
 
-    explicit SampleToneMappingFilter(c2_node_id_t id)
-        : mIntf(std::make_shared<Interface>(id)) {
+    SampleToneMappingFilter(c2_node_id_t id, const std::shared_ptr<C2ReflectorHelper> &reflector)
+        : mIntf(std::make_shared<Interface>(id, reflector)) {
     }
     ~SampleToneMappingFilter() override {
         if (mProcessingThread.joinable()) {
@@ -802,7 +833,10 @@
 // static
 const FilterPlugin_V1::Descriptor SampleToneMappingFilter::Interface::DESCRIPTOR = {
     // controlParams
-    { C2StreamColorAspectsRequestInfo::output::PARAM_TYPE },
+    {
+        C2StreamColorAspectsRequestInfo::output::PARAM_TYPE,
+        C2StreamVendorUltimateAnswerInfo::input::PARAM_TYPE,
+    },
     // affectedParams
     {
         C2StreamHdrStaticInfo::output::PARAM_TYPE,
@@ -815,7 +849,7 @@
     SampleC2ComponentStore()
         : mReflector(std::make_shared<C2ReflectorHelper>()),
           mIntf(mReflector),
-          mFactories(CreateFactories()) {
+          mFactories(CreateFactories(mReflector)) {
     }
     ~SampleC2ComponentStore() = default;
 
@@ -892,36 +926,46 @@
     template <class T>
     struct ComponentFactoryImpl : public ComponentFactory {
     public:
-        ComponentFactoryImpl(const std::shared_ptr<const C2Component::Traits> &traits)
-            : ComponentFactory(traits) {
+        ComponentFactoryImpl(
+                const std::shared_ptr<const C2Component::Traits> &traits,
+                const std::shared_ptr<C2ReflectorHelper> &reflector)
+            : ComponentFactory(traits),
+              mReflector(reflector) {
         }
         ~ComponentFactoryImpl() override = default;
         c2_status_t createComponent(
                 c2_node_id_t id,
                 std::shared_ptr<C2Component>* const component) const override {
-            *component = std::make_shared<T>(id);
+            *component = std::make_shared<T>(id, mReflector);
             return C2_OK;
         }
         c2_status_t createInterface(
                 c2_node_id_t id,
                 std::shared_ptr<C2ComponentInterface>* const interface) const override {
-            *interface = std::make_shared<typename T::Interface>(id);
+            *interface = std::make_shared<typename T::Interface>(id, mReflector);
             return C2_OK;
         }
+    private:
+        std::shared_ptr<C2ReflectorHelper> mReflector;
     };
 
     template <class T>
-    static void AddFactory(std::map<C2String, std::unique_ptr<ComponentFactory>> *factories) {
-        std::shared_ptr<C2ComponentInterface> intf{new typename T::Interface(0)};
+    static void AddFactory(
+            std::map<C2String, std::unique_ptr<ComponentFactory>> *factories,
+            const std::shared_ptr<C2ReflectorHelper> &reflector) {
+        std::shared_ptr<C2ComponentInterface> intf{new typename T::Interface(0, reflector)};
         std::shared_ptr<C2Component::Traits> traits(new (std::nothrow) C2Component::Traits);
         CHECK(C2InterfaceUtils::FillTraitsFromInterface(traits.get(), intf))
                 << "Failed to fill traits from interface";
-        factories->emplace(traits->name, new ComponentFactoryImpl<T>(traits));
+        factories->emplace(
+                traits->name,
+                new ComponentFactoryImpl<T>(traits, reflector));
     }
 
-    static std::map<C2String, std::unique_ptr<ComponentFactory>> CreateFactories() {
+    static std::map<C2String, std::unique_ptr<ComponentFactory>> CreateFactories(
+            const std::shared_ptr<C2ReflectorHelper> &reflector) {
         std::map<C2String, std::unique_ptr<ComponentFactory>> factories;
-        AddFactory<SampleToneMappingFilter>(&factories);
+        AddFactory<SampleToneMappingFilter>(&factories, reflector);
         return factories;
     }
 
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index d867eb1..3c8c1b7 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -19,7 +19,9 @@
     export_include_dirs: ["include"],
 
     srcs: [
+        "C2AidlNode.cpp",
         "C2OMXNode.cpp",
+        "C2NodeImpl.cpp",
         "CCodec.cpp",
         "CCodecBufferChannel.cpp",
         "CCodecBuffers.cpp",
@@ -45,6 +47,8 @@
 
     static_libs: [
         "libSurfaceFlingerProperties",
+        "aconfig_mediacodec_flags_c_lib",
+        "android.media.codec-aconfig-cc",
     ],
 
     shared_libs: [
@@ -52,8 +56,11 @@
         "android.hardware.drm@1.0",
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
+        "android.hardware.graphics.common-V5-ndk",
+        "graphicbuffersource-aidl-ndk",
         "libbase",
         "libbinder",
+        "libbinder_ndk",
         "libcodec2",
         "libcodec2_client",
         "libcodec2_vndk",
@@ -65,9 +72,11 @@
         "liblog",
         "libmedia_codeclist",
         "libmedia_omx",
+        "libnativewindow",
         "libsfplugin_ccodec_utils",
         "libstagefright_bufferqueue_helper",
         "libstagefright_codecbase",
+        "libstagefright_graphicbuffersource_aidl",
         "libstagefright_foundation",
         "libstagefright_omx",
         "libstagefright_surface_utils",
@@ -75,6 +84,7 @@
         "libui",
         "libutils",
         "server_configurable_flags",
+        "libaconfig_storage_read_api_cc",
     ],
 
     export_shared_lib_headers: [
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
new file mode 100644
index 0000000..4e46ad6
--- /dev/null
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2AidlNode"
+#include <log/log.h>
+#include <private/android/AHardwareBufferHelpers.h>
+
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+
+#include "C2NodeImpl.h"
+#include "C2AidlNode.h"
+
+namespace android {
+
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+
+// Conversion
+using ::android::media::aidl_conversion::toAidlStatus;
+
+C2AidlNode::C2AidlNode(const std::shared_ptr<Codec2Client::Component> &comp)
+    : mImpl(new C2NodeImpl(comp, true)) {}
+
+// aidl ndk interfaces
+::ndk::ScopedAStatus C2AidlNode::freeNode() {
+    return toAidlStatus(mImpl->freeNode());
+}
+
+::ndk::ScopedAStatus C2AidlNode::getConsumerUsage(int64_t* _aidl_return) {
+    uint64_t usage;
+    mImpl->getConsumerUsageBits(&usage);
+    *_aidl_return = usage;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::getInputBufferParams(IAidlNode::InputBufferParams* _aidl_return) {
+    mImpl->getInputBufferParams(_aidl_return);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setConsumerUsage(int64_t usage) {
+    mImpl->setConsumerUsageBits(static_cast<uint64_t>(usage));
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setAdjustTimestampGapUs(int32_t gapUs) {
+    mImpl->setAdjustTimestampGapUs(gapUs);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setInputSurface(
+        const std::shared_ptr<IAidlBufferSource>& bufferSource) {
+    return toAidlStatus(mImpl->setAidlInputSurface(bufferSource));
+}
+
+::ndk::ScopedAStatus C2AidlNode::submitBuffer(
+        int32_t buffer,
+        const std::optional<::aidl::android::hardware::HardwareBuffer>& hBuffer,
+        int32_t flags, int64_t timestamp, const ::ndk::ScopedFileDescriptor& fence) {
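+    // Unwrap the optional AIDL HardwareBuffer into a GraphicBuffer; the fence fd is
+    // dup'ed because the underlying node implementation takes ownership of it.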
+    sp<GraphicBuffer> gBuf;
+    AHardwareBuffer *ahwb = nullptr;
+    if (hBuffer.has_value()) {
+        ahwb = hBuffer.value().get();
+    }
+
+    if (ahwb) {
+        gBuf = AHardwareBuffer_to_GraphicBuffer(ahwb);
+    }
+    return toAidlStatus(mImpl->submitBuffer(
+            buffer, gBuf, flags, timestamp, ::dup(fence.get())));
+}
+
+::ndk::ScopedAStatus C2AidlNode::onDataSpaceChanged(
+        int32_t dataSpace,
+        int32_t aspects,
+        int32_t pixelFormat) {
+    // NOTE: legacy code passed aspects, but they were never used.
+    (void)aspects;
+
+    return toAidlStatus(mImpl->onDataspaceChanged(
+            static_cast<uint32_t>(dataSpace),
+            static_cast<uint32_t>(pixelFormat)));
+}
+
+// cpp interface
+
+std::shared_ptr<IAidlBufferSource> C2AidlNode::getSource() {
+    return mImpl->getAidlSource();
+}
+
+void C2AidlNode::setFrameSize(uint32_t width, uint32_t height) {
+    return mImpl->setFrameSize(width, height);
+}
+
+void C2AidlNode::onInputBufferDone(c2_cntr64_t index) {
+    return mImpl->onInputBufferDone(index);
+}
+
+void C2AidlNode::onInputBufferEmptied() {
+    return mImpl->onInputBufferEmptied();
+}
+
+android_dataspace C2AidlNode::getDataspace() {
+    return mImpl->getDataspace();
+}
+
+uint32_t C2AidlNode::getPixelFormat() {
+    return mImpl->getPixelFormat();
+}
+
+void C2AidlNode::setPriority(int priority) {
+    return mImpl->setPriority(priority);
+}
+
+}  // namespace android
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
new file mode 100644
index 0000000..95290fd
--- /dev/null
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/media/BnAidlNode.h>
+#include <codec2/hidl/client.h>
+
+namespace android {
+
+struct C2NodeImpl;
+
+/**
+ * Thin Codec2 AIDL encoder HAL wrapper for InputSurface.
+ */
+class C2AidlNode : public ::aidl::android::media::BnAidlNode {
+public:
+    explicit C2AidlNode(const std::shared_ptr<Codec2Client::Component> &comp);
+    ~C2AidlNode() override = default;
+
+    // IAidlNode
+    ::ndk::ScopedAStatus freeNode() override;
+
+    ::ndk::ScopedAStatus getConsumerUsage(int64_t *_aidl_return) override;
+
+    ::ndk::ScopedAStatus getInputBufferParams(
+            ::aidl::android::media::IAidlNode::InputBufferParams *_aidl_return) override;
+
+    ::ndk::ScopedAStatus setConsumerUsage(int64_t usage) override;
+
+    ::ndk::ScopedAStatus setAdjustTimestampGapUs(int32_t gapUs) override;
+
+    ::ndk::ScopedAStatus setInputSurface(
+            const std::shared_ptr<::aidl::android::media::IAidlBufferSource>&
+                    bufferSource) override;
+
+    ::ndk::ScopedAStatus submitBuffer(
+            int32_t buffer,
+            const std::optional<::aidl::android::hardware::HardwareBuffer>& hBuffer,
+            int32_t flags,
+            int64_t timestampUs,
+            const ::ndk::ScopedFileDescriptor& fence) override;
+
+    ::ndk::ScopedAStatus onDataSpaceChanged(
+            int dataSpace, int aspects, int pixelFormat) override;
+
+    /**
+     * Returns underlying IAidlBufferSource object.
+     */
+    std::shared_ptr<::aidl::android::media::IAidlBufferSource> getSource();
+
+    /**
+     * Configure the frame size.
+     */
+    void setFrameSize(uint32_t width, uint32_t height);
+
+    /**
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
+     *
+     * \param index input work index
+     */
+    void onInputBufferDone(c2_cntr64_t index);
+
+    /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
+     * Returns dataspace information from GraphicBufferSource.
+     */
+    android_dataspace getDataspace();
+
+    /**
+     * Returns pixel format information from GraphicBufferSource.
+     */
+    uint32_t getPixelFormat();
+
+    /**
+     * Sets priority of the queue thread.
+     */
+    void setPriority(int priority);
+
+private:
+    std::shared_ptr<C2NodeImpl> mImpl;
+};
+
+}  // namespace android
+
diff --git a/media/codec2/sfplugin/C2NodeImpl.cpp b/media/codec2/sfplugin/C2NodeImpl.cpp
new file mode 100644
index 0000000..585072d
--- /dev/null
+++ b/media/codec2/sfplugin/C2NodeImpl.cpp
@@ -0,0 +1,500 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2NodeImpl"
+#include <log/log.h>
+
+#include <C2AllocatorGralloc.h>
+#include <C2BlockInternal.h>
+#include <C2Component.h>
+#include <C2Config.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+
+#include <android_media_codec.h>
+#include <android/fdsan.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <utils/Errors.h>
+#include <utils/Thread.h>
+
+#include "utils/Codec2Mapper.h"
+#include "C2NodeImpl.h"
+#include "Codec2Buffer.h"
+
+namespace android {
+
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+
+using ::android::media::BUFFERFLAG_EOS;
+
+namespace {
+
+class Buffer2D : public C2Buffer {
+public:
+    explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
+};
+
+}  // namespace
+
+class C2NodeImpl::QueueThread : public Thread {
+public:
+    QueueThread() : Thread(false) {}
+    ~QueueThread() override = default;
+    void queue(
+            const std::shared_ptr<Codec2Client::Component> &comp,
+            int fenceFd,
+            std::unique_ptr<C2Work> &&work,
+            android::base::unique_fd &&fd0,
+            android::base::unique_fd &&fd1) {
+        Mutexed<Jobs>::Locked jobs(mJobs);
+        auto it = jobs->queues.try_emplace(comp, comp).first;
+        it->second.workList.emplace_back(
+                std::move(work), fenceFd, std::move(fd0), std::move(fd1));
+        jobs->cond.broadcast();
+    }
+
+    void setDataspace(android_dataspace dataspace) {
+        Mutexed<Jobs>::Locked jobs(mJobs);
+        ColorUtils::convertDataSpaceToV0(dataspace);
+        jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
+        int32_t standard;
+        int32_t transfer;
+        int32_t range;
+        ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+        std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
+            std::make_unique<C2StreamColorAspectsInfo::input>(0u);
+        if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
+                && C2Mapper::map(transfer, &colorAspects->transfer)
+                && C2Mapper::map(range, &colorAspects->range)) {
+            jobs->configUpdate.push_back(std::move(colorAspects));
+        }
+    }
+
+    void setPriority(int priority) {
+        androidSetThreadPriority(getTid(), priority);
+    }
+
+protected:
+    bool threadLoop() override {
+        constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000;  // 10ms
+        constexpr nsecs_t kWaitNs = kIntervalNs * 2;
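+        // Batch pending work per component: each component is flushed at most once
+        // every kIntervalNs; if nothing was ready on the first pass, wait up to
+        // kWaitNs for new work before scanning the queues once more.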
+        for (int i = 0; i < 2; ++i) {
+            Mutexed<Jobs>::Locked jobs(mJobs);
+            nsecs_t nowNs = systemTime();
+            bool queued = false;
+            for (auto it = jobs->queues.begin(); it != jobs->queues.end(); ) {
+                Queue &queue = it->second;
+                if (queue.workList.empty()
+                        || (queue.lastQueuedTimestampNs != 0 &&
+                            nowNs - queue.lastQueuedTimestampNs < kIntervalNs)) {
+                    ++it;
+                    continue;
+                }
+                std::shared_ptr<Codec2Client::Component> comp = queue.component.lock();
+                if (!comp) {
+                    it = jobs->queues.erase(it);
+                    continue;
+                }
+                std::list<std::unique_ptr<C2Work>> items;
+                std::vector<int> fenceFds;
+                std::vector<android::base::unique_fd> uniqueFds;
+                while (!queue.workList.empty()) {
+                    items.push_back(std::move(queue.workList.front().work));
+                    fenceFds.push_back(queue.workList.front().fenceFd);
+                    uniqueFds.push_back(std::move(queue.workList.front().fd0));
+                    uniqueFds.push_back(std::move(queue.workList.front().fd1));
+                    queue.workList.pop_front();
+                }
+                for (const std::unique_ptr<C2Param> &param : jobs->configUpdate) {
+                    items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
+                }
+
+                jobs.unlock();
+                for (int fenceFd : fenceFds) {
+                    sp<Fence> fence(new Fence(fenceFd));
+                    fence->waitForever(LOG_TAG);
+                }
+                queue.lastQueuedTimestampNs = nowNs;
+                comp->queue(&items);
+                for (android::base::unique_fd &ufd : uniqueFds) {
+                    (void)ufd.release();
+                }
+                jobs.lock();
+
+                it = jobs->queues.upper_bound(comp);
+                queued = true;
+            }
+            if (queued) {
+                jobs->configUpdate.clear();
+                return true;
+            }
+            if (i == 0) {
+                jobs.waitForConditionRelative(jobs->cond, kWaitNs);
+            }
+        }
+        return true;
+    }
+
+private:
+    struct WorkFence {
+        WorkFence(std::unique_ptr<C2Work> &&w, int fd) : work(std::move(w)), fenceFd(fd) {}
+
+        WorkFence(
+                std::unique_ptr<C2Work> &&w,
+                int fd,
+                android::base::unique_fd &&uniqueFd0,
+                android::base::unique_fd &&uniqueFd1)
+            : work(std::move(w)),
+              fenceFd(fd),
+              fd0(std::move(uniqueFd0)),
+              fd1(std::move(uniqueFd1)) {}
+
+        std::unique_ptr<C2Work> work;
+        int fenceFd;
+        android::base::unique_fd fd0;
+        android::base::unique_fd fd1;
+    };
+    struct Queue {
+        Queue(const std::shared_ptr<Codec2Client::Component> &comp)
+            : component(comp), lastQueuedTimestampNs(0) {}
+        Queue(const Queue &) = delete;
+        Queue &operator =(const Queue &) = delete;
+
+        std::weak_ptr<Codec2Client::Component> component;
+        std::list<WorkFence> workList;
+        nsecs_t lastQueuedTimestampNs;
+    };
+    struct Jobs {
+        std::map<std::weak_ptr<Codec2Client::Component>,
+                 Queue,
+                 std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
+        std::vector<std::unique_ptr<C2Param>> configUpdate;
+        Condition cond;
+    };
+    Mutexed<Jobs> mJobs;
+};
+
+C2NodeImpl::C2NodeImpl(const std::shared_ptr<Codec2Client::Component> &comp, bool aidl)
+    : mComp(comp), mFrameIndex(0), mWidth(0), mHeight(0), mUsage(0),
+      mAdjustTimestampGapUs(0), mFirstInputFrame(true),
+      mQueueThread(new QueueThread), mAidlHal(aidl) {
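+    // Warn whenever an fd owned by a unique_fd is closed by someone else
+    // (see the fd handling in submitBuffer()).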
+    android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
+    mQueueThread->run("C2NodeImpl", PRIORITY_AUDIO);
+
+    android_dataspace ds = HAL_DATASPACE_UNKNOWN;
+    mDataspace.lock().set(ds);
+    uint32_t pf = PIXEL_FORMAT_UNKNOWN;
+    mPixelFormat.lock().set(pf);
+}
+
+C2NodeImpl::~C2NodeImpl() {
+}
+
+status_t C2NodeImpl::freeNode() {
+    mComp.reset();
+    android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ONCE);
+    return mQueueThread->requestExitAndWait();
+}
+
+void C2NodeImpl::onFirstInputFrame() {
+    mFirstInputFrame = true;
+}
+
+void C2NodeImpl::getConsumerUsageBits(uint64_t *usage) {
+    *usage = mUsage;
+}
+
+void C2NodeImpl::getInputBufferParams(IAidlNode::InputBufferParams *params) {
+    params->bufferCountActual = 16;
+
+    // WORKAROUND: having more slots improves performance while consuming
+    // more memory. This is a temporary workaround to reduce memory for
+    // the larger-than-4K scenario.
+    if (mWidth * mHeight > 4096 * 2340) {
+        std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+        C2PortActualDelayTuning::input inputDelay(0);
+        C2ActualPipelineDelayTuning pipelineDelay(0);
+        c2_status_t c2err = C2_NOT_FOUND;
+        if (comp) {
+            c2err = comp->query(
+                    {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
+        }
+        if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
+            params->bufferCountActual = 4;
+            params->bufferCountActual += (inputDelay ? inputDelay.value : 0u);
+            params->bufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
+        }
+    }
+
+    params->frameWidth = mWidth;
+    params->frameHeight = mHeight;
+}
+
+void C2NodeImpl::setConsumerUsageBits(uint64_t usage) {
+    mUsage = usage;
+}
+
+void C2NodeImpl::setAdjustTimestampGapUs(int32_t gapUs) {
+    mAdjustTimestampGapUs = gapUs;
+}
+
+status_t C2NodeImpl::setInputSurface(const sp<IOMXBufferSource> &bufferSource) {
+    c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+            C2PlatformAllocatorStore::GRALLOC,
+            &mAllocator);
+    if (err != OK) {
+        return UNKNOWN_ERROR;
+    }
+    CHECK(!mAidlHal);
+    mBufferSource = bufferSource;
+    return OK;
+}
+
+status_t C2NodeImpl::setAidlInputSurface(
+        const std::shared_ptr<IAidlBufferSource> &aidlBufferSource) {
+    c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+            C2PlatformAllocatorStore::GRALLOC,
+            &mAllocator);
+    if (err != OK) {
+        return UNKNOWN_ERROR;
+    }
+    CHECK(mAidlHal);
+    mAidlBufferSource = aidlBufferSource;
+    return OK;
+}
+
+status_t C2NodeImpl::submitBuffer(
+        uint32_t buffer, const sp<GraphicBuffer> &graphicBuffer,
+        uint32_t flags, int64_t timestamp, int fenceFd) {
+    std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+    if (!comp) {
+        return NO_INIT;
+    }
+
+    uint32_t c2Flags = (flags & BUFFERFLAG_EOS)
+            ? C2FrameData::FLAG_END_OF_STREAM : 0;
+    std::shared_ptr<C2GraphicBlock> block;
+
+    android::base::unique_fd fd0, fd1;
+    C2Handle *handle = nullptr;
+    if (graphicBuffer) {
+        std::shared_ptr<C2GraphicAllocation> alloc;
+        handle = WrapNativeCodec2GrallocHandle(
+                graphicBuffer->handle,
+                graphicBuffer->width,
+                graphicBuffer->height,
+                graphicBuffer->format,
+                graphicBuffer->usage,
+                graphicBuffer->stride);
+        if (handle != nullptr) {
+            // unique_fd takes ownership of the fds; we'll get a warning if these
+            // fds get closed by somebody else. Ownership will be released before
+            // we return, so that the fds get closed as usual once the work items
+            // and the block holding them are gone.
+            native_handle_t *nativeHandle = reinterpret_cast<native_handle_t*>(handle);
+            fd0.reset(nativeHandle->numFds > 0 ? nativeHandle->data[0] : -1);
+            fd1.reset(nativeHandle->numFds > 1 ? nativeHandle->data[1] : -1);
+        }
+        c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
+        if (err != OK) {
+            (void)fd0.release();
+            (void)fd1.release();
+            native_handle_close(handle);
+            native_handle_delete(handle);
+            return UNKNOWN_ERROR;
+        }
+        block = _C2BlockFactory::CreateGraphicBlock(alloc);
+    } else if (!(flags & BUFFERFLAG_EOS)) {
+        return BAD_VALUE;
+    }
+
+    std::unique_ptr<C2Work> work(new C2Work);
+    work->input.flags = (C2FrameData::flags_t)c2Flags;
+    work->input.ordinal.timestamp = timestamp;
+
+    // WORKAROUND: adjust timestamp based on gapUs
+    {
+        work->input.ordinal.customOrdinal = timestamp; // save input timestamp
+        if (mFirstInputFrame) {
+            // grab timestamps on first frame
+            mPrevInputTimestamp = timestamp;
+            mPrevCodecTimestamp = timestamp;
+            mFirstInputFrame = false;
+        } else if (mAdjustTimestampGapUs > 0) {
+            work->input.ordinal.timestamp =
+                mPrevCodecTimestamp
+                        + c2_min((timestamp - mPrevInputTimestamp).peek(), mAdjustTimestampGapUs);
+        } else if (mAdjustTimestampGapUs < 0) {
+            work->input.ordinal.timestamp = mPrevCodecTimestamp - mAdjustTimestampGapUs;
+        }
+        mPrevInputTimestamp = work->input.ordinal.customOrdinal;
+        mPrevCodecTimestamp = work->input.ordinal.timestamp;
+        ALOGV("adjusting %lld to %lld (gap=%lld)",
+              work->input.ordinal.customOrdinal.peekll(),
+              work->input.ordinal.timestamp.peekll(),
+              (long long)mAdjustTimestampGapUs);
+    }
+
+    work->input.ordinal.frameIndex = mFrameIndex++;
+    work->input.buffers.clear();
+    if (block) {
+        std::shared_ptr<C2Buffer> c2Buffer(
+                new Buffer2D(block->share(
+                        C2Rect(block->width(), block->height()), ::C2Fence())));
+        work->input.buffers.push_back(c2Buffer);
+        std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
+        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
+        GetHdrMetadataFromGralloc4Handle(
+                block->handle(),
+                &staticInfo,
+                &dynamicInfo);
+        if (staticInfo && *staticInfo) {
+            c2Buffer->setInfo(staticInfo);
+        }
+        if (dynamicInfo && *dynamicInfo) {
+            c2Buffer->setInfo(dynamicInfo);
+        }
+    }
+    work->worklets.clear();
+    work->worklets.emplace_back(new C2Worklet);
+    {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        buffers->mIdsInUse.emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+    }
+    mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
+
+    return OK;
+}
+
+status_t C2NodeImpl::onDataspaceChanged(uint32_t dataSpace, uint32_t pixelFormat) {
+    ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
+    android_dataspace d = (android_dataspace)dataSpace;
+    mQueueThread->setDataspace(d);
+
+    mDataspace.lock().set(d);
+    mPixelFormat.lock().set(pixelFormat);
+    return OK;
+}
+
+sp<IOMXBufferSource> C2NodeImpl::getSource() {
+    CHECK(!mAidlHal);
+    return mBufferSource;
+}
+
+std::shared_ptr<IAidlBufferSource> C2NodeImpl::getAidlSource() {
+    CHECK(mAidlHal);
+    return mAidlBufferSource;
+}
+
+void C2NodeImpl::setFrameSize(uint32_t width, uint32_t height) {
+    mWidth = width;
+    mHeight = height;
+}
+
+void C2NodeImpl::onInputBufferDone(c2_cntr64_t index) {
+    if (android::media::codec::provider_->input_surface_throttle()) {
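+        // With input surface throttling, only move the freed buffer id to the
+        // available list; the buffer source is notified from onInputBufferEmptied().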
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        auto it = buffers->mIdsInUse.find(index.peeku());
+        if (it == buffers->mIdsInUse.end()) {
+            ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
+            return;
+        }
+        int32_t bufferId = it->second;
+        (void)buffers->mIdsInUse.erase(it);
+        buffers->mAvailableIds.push_back(bufferId);
+    } else {
+        if (!hasBufferSource()) {
+            return;
+        }
+        int32_t bufferId = 0;
+        {
+            Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+            auto it = buffers->mIdsInUse.find(index.peeku());
+            if (it == buffers->mIdsInUse.end()) {
+                ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
+                return;
+            }
+            bufferId = it->second;
+            (void)buffers->mIdsInUse.erase(it);
+        }
+        notifyInputBufferEmptied(bufferId);
+    }
+}
+
+void C2NodeImpl::onInputBufferEmptied() {
+    if (!android::media::codec::provider_->input_surface_throttle()) {
+        ALOGE("onInputBufferEmptied should not be called "
+              "when input_surface_throttle is false");
+        return;
+    }
+    if (!hasBufferSource()) {
+        return;
+    }
+    int32_t bufferId = 0;
+    {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        if (buffers->mAvailableIds.empty()) {
+            ALOGV("The codec is ready to take more input buffers "
+                    "but no input buffers are ready yet.");
+            return;
+        }
+        bufferId = buffers->mAvailableIds.front();
+        buffers->mAvailableIds.pop_front();
+    }
+    notifyInputBufferEmptied(bufferId);
+}
+
+bool C2NodeImpl::hasBufferSource() {
+    if (mAidlHal) {
+        if (!mAidlBufferSource) {
+            ALOGD("Buffer source not set");
+            return false;
+        }
+    } else {
+        if (!mBufferSource) {
+            ALOGD("Buffer source not set");
+            return false;
+        }
+    }
+    return true;
+}
+
+void C2NodeImpl::notifyInputBufferEmptied(int32_t bufferId) {
+    if (mAidlHal) {
+        ::ndk::ScopedFileDescriptor nullFence;
+        (void)mAidlBufferSource->onInputBufferEmptied(bufferId, nullFence);
+    } else {
+        (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
+    }
+}
+
+android_dataspace C2NodeImpl::getDataspace() {
+    return *mDataspace.lock();
+}
+
+uint32_t C2NodeImpl::getPixelFormat() {
+    return *mPixelFormat.lock();
+}
+
+void C2NodeImpl::setPriority(int priority) {
+    mQueueThread->setPriority(priority);
+}
+
+}  // namespace android
diff --git a/media/codec2/sfplugin/C2NodeImpl.h b/media/codec2/sfplugin/C2NodeImpl.h
new file mode 100644
index 0000000..cc826b4
--- /dev/null
+++ b/media/codec2/sfplugin/C2NodeImpl.h
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+
+#include <android/IOMXBufferSource.h>
+#include <aidl/android/media/IAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+#include <codec2/hidl/client.h>
+#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+
+namespace android {
+
+/**
+ * IOmxNode/IAidlNode implementation around a Codec2 component, only to be used in
+ * IGraphicBufferSource::configure. Only a subset of the node API is implemented.
+ * As a result, this node cannot be expected to work in any usage other than
+ * IGraphicBufferSource (or IAidlGraphicBufferSource if the AIDL HAL is used).
+ */
+struct C2NodeImpl {
+    explicit C2NodeImpl(const std::shared_ptr<Codec2Client::Component> &comp, bool aidl);
+    ~C2NodeImpl();
+
+    // IOMXNode and/or IAidlNode
+    status_t freeNode();
+
+    void onFirstInputFrame();
+    void getConsumerUsageBits(uint64_t *usage /* nonnull */);
+    void getInputBufferParams(
+            ::aidl::android::media::IAidlNode::InputBufferParams *params /* nonnull */);
+    void setConsumerUsageBits(uint64_t usage);
+    void setAdjustTimestampGapUs(int32_t gapUs);
+
+    status_t setInputSurface(
+            const sp<IOMXBufferSource> &bufferSource);
+    status_t setAidlInputSurface(
+            const std::shared_ptr<::aidl::android::media::IAidlBufferSource> &aidlBufferSource);
+
+    status_t submitBuffer(
+            uint32_t buffer, const sp<GraphicBuffer> &graphicBuffer,
+            uint32_t flags, int64_t timestamp, int fenceFd);
+    status_t onDataspaceChanged(uint32_t dataSpace, uint32_t pixelFormat);
+
+    /**
+     * Returns underlying IOMXBufferSource object.
+     */
+    sp<IOMXBufferSource> getSource();
+
+    /**
+     * Returns underlying IAidlBufferSource object.
+     */
+    std::shared_ptr<::aidl::android::media::IAidlBufferSource> getAidlSource();
+
+    /**
+     * Configure the frame size.
+     */
+    void setFrameSize(uint32_t width, uint32_t height);
+
+    /**
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
+     *
+     * \param index input work index
+     */
+    void onInputBufferDone(c2_cntr64_t index);
+
+    /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
+     * Returns dataspace information from GraphicBufferSource.
+     */
+    android_dataspace getDataspace();
+
+    /**
+     * Returns pixel format information from GraphicBufferSource.
+     */
+    uint32_t getPixelFormat();
+
+    /**
+     * Sets priority of the queue thread.
+     */
+    void setPriority(int priority);
+
+private:
+    std::weak_ptr<Codec2Client::Component> mComp;
+
+    sp<IOMXBufferSource> mBufferSource;
+    std::shared_ptr<::aidl::android::media::IAidlBufferSource> mAidlBufferSource;
+
+    std::shared_ptr<C2Allocator> mAllocator;
+    std::atomic_uint64_t mFrameIndex;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    uint64_t mUsage;
+    Mutexed<android_dataspace> mDataspace;
+    Mutexed<uint32_t> mPixelFormat;
+
+    // WORKAROUND: timestamp adjustment
+
+    // if >0: this is the max timestamp gap, if <0: this is -1 times the fixed timestamp gap
+    // if 0: no timestamp adjustment is made
+    // note that C2OMXNode can be recycled between encoding sessions.
+    int32_t mAdjustTimestampGapUs;
+    bool mFirstInputFrame; // true for first input
+    c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
+    c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
+
+    // Tracks the status of buffers
+    struct BuffersTracker {
+        BuffersTracker() = default;
+
+        // Keeps track of buffers that are used by the component. Maps frame index -> buffer ID.
+        std::map<uint64_t, uint32_t> mIdsInUse;
+        // Keeps track of the buffer IDs that are available after being released from the component.
+        std::list<uint32_t> mAvailableIds;
+    };
+    Mutexed<BuffersTracker> mBuffersTracker;
+
+    class QueueThread;
+    sp<QueueThread> mQueueThread;
+
+    bool mAidlHal;
+
+    bool hasBufferSource();
+    void notifyInputBufferEmptied(int32_t bufferId);
+};
+
+}  // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index bba022b..98e25e2 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2018, The Android Open Source Project
+ * Copyright 2024, The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -19,30 +19,17 @@
 #endif
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "C2OMXNode"
+#define LOG_TAG "C2OMXNODE"
 #include <log/log.h>
 
-#include <C2AllocatorGralloc.h>
-#include <C2BlockInternal.h>
-#include <C2Component.h>
-#include <C2Config.h>
-#include <C2PlatformSupport.h>
-
 #include <OMX_Component.h>
 #include <OMX_Index.h>
 #include <OMX_IndexExt.h>
 
-#include <android/fdsan.h>
-#include <media/stagefright/foundation/ColorUtils.h>
-#include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/MediaErrors.h>
-#include <ui/Fence.h>
-#include <ui/GraphicBuffer.h>
-#include <utils/Thread.h>
 
-#include "utils/Codec2Mapper.h"
 #include "C2OMXNode.h"
-#include "Codec2Buffer.h"
+#include "C2NodeImpl.h"
 
 namespace android {
 
@@ -50,175 +37,25 @@
 
 constexpr OMX_U32 kPortIndexInput = 0;
 
-class Buffer2D : public C2Buffer {
-public:
-    explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
-};
+} // anonymous namespace
 
-}  // namespace
+using ::android::media::BUFFERFLAG_ENDOFFRAME;
+using ::android::media::BUFFERFLAG_EOS;
 
-class C2OMXNode::QueueThread : public Thread {
-public:
-    QueueThread() : Thread(false) {}
-    ~QueueThread() override = default;
-    void queue(
-            const std::shared_ptr<Codec2Client::Component> &comp,
-            int fenceFd,
-            std::unique_ptr<C2Work> &&work,
-            android::base::unique_fd &&fd0,
-            android::base::unique_fd &&fd1) {
-        Mutexed<Jobs>::Locked jobs(mJobs);
-        auto it = jobs->queues.try_emplace(comp, comp).first;
-        it->second.workList.emplace_back(
-                std::move(work), fenceFd, std::move(fd0), std::move(fd1));
-        jobs->cond.broadcast();
-    }
-
-    void setDataspace(android_dataspace dataspace) {
-        Mutexed<Jobs>::Locked jobs(mJobs);
-        ColorUtils::convertDataSpaceToV0(dataspace);
-        jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
-        int32_t standard;
-        int32_t transfer;
-        int32_t range;
-        ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
-        std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
-            std::make_unique<C2StreamColorAspectsInfo::input>(0u);
-        if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
-                && C2Mapper::map(transfer, &colorAspects->transfer)
-                && C2Mapper::map(range, &colorAspects->range)) {
-            jobs->configUpdate.push_back(std::move(colorAspects));
-        }
-    }
-
-    void setPriority(int priority) {
-        androidSetThreadPriority(getTid(), priority);
-    }
-
-protected:
-    bool threadLoop() override {
-        constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000;  // 10ms
-        constexpr nsecs_t kWaitNs = kIntervalNs * 2;
-        for (int i = 0; i < 2; ++i) {
-            Mutexed<Jobs>::Locked jobs(mJobs);
-            nsecs_t nowNs = systemTime();
-            bool queued = false;
-            for (auto it = jobs->queues.begin(); it != jobs->queues.end(); ) {
-                Queue &queue = it->second;
-                if (queue.workList.empty()
-                        || (queue.lastQueuedTimestampNs != 0 &&
-                            nowNs - queue.lastQueuedTimestampNs < kIntervalNs)) {
-                    ++it;
-                    continue;
-                }
-                std::shared_ptr<Codec2Client::Component> comp = queue.component.lock();
-                if (!comp) {
-                    it = jobs->queues.erase(it);
-                    continue;
-                }
-                std::list<std::unique_ptr<C2Work>> items;
-                std::vector<int> fenceFds;
-                std::vector<android::base::unique_fd> uniqueFds;
-                while (!queue.workList.empty()) {
-                    items.push_back(std::move(queue.workList.front().work));
-                    fenceFds.push_back(queue.workList.front().fenceFd);
-                    uniqueFds.push_back(std::move(queue.workList.front().fd0));
-                    uniqueFds.push_back(std::move(queue.workList.front().fd1));
-                    queue.workList.pop_front();
-                }
-                for (const std::unique_ptr<C2Param> &param : jobs->configUpdate) {
-                    items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
-                }
-
-                jobs.unlock();
-                for (int fenceFd : fenceFds) {
-                    sp<Fence> fence(new Fence(fenceFd));
-                    fence->waitForever(LOG_TAG);
-                }
-                queue.lastQueuedTimestampNs = nowNs;
-                comp->queue(&items);
-                for (android::base::unique_fd &ufd : uniqueFds) {
-                    (void)ufd.release();
-                }
-                jobs.lock();
-
-                it = jobs->queues.upper_bound(comp);
-                queued = true;
-            }
-            if (queued) {
-                jobs->configUpdate.clear();
-                return true;
-            }
-            if (i == 0) {
-                jobs.waitForConditionRelative(jobs->cond, kWaitNs);
-            }
-        }
-        return true;
-    }
-
-private:
-    struct WorkFence {
-        WorkFence(std::unique_ptr<C2Work> &&w, int fd) : work(std::move(w)), fenceFd(fd) {}
-
-        WorkFence(
-                std::unique_ptr<C2Work> &&w,
-                int fd,
-                android::base::unique_fd &&uniqueFd0,
-                android::base::unique_fd &&uniqueFd1)
-            : work(std::move(w)),
-              fenceFd(fd),
-              fd0(std::move(uniqueFd0)),
-              fd1(std::move(uniqueFd1)) {}
-
-        std::unique_ptr<C2Work> work;
-        int fenceFd;
-        android::base::unique_fd fd0;
-        android::base::unique_fd fd1;
-    };
-    struct Queue {
-        Queue(const std::shared_ptr<Codec2Client::Component> &comp)
-            : component(comp), lastQueuedTimestampNs(0) {}
-        Queue(const Queue &) = delete;
-        Queue &operator =(const Queue &) = delete;
-
-        std::weak_ptr<Codec2Client::Component> component;
-        std::list<WorkFence> workList;
-        nsecs_t lastQueuedTimestampNs;
-    };
-    struct Jobs {
-        std::map<std::weak_ptr<Codec2Client::Component>,
-                 Queue,
-                 std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
-        std::vector<std::unique_ptr<C2Param>> configUpdate;
-        Condition cond;
-    };
-    Mutexed<Jobs> mJobs;
-};
+using ::aidl::android::media::IAidlNode;
 
 C2OMXNode::C2OMXNode(const std::shared_ptr<Codec2Client::Component> &comp)
-    : mComp(comp), mFrameIndex(0), mWidth(0), mHeight(0), mUsage(0),
-      mAdjustTimestampGapUs(0), mFirstInputFrame(true),
-      mQueueThread(new QueueThread) {
-    android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
-    mQueueThread->run("C2OMXNode", PRIORITY_AUDIO);
-
-    android_dataspace ds = HAL_DATASPACE_UNKNOWN;
-    mDataspace.lock().set(ds);
-    uint32_t pf = PIXEL_FORMAT_UNKNOWN;
-    mPixelFormat.lock().set(pf);
-}
+    : mImpl(new C2NodeImpl(comp, false)) {}
 
 status_t C2OMXNode::freeNode() {
-    mComp.reset();
-    android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ONCE);
-    return mQueueThread->requestExitAndWait();
+    return mImpl->freeNode();
 }
 
 status_t C2OMXNode::sendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param) {
     if (cmd == OMX_CommandStateSet && param == OMX_StateLoaded) {
         // Reset first input frame so if C2OMXNode is recycled, the timestamp does not become
         // negative. This is a workaround for HW codecs that do not handle timestamp rollover.
-        mFirstInputFrame = true;
+        mImpl->onFirstInputFrame();
     }
     return ERROR_UNSUPPORTED;
 }
@@ -228,13 +65,19 @@
     switch ((uint32_t)index) {
         case OMX_IndexParamConsumerUsageBits: {
             OMX_U32 *usage = (OMX_U32 *)params;
-            *usage = mUsage;
+            uint64_t val;
+            mImpl->getConsumerUsageBits(&val);
+            *usage = static_cast<uint32_t>(val & 0xFFFFFFFF);
+            ALOGW("retrieving usage bits in 32 bits %llu -> %u",
+                  (unsigned long long)val, (unsigned int)*usage);
             err = OK;
             break;
         }
         case OMX_IndexParamConsumerUsageBits64: {
             OMX_U64 *usage = (OMX_U64 *)params;
-            *usage = mUsage;
+            uint64_t val;
+            mImpl->getConsumerUsageBits(&val);
+            *usage = val;
             err = OK;
             break;
         }
@@ -246,31 +89,12 @@
             if (pDef->nPortIndex != kPortIndexInput) {
                 break;
             }
-
-            pDef->nBufferCountActual = 16;
-
-            // WORKAROUND: having more slots improve performance while consuming
-            // more memory. This is a temporary workaround to reduce memory for
-            // larger-than-4K scenario.
-            if (mWidth * mHeight > 4096 * 2340) {
-                std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
-                C2PortActualDelayTuning::input inputDelay(0);
-                C2ActualPipelineDelayTuning pipelineDelay(0);
-                c2_status_t c2err = C2_NOT_FOUND;
-                if (comp) {
-                    c2err = comp->query(
-                            {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
-                }
-                if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
-                    pDef->nBufferCountActual = 4;
-                    pDef->nBufferCountActual += (inputDelay ? inputDelay.value : 0u);
-                    pDef->nBufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
-                }
-            }
-
+            IAidlNode::InputBufferParams bufferParams;
+            mImpl->getInputBufferParams(&bufferParams);
+            pDef->nBufferCountActual = bufferParams.bufferCountActual;
             pDef->eDomain = OMX_PortDomainVideo;
-            pDef->format.video.nFrameWidth = mWidth;
-            pDef->format.video.nFrameHeight = mHeight;
+            pDef->format.video.nFrameWidth = bufferParams.frameWidth;
+            pDef->format.video.nFrameHeight = bufferParams.frameHeight;
             pDef->format.video.eColorFormat = OMX_COLOR_FormatAndroidOpaque;
             err = OK;
             break;
@@ -286,28 +110,34 @@
         return BAD_VALUE;
     }
     switch ((uint32_t)index) {
-        case OMX_IndexParamMaxFrameDurationForBitrateControl:
+        case OMX_IndexParamMaxFrameDurationForBitrateControl: {
             // handle max/fixed frame duration control
             if (size != sizeof(OMX_PARAM_U32TYPE)) {
                 return BAD_VALUE;
             }
             // The incoming number is an int32_t contained in OMX_U32.
-            mAdjustTimestampGapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+            int32_t gapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+            mImpl->setAdjustTimestampGapUs(gapUs);
             return OK;
-
-        case OMX_IndexParamConsumerUsageBits:
+        }
+        case OMX_IndexParamConsumerUsageBits: {
             if (size != sizeof(OMX_U32)) {
                 return BAD_VALUE;
             }
-            mUsage = *((OMX_U32 *)params);
+            uint32_t usage = *((OMX_U32 *)params);
+            mImpl->setConsumerUsageBits(static_cast<uint64_t>(usage));
             return OK;
-
-        case OMX_IndexParamConsumerUsageBits64:
+        }
+        case OMX_IndexParamConsumerUsageBits64: {
             if (size != sizeof(OMX_U64)) {
                 return BAD_VALUE;
             }
-            mUsage = *((OMX_U64 *)params);
+            uint64_t usagell = *((OMX_U64 *)params);
+            mImpl->setConsumerUsageBits(usagell);
             return OK;
+        }
+        default:
+            break;
     }
     return ERROR_UNSUPPORTED;
 }
@@ -359,14 +189,7 @@
 }
 
 status_t C2OMXNode::setInputSurface(const sp<IOMXBufferSource> &bufferSource) {
-    c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
-            C2PlatformAllocatorStore::GRALLOC,
-            &mAllocator);
-    if (err != OK) {
-        return UNKNOWN_ERROR;
-    }
-    mBufferSource = bufferSource;
-    return OK;
+    return mImpl->setInputSurface(bufferSource);
 }
 
 status_t C2OMXNode::allocateSecureBuffer(
@@ -402,105 +225,39 @@
     return ERROR_UNSUPPORTED;
 }
 
+namespace {
+    uint32_t toNodeFlags(OMX_U32 flags) {
+        uint32_t retFlags = 0;
+        if (flags & OMX_BUFFERFLAG_ENDOFFRAME) {
+            retFlags |= BUFFERFLAG_ENDOFFRAME;
+        }
+        if (flags & OMX_BUFFERFLAG_EOS) {
+            retFlags |= BUFFERFLAG_EOS;
+        }
+        return retFlags;
+    }
+    int64_t toNodeTimestamp(OMX_TICKS ticks) {
+        int64_t timestamp = 0;
+#ifndef OMX_SKIP64BIT
+        timestamp = ticks;
+#else
+        timestamp = ((ticks.nHighPart << 32) | ticks.nLowPart);
+#endif
+        return timestamp;
+    }
+} // anonymous namespace
+
 status_t C2OMXNode::emptyBuffer(
         buffer_id buffer, const OMXBuffer &omxBuf,
         OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
-    std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
-    if (!comp) {
-        return NO_INIT;
-    }
-
-    uint32_t c2Flags = (flags & OMX_BUFFERFLAG_EOS)
-            ? C2FrameData::FLAG_END_OF_STREAM : 0;
-    std::shared_ptr<C2GraphicBlock> block;
-
-    android::base::unique_fd fd0, fd1;
-    C2Handle *handle = nullptr;
     if (omxBuf.mBufferType == OMXBuffer::kBufferTypeANWBuffer
             && omxBuf.mGraphicBuffer != nullptr) {
-        std::shared_ptr<C2GraphicAllocation> alloc;
-        handle = WrapNativeCodec2GrallocHandle(
-                omxBuf.mGraphicBuffer->handle,
-                omxBuf.mGraphicBuffer->width,
-                omxBuf.mGraphicBuffer->height,
-                omxBuf.mGraphicBuffer->format,
-                omxBuf.mGraphicBuffer->usage,
-                omxBuf.mGraphicBuffer->stride);
-        if (handle != nullptr) {
-            // unique_fd takes ownership of the fds, we'll get warning if these
-            // fds get closed by somebody else. Onwership will be released before
-            // we return, so that the fds get closed as usually when this function
-            // goes out of scope (when both items and block are gone).
-            native_handle_t *nativeHandle = reinterpret_cast<native_handle_t*>(handle);
-            fd0.reset(nativeHandle->numFds > 0 ? nativeHandle->data[0] : -1);
-            fd1.reset(nativeHandle->numFds > 1 ? nativeHandle->data[1] : -1);
-        }
-        c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
-        if (err != OK) {
-            (void)fd0.release();
-            (void)fd1.release();
-            native_handle_close(handle);
-            native_handle_delete(handle);
-            return UNKNOWN_ERROR;
-        }
-        block = _C2BlockFactory::CreateGraphicBlock(alloc);
-    } else if (!(flags & OMX_BUFFERFLAG_EOS)) {
-        return BAD_VALUE;
+        return mImpl->submitBuffer(buffer, omxBuf.mGraphicBuffer, toNodeFlags(flags),
+                                  toNodeTimestamp(timestamp), fenceFd);
     }
-
-    std::unique_ptr<C2Work> work(new C2Work);
-    work->input.flags = (C2FrameData::flags_t)c2Flags;
-    work->input.ordinal.timestamp = timestamp;
-
-    // WORKAROUND: adjust timestamp based on gapUs
-    {
-        work->input.ordinal.customOrdinal = timestamp; // save input timestamp
-        if (mFirstInputFrame) {
-            // grab timestamps on first frame
-            mPrevInputTimestamp = timestamp;
-            mPrevCodecTimestamp = timestamp;
-            mFirstInputFrame = false;
-        } else if (mAdjustTimestampGapUs > 0) {
-            work->input.ordinal.timestamp =
-                mPrevCodecTimestamp
-                        + c2_min((timestamp - mPrevInputTimestamp).peek(), mAdjustTimestampGapUs);
-        } else if (mAdjustTimestampGapUs < 0) {
-            work->input.ordinal.timestamp = mPrevCodecTimestamp - mAdjustTimestampGapUs;
-        }
-        mPrevInputTimestamp = work->input.ordinal.customOrdinal;
-        mPrevCodecTimestamp = work->input.ordinal.timestamp;
-        ALOGV("adjusting %lld to %lld (gap=%lld)",
-              work->input.ordinal.customOrdinal.peekll(),
-              work->input.ordinal.timestamp.peekll(),
-              (long long)mAdjustTimestampGapUs);
-    }
-
-    work->input.ordinal.frameIndex = mFrameIndex++;
-    work->input.buffers.clear();
-    if (block) {
-        std::shared_ptr<C2Buffer> c2Buffer(
-                new Buffer2D(block->share(
-                        C2Rect(block->width(), block->height()), ::C2Fence())));
-        work->input.buffers.push_back(c2Buffer);
-        std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
-        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
-        GetHdrMetadataFromGralloc4Handle(
-                block->handle(),
-                &staticInfo,
-                &dynamicInfo);
-        if (staticInfo && *staticInfo) {
-            c2Buffer->setInfo(staticInfo);
-        }
-        if (dynamicInfo && *dynamicInfo) {
-            c2Buffer->setInfo(dynamicInfo);
-        }
-    }
-    work->worklets.clear();
-    work->worklets.emplace_back(new C2Worklet);
-    mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
-    mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
-
-    return OK;
+    sp<GraphicBuffer> gBuf;
+    return mImpl->submitBuffer(buffer, gBuf, toNodeFlags(flags),
+                              toNodeTimestamp(timestamp), fenceFd);
 }
 
 status_t C2OMXNode::getExtensionIndex(
@@ -517,56 +274,37 @@
     if (msg.u.event_data.event != OMX_EventDataSpaceChanged) {
         return ERROR_UNSUPPORTED;
     }
-    android_dataspace dataSpace = (android_dataspace)msg.u.event_data.data1;
-    uint32_t pixelFormat = msg.u.event_data.data3;
-
-    ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
-    mQueueThread->setDataspace(dataSpace);
-
-    mDataspace.lock().set(dataSpace);
-    mPixelFormat.lock().set(pixelFormat);
-    return OK;
+    return mImpl->onDataspaceChanged(
+            msg.u.event_data.data1,
+            msg.u.event_data.data3);
 }
 
 sp<IOMXBufferSource> C2OMXNode::getSource() {
-    return mBufferSource;
+    return mImpl->getSource();
 }
 
 void C2OMXNode::setFrameSize(uint32_t width, uint32_t height) {
-    mWidth = width;
-    mHeight = height;
+    return mImpl->setFrameSize(width, height);
 }
 
 void C2OMXNode::onInputBufferDone(c2_cntr64_t index) {
-    if (!mBufferSource) {
-        ALOGD("Buffer source not set (index=%llu)", index.peekull());
-        return;
-    }
+    return mImpl->onInputBufferDone(index);
+}
 
-    int32_t bufferId = 0;
-    {
-        decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
-        auto it = bufferIds->find(index.peeku());
-        if (it == bufferIds->end()) {
-            ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
-            return;
-        }
-        bufferId = it->second;
-        (void)bufferIds->erase(it);
-    }
-    (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
+void C2OMXNode::onInputBufferEmptied() {
+    return mImpl->onInputBufferEmptied();
 }
 
 android_dataspace C2OMXNode::getDataspace() {
-    return *mDataspace.lock();
+    return mImpl->getDataspace();
 }
 
 uint32_t C2OMXNode::getPixelFormat() {
-    return *mPixelFormat.lock();
+    return mImpl->getPixelFormat();
 }
 
 void C2OMXNode::setPriority(int priority) {
-    mQueueThread->setPriority(priority);
+    return mImpl->setPriority(priority);
 }
 
 }  // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index c8ce336..5549b88 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2018, The Android Open Source Project
+ * Copyright 2024, The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -17,16 +17,15 @@
 #ifndef C2_OMX_NODE_H_
 #define C2_OMX_NODE_H_
 
-#include <atomic>
-
 #include <android/IOMXBufferSource.h>
 #include <codec2/hidl/client.h>
-#include <media/stagefright/foundation/Mutexed.h>
 #include <media/IOMX.h>
 #include <media/OMXBuffer.h>
 
 namespace android {
 
+struct C2NodeImpl;
+
 /**
  * IOmxNode implementation around codec 2.0 component, only to be used in
  * IGraphicBufferSource::configure. Only subset of IOmxNode API is implemented
@@ -87,13 +86,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
@@ -109,30 +114,7 @@
     void setPriority(int priority);
 
 private:
-    std::weak_ptr<Codec2Client::Component> mComp;
-    sp<IOMXBufferSource> mBufferSource;
-    std::shared_ptr<C2Allocator> mAllocator;
-    std::atomic_uint64_t mFrameIndex;
-    uint32_t mWidth;
-    uint32_t mHeight;
-    uint64_t mUsage;
-    Mutexed<android_dataspace> mDataspace;
-    Mutexed<uint32_t> mPixelFormat;
-
-    // WORKAROUND: timestamp adjustment
-
-    // if >0: this is the max timestamp gap, if <0: this is -1 times the fixed timestamp gap
-    // if 0: no timestamp adjustment is made
-    // note that C2OMXNode can be recycled between encoding sessions.
-    int32_t mAdjustTimestampGapUs;
-    bool mFirstInputFrame; // true for first input
-    c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
-    c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
-
-    Mutexed<std::map<uint64_t, buffer_id>> mBufferIdsInUse;
-
-    class QueueThread;
-    sp<QueueThread> mQueueThread;
+    std::shared_ptr<C2NodeImpl> mImpl;
 };
 
 }  // namespace android
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 9c264af..0aae23c 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -21,11 +21,16 @@
 #include <sstream>
 #include <thread>
 
+#include <android_media_codec.h>
+
 #include <C2Config.h>
 #include <C2Debug.h>
 #include <C2ParamInternal.h>
 #include <C2PlatformSupport.h>
 
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/media/IAidlGraphicBufferSource.h>
+#include <aidl/android/media/IAidlBufferSource.h>
 #include <android/IOMXBufferSource.h>
 #include <android/hardware/media/c2/1.0/IInputSurface.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -40,6 +45,11 @@
 #include <media/openmax/OMX_Core.h>
 #include <media/openmax/OMX_IndexExt.h>
 #include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>
 #include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
 #include <media/stagefright/omx/OmxGraphicBufferSource.h>
 #include <media/stagefright/CCodec.h>
@@ -50,6 +60,7 @@
 #include <media/stagefright/RenderedFrameInfo.h>
 #include <utils/NativeHandle.h>
 
+#include "C2AidlNode.h"
 #include "C2OMXNode.h"
 #include "CCodecBufferChannel.h"
 #include "CCodecConfig.h"
@@ -64,8 +75,14 @@
 using ::android::hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
 using android::base::StringPrintf;
 using ::android::hardware::media::c2::V1_0::IInputSurface;
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+using ::android::media::AidlGraphicBufferSource;
+using ::android::media::WAidlGraphicBufferSource;
+using ::android::media::aidl_conversion::fromAidlStatus;
 
 typedef hardware::media::omx::V1_0::IGraphicBufferSource HGraphicBufferSource;
+typedef aidl::android::media::IAidlGraphicBufferSource AGraphicBufferSource;
 typedef CCodecConfig Config;
 
 namespace {
@@ -189,11 +206,11 @@
     std::shared_ptr<Codec2Client::InputSurfaceConnection> mConnection;
 };
 
-class GraphicBufferSourceWrapper : public InputSurfaceWrapper {
+class HGraphicBufferSourceWrapper : public InputSurfaceWrapper {
 public:
     typedef hardware::media::omx::V1_0::Status OmxStatus;
 
-    GraphicBufferSourceWrapper(
+    HGraphicBufferSourceWrapper(
             const sp<HGraphicBufferSource> &source,
             uint32_t width,
             uint32_t height,
@@ -202,7 +219,7 @@
         mDataSpace = HAL_DATASPACE_BT709;
         mConfig.mUsage = usage;
     }
-    ~GraphicBufferSourceWrapper() override = default;
+    ~HGraphicBufferSourceWrapper() override = default;
 
     status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
         mNode = new C2OMXNode(comp);
@@ -427,6 +444,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    void onInputBufferEmptied() override {
+        mNode->onInputBufferEmptied();
+    }
+
     android_dataspace getDataspace() override {
         return mNode->getDataspace();
     }
@@ -444,6 +465,228 @@
     Config mConfig;
 };
 
+class AGraphicBufferSourceWrapper : public InputSurfaceWrapper {
+public:
+    AGraphicBufferSourceWrapper(
+            const std::shared_ptr<AGraphicBufferSource> &source,
+            uint32_t width,
+            uint32_t height,
+            uint64_t usage)
+        : mSource(source), mWidth(width), mHeight(height) {
+        mDataSpace = HAL_DATASPACE_BT709;
+        mConfig.mUsage = usage;
+    }
+    ~AGraphicBufferSourceWrapper() override = default;
+
+    status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
+        mNode = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
+        mNode->setFrameSize(mWidth, mHeight);
+        // Usage is queried during configure(), so set it beforehand.
+        uint64_t usage = mConfig.mUsage;
+        (void)mNode->setConsumerUsage((int64_t)usage);
+
+        return fromAidlStatus(mSource->configure(
+                mNode, static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
+                        mDataSpace)));
+    }
+
+    void disconnect() override {
+        if (mNode == nullptr) {
+            return;
+        }
+        std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+        if (source == nullptr) {
+            ALOGD("GBSWrapper::disconnect: node is not configured with an IAidlBufferSource.");
+            return;
+        }
+        (void)source->onStop();
+        (void)source->onRelease();
+        mNode.reset();
+    }
+
+    status_t start() override {
+        std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+        if (source == nullptr) {
+            return NO_INIT;
+        }
+
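+        // Default to 16 input slots if the node cannot report its actual buffer count.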
+        size_t numSlots = 16;
+
+        IAidlNode::InputBufferParams param;
+        status_t err = fromAidlStatus(mNode->getInputBufferParams(&param));
+        if (err == OK) {
+            numSlots = param.bufferCountActual;
+        }
+
+        for (size_t i = 0; i < numSlots; ++i) {
+            (void)source->onInputBufferAdded(i);
+        }
+
+        (void)source->onStart();
+        return OK;
+    }
+
+    status_t signalEndOfInputStream() override {
+        return fromAidlStatus(mSource->signalEndOfInputStream());
+    }
+
+    status_t configure(Config &config) {
+        std::stringstream status;
+        status_t err = OK;
+
+        // handle each configuration granularly, in case we need to handle part of the configuration
+        // elsewhere
+
+        // TRICKY: we do not unset frame delay repeating
+        if (config.mMinFps > 0 && config.mMinFps != mConfig.mMinFps) {
+            int64_t us = 1e6 / config.mMinFps + 0.5;
+            status_t res = fromAidlStatus(mSource->setRepeatPreviousFrameDelayUs(us));
+            status << " minFps=" << config.mMinFps << " => repeatDelayUs=" << us;
+            if (res != OK) {
+                status << " (=> " << asString(res) << ")";
+                err = res;
+            }
+            mConfig.mMinFps = config.mMinFps;
+        }
+
+        // pts gap
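+        // (>0: maximum allowed timestamp gap; <0: -1 times a fixed timestamp gap;
+        //  0: no timestamp adjustment)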
+        if (config.mMinAdjustedFps > 0 || config.mFixedAdjustedFps > 0) {
+            if (mNode != nullptr) {
+                float gap = (config.mMinAdjustedFps > 0)
+                        ? c2_min(INT32_MAX + 0., 1e6 / config.mMinAdjustedFps + 0.5)
+                        : c2_max(0. - INT32_MAX, -1e6 / config.mFixedAdjustedFps - 0.5);
+                // float -> uint32_t is undefined if the value is negative.
+                // First convert to int32_t to ensure the expected behavior.
+                int32_t gapUs = int32_t(gap);
+                (void)mNode->setAdjustTimestampGapUs(gapUs);
+            }
+        }
+
+        // max fps
+        // TRICKY: we do not unset max fps to 0 unless using fixed fps
+        if ((config.mMaxFps > 0 || (config.mFixedAdjustedFps > 0 && config.mMaxFps == -1))
+                && config.mMaxFps != mConfig.mMaxFps) {
+            status_t res = fromAidlStatus(mSource->setMaxFps(config.mMaxFps));
+            status << " maxFps=" << config.mMaxFps;
+            if (res != OK) {
+                status << " (=> " << asString(res) << ")";
+                err = res;
+            }
+            mConfig.mMaxFps = config.mMaxFps;
+        }
+
+        if (config.mTimeOffsetUs != mConfig.mTimeOffsetUs) {
+            status_t res = fromAidlStatus(mSource->setTimeOffsetUs(config.mTimeOffsetUs));
+            status << " timeOffset " << config.mTimeOffsetUs << "us";
+            if (res != OK) {
+                status << " (=> " << asString(res) << ")";
+                err = res;
+            }
+            mConfig.mTimeOffsetUs = config.mTimeOffsetUs;
+        }
+
+        if (config.mCaptureFps != mConfig.mCaptureFps || config.mCodedFps != mConfig.mCodedFps) {
+            status_t res =
+                fromAidlStatus(mSource->setTimeLapseConfig(config.mCodedFps, config.mCaptureFps));
+            status << " timeLapse " << config.mCaptureFps << "fps as " << config.mCodedFps << "fps";
+            if (res != OK) {
+                status << " (=> " << asString(res) << ")";
+                err = res;
+            }
+            mConfig.mCaptureFps = config.mCaptureFps;
+            mConfig.mCodedFps = config.mCodedFps;
+        }
+
+        if (config.mStartAtUs != mConfig.mStartAtUs
+                || (config.mStopped != mConfig.mStopped && !config.mStopped)) {
+            status_t res = fromAidlStatus(mSource->setStartTimeUs(config.mStartAtUs));
+            status << " start at " << config.mStartAtUs << "us";
+            if (res != OK) {
+                status << " (=> " << asString(res) << ")";
+                err = res;
+            }
+            mConfig.mStartAtUs = config.mStartAtUs;
+            mConfig.mStopped = config.mStopped;
+        }
+
+        // suspend-resume
+        if (config.mSuspended != mConfig.mSuspended) {
+            status_t res = fromAidlStatus(mSource->setSuspend(
+                    config.mSuspended, config.mSuspendAtUs));
+            status << " " << (config.mSuspended ? "suspend" : "resume")
+                    << " at " << config.mSuspendAtUs << "us";
+            if (res != OK) {
+                status << " (=> " << asString(res) << ")";
+                err = res;
+            }
+            mConfig.mSuspended = config.mSuspended;
+            mConfig.mSuspendAtUs = config.mSuspendAtUs;
+        }
+
+        if (config.mStopped != mConfig.mStopped && config.mStopped) {
+            status_t res = fromAidlStatus(mSource->setStopTimeUs(config.mStopAtUs));
+            status << " stop at " << config.mStopAtUs << "us";
+            if (res != OK) {
+                status << " (=> " << asString(res) << ")";
+                err = res;
+            } else {
+                status << " delayUs";
+                res = fromAidlStatus(mSource->getStopTimeOffsetUs(&config.mInputDelayUs));
+                if (res != OK) {
+                    status << " (=> " << asString(res) << ")";
+                } else {
+                    status << "=" << config.mInputDelayUs << "us";
+                }
+                mConfig.mInputDelayUs = config.mInputDelayUs;
+            }
+            mConfig.mStopAtUs = config.mStopAtUs;
+            mConfig.mStopped = config.mStopped;
+        }
+
+        // color aspects (android._color-aspects)
+
+        // consumer usage is queried earlier.
+
+        // priority
+        if (mConfig.mPriority != config.mPriority) {
+            if (config.mPriority != INT_MAX) {
+                mNode->setPriority(config.mPriority);
+            }
+            mConfig.mPriority = config.mPriority;
+        }
+
+        if (status.str().empty()) {
+            ALOGD("ISConfig not changed");
+        } else {
+            ALOGD("ISConfig%s", status.str().c_str());
+        }
+        return err;
+    }
+
+    void onInputBufferDone(c2_cntr64_t index) override {
+        mNode->onInputBufferDone(index);
+    }
+
+    void onInputBufferEmptied() override {
+        mNode->onInputBufferEmptied();
+    }
+
+    android_dataspace getDataspace() override {
+        return mNode->getDataspace();
+    }
+
+    uint32_t getPixelFormat() override {
+        return mNode->getPixelFormat();
+    }
+
+private:
+    std::shared_ptr<AGraphicBufferSource> mSource;
+    std::shared_ptr<C2AidlNode> mNode;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    Config mConfig;
+};
+
 class Codec2ClientInterfaceWrapper : public C2ComponentStore {
     std::shared_ptr<Codec2Client> mClient;
 
@@ -1178,6 +1421,23 @@
             }
         }
 
+        /*
+         * Configure a placeholder (zero-offset, full-frame) region of interest if Feature_Roi is
+         * enabled and the app did not supply a QP offset map or rects.
+         */
+        if (android::media::codec::provider_->region_of_interest()
+            && android::media::codec::provider_->region_of_interest_support()) {
+            if ((config->mDomain & Config::IS_ENCODER) && (config->mDomain & Config::IS_VIDEO)) {
+                int32_t enableRoi;
+                if (msg->findInt32("feature-region-of-interest", &enableRoi) && enableRoi != 0) {
+                    if (!msg->contains(PARAMETER_KEY_QP_OFFSET_MAP) &&
+                        !msg->contains(PARAMETER_KEY_QP_OFFSET_RECTS)) {
+                        msg->setString(PARAMETER_KEY_QP_OFFSET_RECTS,
+                                       AStringPrintf("%d,%d-%d,%d=%d;", 0, 0, height, width, 0));
+                    }
+                }
+            }
+        }
+
         std::vector<std::unique_ptr<C2Param>> configUpdate;
         // NOTE: We used to ignore "video-bitrate" at configure; replicate
         //       the behavior here.
@@ -1458,7 +1718,8 @@
                 int64_t blockUsage =
                     usage.value | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE;
                 std::shared_ptr<C2GraphicBlock> block = FetchGraphicBlock(
-                        width, height, componentColorFormat, blockUsage, {comp->getName()});
+                        align(width, 2), align(height, 2), componentColorFormat, blockUsage,
+                        {comp->getName()});
                 sp<GraphicBlockBuffer> buffer;
                 if (block) {
                     buffer = GraphicBlockBuffer::Allocate(
@@ -1627,28 +1888,46 @@
     }
 
     sp<PersistentSurface> persistentSurface = CreateCompatibleInputSurface();
-    sp<hidl::base::V1_0::IBase> hidlTarget = persistentSurface->getHidlTarget();
-    sp<IInputSurface> hidlInputSurface = IInputSurface::castFrom(hidlTarget);
-    sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
-
-    if (hidlInputSurface) {
-        std::shared_ptr<Codec2Client::InputSurface> inputSurface =
-                std::make_shared<Codec2Client::InputSurface>(hidlInputSurface);
-        err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
-                inputSurface));
-        bufferProducer = inputSurface->getGraphicBufferProducer();
-    } else if (gbs) {
-        int32_t width = 0;
-        (void)outputFormat->findInt32("width", &width);
-        int32_t height = 0;
-        (void)outputFormat->findInt32("height", &height);
-        err = setupInputSurface(std::make_shared<GraphicBufferSourceWrapper>(
-                gbs, width, height, usage));
-        bufferProducer = persistentSurface->getBufferProducer();
+    if (persistentSurface->isTargetAidl()) {
+        ::ndk::SpAIBinder aidlTarget = persistentSurface->getAidlTarget();
+        std::shared_ptr<AGraphicBufferSource> gbs = AGraphicBufferSource::fromBinder(aidlTarget);
+        if (gbs) {
+            int32_t width = 0;
+            (void)outputFormat->findInt32("width", &width);
+            int32_t height = 0;
+            (void)outputFormat->findInt32("height", &height);
+            err = setupInputSurface(std::make_shared<AGraphicBufferSourceWrapper>(
+                    gbs, width, height, usage));
+            bufferProducer = persistentSurface->getBufferProducer();
+        } else {
+            ALOGE("Corrupted input surface(aidl)");
+            mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
+            return;
+        }
     } else {
-        ALOGE("Corrupted input surface");
-        mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
-        return;
+        sp<hidl::base::V1_0::IBase> hidlTarget = persistentSurface->getHidlTarget();
+        sp<IInputSurface> hidlInputSurface = IInputSurface::castFrom(hidlTarget);
+        sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
+
+        if (hidlInputSurface) {
+            std::shared_ptr<Codec2Client::InputSurface> inputSurface =
+                    std::make_shared<Codec2Client::InputSurface>(hidlInputSurface);
+            err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
+                    inputSurface));
+            bufferProducer = inputSurface->getGraphicBufferProducer();
+        } else if (gbs) {
+            int32_t width = 0;
+            (void)outputFormat->findInt32("width", &width);
+            int32_t height = 0;
+            (void)outputFormat->findInt32("height", &height);
+            err = setupInputSurface(std::make_shared<HGraphicBufferSourceWrapper>(
+                    gbs, width, height, usage));
+            bufferProducer = persistentSurface->getBufferProducer();
+        } else {
+            ALOGE("Corrupted input surface");
+            mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
+            return;
+        }
     }
 
     if (err != OK) {
@@ -1743,33 +2022,56 @@
         outputFormat = config->mOutputFormat;
         usage = config->mISConfig ? config->mISConfig->mUsage : 0;
     }
-    sp<hidl::base::V1_0::IBase> hidlTarget = surface->getHidlTarget();
-    sp<IInputSurface> inputSurface = IInputSurface::castFrom(hidlTarget);
-    sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
-    if (inputSurface) {
-        status_t err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
-                std::make_shared<Codec2Client::InputSurface>(inputSurface)));
-        if (err != OK) {
-            ALOGE("Failed to set up input surface: %d", err);
-            mCallback->onInputSurfaceDeclined(err);
-            return;
-        }
-    } else if (gbs) {
-        int32_t width = 0;
-        (void)outputFormat->findInt32("width", &width);
-        int32_t height = 0;
-        (void)outputFormat->findInt32("height", &height);
-        status_t err = setupInputSurface(std::make_shared<GraphicBufferSourceWrapper>(
-                gbs, width, height, usage));
-        if (err != OK) {
-            ALOGE("Failed to set up input surface: %d", err);
-            mCallback->onInputSurfaceDeclined(err);
+    if (surface->isTargetAidl()) {
+        ::ndk::SpAIBinder aidlTarget = surface->getAidlTarget();
+        std::shared_ptr<AGraphicBufferSource> gbs = AGraphicBufferSource::fromBinder(aidlTarget);
+        if (gbs) {
+            int32_t width = 0;
+            (void)outputFormat->findInt32("width", &width);
+            int32_t height = 0;
+            (void)outputFormat->findInt32("height", &height);
+
+            status_t err = setupInputSurface(std::make_shared<AGraphicBufferSourceWrapper>(
+                    gbs, width, height, usage));
+            if (err != OK) {
+                ALOGE("Failed to set up input surface(aidl): %d", err);
+                mCallback->onInputSurfaceDeclined(err);
+                return;
+            }
+        } else {
+            ALOGE("Failed to set input surface(aidl): Corrupted surface.");
+            mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
             return;
         }
     } else {
-        ALOGE("Failed to set input surface: Corrupted surface.");
-        mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
-        return;
+        sp<hidl::base::V1_0::IBase> hidlTarget = surface->getHidlTarget();
+        sp<IInputSurface> inputSurface = IInputSurface::castFrom(hidlTarget);
+        sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
+        if (inputSurface) {
+            status_t err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
+                    std::make_shared<Codec2Client::InputSurface>(inputSurface)));
+            if (err != OK) {
+                ALOGE("Failed to set up input surface: %d", err);
+                mCallback->onInputSurfaceDeclined(err);
+                return;
+            }
+        } else if (gbs) {
+            int32_t width = 0;
+            (void)outputFormat->findInt32("width", &width);
+            int32_t height = 0;
+            (void)outputFormat->findInt32("height", &height);
+            status_t err = setupInputSurface(std::make_shared<HGraphicBufferSourceWrapper>(
+                    gbs, width, height, usage));
+            if (err != OK) {
+                ALOGE("Failed to set up input surface: %d", err);
+                mCallback->onInputSurfaceDeclined(err);
+                return;
+            }
+        } else {
+            ALOGE("Failed to set input surface: Corrupted surface.");
+            mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
+            return;
+        }
     }
     // Formats can change after setupInputSurface
     sp<AMessage> inputFormat;
@@ -1933,8 +2235,23 @@
     // So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
     // prior to comp->stop().
     // See also b/300350761.
-    mChannel->stopUseOutputSurface(pushBlankBuffer);
-    status_t err = comp->stop();
+    //
+    // The workaround is no longer needed with the fetchGraphicBlock & C2Fence changes,
+    // so we revert to the logical sequence of operations when AIDL HALs are selected.
+    // When HIDL HALs are selected, we retain the workaround (the reversed order) as
+    // the default in order to keep legacy behavior.
+    bool stopHalBeforeSurface =
+            Codec2Client::IsAidlSelected() ||
+            property_get_bool("debug.codec2.stop_hal_before_surface", false);
+    status_t err = C2_OK;
+    if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+        err = comp->stop();
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+    } else {
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+        err = comp->stop();
+    }
     if (err != C2_OK) {
         // TODO: convert err into status_t
         mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -2029,8 +2346,22 @@
     // So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
     // prior to comp->release().
     // See also b/300350761.
-    mChannel->stopUseOutputSurface(pushBlankBuffer);
-    comp->release();
+    //
+    // The workaround is no longer needed with the fetchGraphicBlock & C2Fence changes,
+    // so we revert to the logical sequence of operations when AIDL HALs are selected.
+    // When HIDL HALs are selected, we retain the workaround (the reversed order) as
+    // the default in order to keep legacy behavior.
+    bool stopHalBeforeSurface =
+            Codec2Client::IsAidlSelected() ||
+            property_get_bool("debug.codec2.stop_hal_before_surface", false);
+    if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+        comp->release();
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+    } else {
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+        comp->release();
+    }
 
     {
         Mutexed<State>::Locked state(mState);
@@ -2259,6 +2590,53 @@
         }
     }
 
+    /**
+     * Handle ROI QP map configuration. Recover the QP map configuration from AMessage as an
+     * ABuffer and pass it to CCodecBufferChannel as a C2InfoBuffer.
+     */
+    if (android::media::codec::provider_->region_of_interest()
+            && android::media::codec::provider_->region_of_interest_support()) {
+        sp<ABuffer> qpOffsetMap;
+        if ((config->mDomain & (Config::IS_VIDEO | Config::IS_IMAGE))
+                && (config->mDomain & Config::IS_ENCODER)
+                &&  params->findBuffer(PARAMETER_KEY_QP_OFFSET_MAP, &qpOffsetMap)) {
+            std::shared_ptr<C2BlockPool> pool;
+            // TODO(b/331443865) Use pooled block pool to improve efficiency
+            c2_status_t status = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &pool);
+
+            if (status == C2_OK) {
+                int width, height;
+                config->mInputFormat->findInt32("width", &width);
+                config->mInputFormat->findInt32("height", &height);
+                // The length of the qp-map corresponds to the number of 16x16 blocks in one frame
+                int expectedMapSize = ((width + 15) / 16) * ((height + 15) / 16);
+                size_t mapSize = qpOffsetMap->size();
+                if (mapSize >= expectedMapSize) {
+                    std::shared_ptr<C2LinearBlock> block;
+                    status = pool->fetchLinearBlock(
+                            expectedMapSize,
+                            C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE},
+                            &block);
+                    if (status == C2_OK && !block->map().get().error()) {
+                        C2WriteView wView = block->map().get();
+                        uint8_t* outData = wView.data();
+                        memcpy(outData, qpOffsetMap->data(), expectedMapSize);
+                        C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
+                                kParamIndexQpOffsetMapBuffer,
+                                block->share(0, expectedMapSize, C2Fence()));
+                        mChannel->setInfoBuffer(std::make_shared<C2InfoBuffer>(info));
+                    }
+                } else {
+                    ALOGE("Ignoring param key %s as buffer size %zu is less than expected "
+                          "buffer size %d",
+                          PARAMETER_KEY_QP_OFFSET_MAP, mapSize, expectedMapSize);
+                }
+            }
+            params->removeEntryByName(PARAMETER_KEY_QP_OFFSET_MAP);
+        }
+    }
+
     std::vector<std::unique_ptr<C2Param>> configUpdate;
     (void)config->getConfigUpdateFromSdkParams(
             comp, params, Config::IS_PARAM, C2_MAY_BLOCK, &configUpdate);
@@ -2268,6 +2646,15 @@
     if (config->mInputSurface == nullptr
             && (property_get_bool("debug.stagefright.ccodec_delayed_params", false)
                     || comp->getName().find("c2.android.") == 0)) {
+        std::vector<std::unique_ptr<C2Param>> localConfigUpdate;
+        for (const std::unique_ptr<C2Param> &param : configUpdate) {
+            if (param && param->coreIndex().coreIndex() == C2StreamSurfaceScalingInfo::CORE_INDEX) {
+                localConfigUpdate.push_back(C2Param::Copy(*param));
+            }
+        }
+        if (!localConfigUpdate.empty()) {
+            (void)config->setParameters(comp, localConfigUpdate, C2_MAY_BLOCK);
+        }
         mChannel->setParameters(configUpdate);
     } else {
         sp<AMessage> outputFormat = config->mOutputFormat;
@@ -2661,15 +3048,32 @@
             Codec2Client::CreateInputSurface();
     if (!inputSurface) {
         if (property_get_int32("debug.stagefright.c2inputsurface", 0) == -1) {
-            sp<IGraphicBufferProducer> gbp;
-            sp<OmxGraphicBufferSource> gbs = new OmxGraphicBufferSource();
-            status_t err = gbs->initCheck();
-            if (err != OK) {
-                ALOGE("Failed to create persistent input surface: error %d", err);
-                return nullptr;
+            if (Codec2Client::IsAidlSelected()) {
+                sp<IGraphicBufferProducer> gbp;
+                sp<AidlGraphicBufferSource> gbs = new AidlGraphicBufferSource();
+                status_t err = gbs->initCheck();
+                if (err != OK) {
+                    ALOGE("Failed to create persistent input surface: error %d", err);
+                    return nullptr;
+                }
+                ALOGD("aidl based PersistentSurface created");
+                std::shared_ptr<WAidlGraphicBufferSource> wrapper =
+                        ::ndk::SharedRefBase::make<WAidlGraphicBufferSource>(gbs);
+
+                return new PersistentSurface(
+                      gbs->getIGraphicBufferProducer(), wrapper->asBinder());
+            } else {
+                sp<IGraphicBufferProducer> gbp;
+                sp<OmxGraphicBufferSource> gbs = new OmxGraphicBufferSource();
+                status_t err = gbs->initCheck();
+                if (err != OK) {
+                    ALOGE("Failed to create persistent input surface: error %d", err);
+                    return nullptr;
+                }
+                ALOGD("hidl based PersistentSurface created");
+                return new PersistentSurface(
+                        gbs->getIGraphicBufferProducer(), new TWGraphicBufferSource(gbs));
             }
-            return new PersistentSurface(
-                    gbs->getIGraphicBufferProducer(), new TWGraphicBufferSource(gbs));
         } else {
             return nullptr;
         }
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 6e6d3f7..7583e6f 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -28,6 +28,8 @@
 #include <thread>
 #include <chrono>
 
+#include <android_media_codec.h>
+
 #include <C2AllocatorGralloc.h>
 #include <C2PlatformSupport.h>
 #include <C2BlockInternal.h>
@@ -90,6 +92,28 @@
     return v == "true";
 }
 
+// Flags can come with individual BufferInfos
+// when used with large frame audio
+constexpr static std::initializer_list<std::pair<uint32_t, uint32_t>> flagList = {
+        {BUFFER_FLAG_CODEC_CONFIG, C2FrameData::FLAG_CODEC_CONFIG},
+        {BUFFER_FLAG_END_OF_STREAM, C2FrameData::FLAG_END_OF_STREAM},
+        {BUFFER_FLAG_DECODE_ONLY, C2FrameData::FLAG_DROP_FRAME}
+};
+
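+// Maps between MediaCodec BUFFER_FLAG_* values and C2FrameData::flags_t;
+// the direction of the conversion is selected by |toC2|.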
+static uint32_t convertFlags(uint32_t flags, bool toC2) {
+    return std::transform_reduce(
+            flagList.begin(), flagList.end(),
+            0u,
+            std::bit_or{},
+            [flags, toC2](const std::pair<uint32_t, uint32_t> &entry) {
+                if (toC2) {
+                    return (flags & entry.first) ? entry.second : 0;
+                } else {
+                    return (flags & entry.second) ? entry.first : 0;
+                }
+            });
+}
+
 }  // namespace
 
 CCodecBufferChannel::QueueGuard::QueueGuard(
@@ -245,7 +269,8 @@
     if (buffer->meta()->findInt32("decode-only", &tmp) && tmp) {
         flags |= C2FrameData::FLAG_DROP_FRAME;
     }
-    ALOGV("[%s] queueInputBuffer: buffer->size() = %zu", mName, buffer->size());
+    ALOGV("[%s] queueInputBuffer: buffer->size() = %zu time: %lld",
+            mName, buffer->size(), (long long)timeUs);
     std::list<std::unique_ptr<C2Work>> items;
     std::unique_ptr<C2Work> work(new C2Work);
     work->input.ordinal.timestamp = timeUs;
@@ -296,6 +321,34 @@
                 uint64_t frameIndex = work->input.ordinal.frameIndex.peeku();
                 output->rotation[frameIndex] = rotation;
             }
+            sp<RefBase> obj;
+            if (buffer->meta()->findObject("accessUnitInfo", &obj)) {
+                ALOGV("Filling C2Info from multiple access units");
+                sp<WrapperObject<std::vector<AccessUnitInfo>>> infos{
+                        (decltype(infos.get()))obj.get()};
+                std::vector<AccessUnitInfo> &accessUnitInfoVec = infos->value;
+                std::vector<C2AccessUnitInfosStruct> multipleAccessUnitInfos;
+                uint32_t outFlags = 0;
+                for (int i = 0; i < accessUnitInfoVec.size(); i++) {
+                    outFlags = 0;
+                    outFlags = convertFlags(accessUnitInfoVec[i].mFlags, true);
+                    if (eos && (outFlags & C2FrameData::FLAG_END_OF_STREAM)) {
+                        outFlags &= (~C2FrameData::FLAG_END_OF_STREAM);
+                    }
+                    multipleAccessUnitInfos.emplace_back(
+                            outFlags,
+                            accessUnitInfoVec[i].mSize,
+                            accessUnitInfoVec[i].mTimestamp);
+                    ALOGV("%d) flags: %d, size: %d, time: %lld",
+                            i, outFlags, accessUnitInfoVec[i].mSize,
+                            (long long)accessUnitInfoVec[i].mTimestamp);
+
+                }
+                const std::shared_ptr<C2AccessUnitInfos::input> c2AccessUnitInfos =
+                        C2AccessUnitInfos::input::AllocShared(
+                                multipleAccessUnitInfos.size(), 0u, multipleAccessUnitInfos);
+                c2buffer->setInfo(c2AccessUnitInfos);
+            }
             work->input.buffers.push_back(c2buffer);
             if (encryptedBlock) {
                 work->input.infoBuffers.emplace_back(C2InfoBuffer::CreateLinearBuffer(
@@ -319,7 +372,17 @@
         }
     } else {
         work->input.flags = (C2FrameData::flags_t)flags;
+
         // TODO: fill info's
+        if (android::media::codec::provider_->region_of_interest()
+                && android::media::codec::provider_->region_of_interest_support()) {
+            if (mInfoBuffers.size()) {
+                for (auto infoBuffer : mInfoBuffers) {
+                    work->input.infoBuffers.emplace_back(*infoBuffer);
+                }
+                mInfoBuffers.clear();
+            }
+        }
 
         work->input.configUpdate = std::move(mParamsToBeSet);
         if (tunnelFirstFrame) {
@@ -432,6 +495,131 @@
     return heapSeqNum;
 }
 
+typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+typedef WrapperObject<std::vector<std::unique_ptr<CodecCryptoInfo>>> CryptoInfosWrapper;
+status_t CCodecBufferChannel::attachEncryptedBuffers(
+        const sp<hardware::HidlMemory> &memory,
+        size_t offset,
+        const sp<MediaCodecBuffer> &buffer,
+        bool secure,
+        AString* errorDetailMsg) {
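+    // Decrypts the access units described by the "accessUnitInfo"/"cryptoInfos" metadata from
+    // |memory| and attaches the combined output to |buffer| as a single linear C2Buffer.
+    // Secure and descrambler-only sessions are delegated to attachEncryptedBuffer().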
+    static const C2MemoryUsage kDefaultReadWriteUsage{
+        C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+    if (!hasCryptoOrDescrambler()) {
+        ALOGE("attachEncryptedBuffers requires Crypto/descrambler object");
+        return -ENOSYS;
+    }
+    size_t size = 0;
+    CHECK(buffer->meta()->findSize("ssize", &size));
+    if (size == 0) {
+        buffer->setRange(0, 0);
+        return OK;
+    }
+    sp<RefBase> obj;
+    CHECK(buffer->meta()->findObject("cryptoInfos", &obj));
+    sp<CryptoInfosWrapper> cryptoInfos{(CryptoInfosWrapper *)obj.get()};
+    CHECK(buffer->meta()->findObject("accessUnitInfo", &obj));
+    sp<BufferInfosWrapper> bufferInfos{(BufferInfosWrapper *)obj.get()};
+    if (secure || (mCrypto == nullptr)) {
+        if (cryptoInfos->value.size() != 1) {
+            ALOGE("Cannot decrypt multiple access units");
+            return -ENOSYS;
+        }
+        // we are dealing with just one cryptoInfo or descrambler.
+        std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[0]);
+        if (info == nullptr) {
+            ALOGE("Cannot decrypt, CryptoInfos are null.");
+            return -ENOSYS;
+        }
+        return attachEncryptedBuffer(
+                memory,
+                secure,
+                info->mKey,
+                info->mIv,
+                info->mMode,
+                info->mPattern,
+                offset,
+                info->mSubSamples,
+                info->mNumSubSamples,
+                buffer,
+                errorDetailMsg);
+    }
+    std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+    std::shared_ptr<C2LinearBlock> block;
+    c2_status_t err = pool->fetchLinearBlock(
+            size,
+            kDefaultReadWriteUsage,
+            &block);
+    if (err != C2_OK) {
+        ALOGI("[%s] attachEncryptedBuffers: fetchLinearBlock failed: size = %zu (%s) err = %d",
+              mName, size, secure ? "secure" : "non-secure", err);
+        return NO_MEMORY;
+    }
+    ensureDecryptDestination(size);
+    C2WriteView wView = block->map().get();
+    if (wView.error() != C2_OK) {
+        ALOGI("[%s] attachEncryptedBuffers: block map error: %d (non-secure)",
+              mName, wView.error());
+        return UNKNOWN_ERROR;
+    }
+
+    ssize_t result = -1;
+    size_t srcOffset = offset;
+    size_t outBufferSize = 0;
+    uint32_t cryptoInfoIdx = 0;
+    int32_t heapSeqNum = getHeapSeqNum(memory);
+    hardware::drm::V1_0::SharedBuffer src{(uint32_t)heapSeqNum, offset, size};
+    hardware::drm::V1_0::DestinationBuffer dst;
+    dst.type = DrmBufferType::SHARED_MEMORY;
+    IMemoryToSharedBuffer(
+            mDecryptDestination, mHeapSeqNum, &dst.nonsecureMemory);
+    for (int i = 0; i < bufferInfos->value.size(); i++) {
+        if (bufferInfos->value[i].mSize > 0) {
+            std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[cryptoInfoIdx++]);
+            src.offset = srcOffset;
+            src.size = bufferInfos->value[i].mSize;
+            result = mCrypto->decrypt(
+                    (uint8_t*)info->mKey,
+                    (uint8_t*)info->mIv,
+                    info->mMode,
+                    info->mPattern,
+                    src,
+                    0,
+                    info->mSubSamples,
+                    info->mNumSubSamples,
+                    dst,
+                    errorDetailMsg);
+            srcOffset += bufferInfos->value[i].mSize;
+            if (result < 0) {
+                ALOGI("[%s] attachEncryptedBuffers: decrypt failed: result = %zd",
+                        mName, result);
+                return result;
+            }
+            if (wView.error() == C2_OK) {
+                if (wView.size() < result) {
+                    ALOGI("[%s] attachEncryptedBuffers: block size too small:"
+                            "size=%u result=%zd (non-secure)", mName, wView.size(), result);
+                    return UNKNOWN_ERROR;
+                }
+                memcpy(wView.data(), mDecryptDestination->unsecurePointer(), result);
+                bufferInfos->value[i].mSize = result;
+                wView.setOffset(wView.offset() + result);
+            }
+            outBufferSize += result;
+        }
+    }
+    if (wView.error() == C2_OK) {
+        wView.setOffset(0);
+    }
+    std::shared_ptr<C2Buffer> c2Buffer{C2Buffer::CreateLinearBuffer(
+            block->share(0, outBufferSize, C2Fence{}))};
+    if (!buffer->copy(c2Buffer)) {
+        ALOGI("[%s] attachEncryptedBuffers: buffer copy failed", mName);
+        return -ENOSYS;
+    }
+    return OK;
+}
+
 status_t CCodecBufferChannel::attachEncryptedBuffer(
         const sp<hardware::HidlMemory> &memory,
         bool secure,
@@ -726,6 +914,140 @@
     return queueInputBufferInternal(buffer, block, bufferSize);
 }
 
+status_t CCodecBufferChannel::queueSecureInputBuffers(
+        const sp<MediaCodecBuffer> &buffer,
+        bool secure,
+        AString *errorDetailMsg) {
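+    // Decrypts each access unit described by the "accessUnitInfo"/"cryptoInfos" metadata and
+    // queues the resulting buffer; secure and descrambler-only sessions are delegated to
+    // queueSecureInputBuffer().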
+    QueueGuard guard(mSync);
+    if (!guard.isRunning()) {
+        ALOGD("[%s] No more buffers should be queued at current state.", mName);
+        return -ENOSYS;
+    }
+
+    if (!hasCryptoOrDescrambler()) {
+        ALOGE("queueSecureInputBuffers requires a Crypto/descrambler Object");
+        return -ENOSYS;
+    }
+    sp<RefBase> obj;
+    CHECK(buffer->meta()->findObject("cryptoInfos", &obj));
+    sp<CryptoInfosWrapper> cryptoInfos{(CryptoInfosWrapper *)obj.get()};
+    CHECK(buffer->meta()->findObject("accessUnitInfo", &obj));
+    sp<BufferInfosWrapper> bufferInfos{(BufferInfosWrapper *)obj.get()};
+    if (secure || mCrypto == nullptr) {
+        if (cryptoInfos->value.size() != 1) {
+            ALOGE("Cannot decrypt multiple access units on native handles");
+            return -ENOSYS;
+        }
+        std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[0]);
+        if (info == nullptr) {
+            ALOGE("Cannot decrypt, CryptoInfos are null");
+            return -ENOSYS;
+        }
+        return queueSecureInputBuffer(
+                buffer,
+                secure,
+                info->mKey,
+                info->mIv,
+                info->mMode,
+                info->mPattern,
+                info->mSubSamples,
+                info->mNumSubSamples,
+                errorDetailMsg);
+    }
+    sp<EncryptedLinearBlockBuffer> encryptedBuffer((EncryptedLinearBlockBuffer *)buffer.get());
+
+    std::shared_ptr<C2LinearBlock> block;
+    size_t allocSize = buffer->size();
+    size_t bufferSize = 0;
+    c2_status_t blockRes = C2_OK;
+    bool copied = false;
+    ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+            "CCodecBufferChannel::decrypt(%s)", mName).c_str());
+    if (mSendEncryptedInfoBuffer) {
+        static const C2MemoryUsage kDefaultReadWriteUsage{
+            C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+        constexpr int kAllocGranule0 = 1024 * 64;
+        constexpr int kAllocGranule1 = 1024 * 1024;
+        std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+        // round up encrypted sizes to limit fragmentation and encourage buffer reuse
+        if (allocSize <= kAllocGranule1) {
+            bufferSize = align(allocSize, kAllocGranule0);
+        } else {
+            bufferSize = align(allocSize, kAllocGranule1);
+        }
+        blockRes = pool->fetchLinearBlock(
+                bufferSize, kDefaultReadWriteUsage, &block);
+
+        if (blockRes == C2_OK) {
+            C2WriteView view = block->map().get();
+            if (view.error() == C2_OK && view.size() == bufferSize) {
+                copied = true;
+                // TODO: only copy clear sections
+                memcpy(view.data(), buffer->data(), allocSize);
+            }
+        }
+    }
+
+    if (!copied) {
+        block.reset();
+    }
+    // size of cryptoInfo and accessUnitInfo should be the same?
+    ssize_t result = -1;
+    size_t srcOffset = 0;
+    size_t outBufferSize = 0;
+    uint32_t cryptoInfoIdx = 0;
+    {
+        // scoped this block to enable destruction of mappedBlock
+        std::unique_ptr<EncryptedLinearBlockBuffer::MappedBlock> mappedBlock = nullptr;
+        hardware::drm::V1_0::DestinationBuffer destination;
+        destination.type = DrmBufferType::SHARED_MEMORY;
+        IMemoryToSharedBuffer(
+                mDecryptDestination, mHeapSeqNum, &destination.nonsecureMemory);
+        encryptedBuffer->getMappedBlock(&mappedBlock);
+        hardware::drm::V1_0::SharedBuffer source;
+        encryptedBuffer->fillSourceBuffer(&source);
+        srcOffset = source.offset;
+        for (int i = 0 ; i < bufferInfos->value.size(); i++) {
+            if (bufferInfos->value[i].mSize > 0) {
+                std::unique_ptr<CodecCryptoInfo> info =
+                        std::move(cryptoInfos->value[cryptoInfoIdx++]);
+                if (info->mNumSubSamples == 1
+                        && info->mSubSamples[0].mNumBytesOfClearData == 0
+                        && info->mSubSamples[0].mNumBytesOfEncryptedData == 0) {
+                    // no data so we only populate the bufferInfo
+                    result = 0;
+                } else {
+                    source.offset = srcOffset;
+                    source.size = bufferInfos->value[i].mSize;
+                    result = mCrypto->decrypt(
+                            (uint8_t*)info->mKey,
+                            (uint8_t*)info->mIv,
+                            info->mMode,
+                            info->mPattern,
+                            source,
+                            buffer->offset(),
+                            info->mSubSamples,
+                            info->mNumSubSamples,
+                            destination,
+                            errorDetailMsg);
+                    srcOffset += bufferInfos->value[i].mSize;
+                    if (result < 0) {
+                        ALOGI("[%s] decrypt failed: result=%zd", mName, result);
+                        return result;
+                    }
+                    if (destination.type == DrmBufferType::SHARED_MEMORY && mappedBlock) {
+                        mappedBlock->copyDecryptedContent(mDecryptDestination, result);
+                    }
+                    bufferInfos->value[i].mSize = result;
+                    outBufferSize += result;
+                }
+            }
+        }
+        buffer->setRange(0, outBufferSize);
+    }
+    return queueInputBufferInternal(buffer, block, bufferSize);
+}
+
 void CCodecBufferChannel::feedInputBufferIfAvailable() {
     QueueGuard guard(mSync);
     if (!guard.isRunning()) {
@@ -747,6 +1069,10 @@
             return;
         }
     }
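+    // When input-surface throttling is enabled, notify the input surface that a buffer
+    // was emptied so it can pace its submissions.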
+    if (android::media::codec::provider_->input_surface_throttle()
+            && mInputSurface != nullptr) {
+        mInputSurface->onInputBufferEmptied();
+    }
     size_t numActiveSlots = 0;
     while (!mPipelineWatcher.lock()->pipelineFull()) {
         sp<MediaCodecBuffer> inBuffer;
@@ -1143,6 +1469,17 @@
     }
 }
 
+void CCodecBufferChannel::onBufferAttachedToOutputSurface(uint32_t generation) {
+    // Note: Since this is called asynchronously from IProducerListener not
+    // knowing the internal state of CCodec/CCodecBufferChannel,
+    // prevent mComponent from being destroyed by holding the shared reference
+    // during this interface being executed.
+    std::shared_ptr<Codec2Client::Component> comp = mComponent;
+    if (comp) {
+        comp->onBufferAttachedToOutputSurface(generation);
+    }
+}
+
 status_t CCodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
     ALOGV("[%s] discardBuffer: %p", mName, buffer.get());
     bool released = false;
@@ -1748,6 +2085,7 @@
 void CCodecBufferChannel::stop() {
     mSync.stop();
     mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
+    mInfoBuffers.clear();
 }
 
 void CCodecBufferChannel::stopUseOutputSurface(bool pushBlankBuffer) {
@@ -1789,6 +2127,7 @@
 }
 
 void CCodecBufferChannel::release() {
+    mInfoBuffers.clear();
     mComponent.reset();
     mInputAllocator.reset();
     mOutputSurface.lock()->surface.clear();
@@ -1854,6 +2193,7 @@
             output->buffers->flushStash();
         }
     }
+    mInfoBuffers.clear();
 }
 
 void CCodecBufferChannel::onWorkDone(
@@ -2265,12 +2605,34 @@
         case OutputBuffers::DISCARD:
             break;
         case OutputBuffers::NOTIFY_CLIENT:
+        {
             // TRICKY: we want popped buffers reported in order, so sending
             // the callback while holding the lock here. This assumes that
             // onOutputBufferAvailable() does not block. onOutputBufferAvailable()
             // callbacks are always sent with the Output lock held.
+            if (c2Buffer) {
+                std::shared_ptr<const C2AccessUnitInfos::output> bufferMetadata =
+                        std::static_pointer_cast<const C2AccessUnitInfos::output>(
+                        c2Buffer->getInfo(C2AccessUnitInfos::output::PARAM_TYPE));
+                if (bufferMetadata && bufferMetadata->flexCount() > 0) {
+                    uint32_t flag = 0;
+                    std::vector<AccessUnitInfo> accessUnitInfos;
+                    for (int nMeta = 0; nMeta < bufferMetadata->flexCount(); nMeta++) {
+                        const C2AccessUnitInfosStruct &bufferMetadataStruct =
+                                bufferMetadata->m.values[nMeta];
+                        flag = convertFlags(bufferMetadataStruct.flags, false);
+                        accessUnitInfos.emplace_back(flag,
+                                static_cast<size_t>(bufferMetadataStruct.size),
+                                static_cast<size_t>(bufferMetadataStruct.timestamp));
+                    }
+                    sp<WrapperObject<std::vector<AccessUnitInfo>>> obj{
+                        new WrapperObject<std::vector<AccessUnitInfo>>{accessUnitInfos}};
+                    outBuffer->meta()->setObject("accessUnitInfo", obj);
+                }
+            }
             mCallback->onOutputBufferAvailable(index, outBuffer);
             break;
+        }
         case OutputBuffers::REALLOCATE:
             if (++reallocTryNum > kMaxReallocTry) {
                 output.unlock();
@@ -2436,6 +2798,18 @@
     }
 }
 
+void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
+    if (mInputSurface == nullptr) {
+        mInfoBuffers.push_back(buffer);
+    } else {
+        std::list<std::unique_ptr<C2Work>> items;
+        std::unique_ptr<C2Work> work(new C2Work);
+        work->input.infoBuffers.emplace_back(*buffer);
+        work->worklets.emplace_back(new C2Worklet);
+        items.push_back(std::move(work));
+        // Send the info buffer to the component as a standalone work item; with an input
+        // surface there is no client queueInputBuffer() call to piggyback on.
+        (void)mComponent->queue(&items);
+    }
+}
+
 status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) {
     // C2_OK is always translated to OK.
     if (c2s == C2_OK) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 8dc9fb6..e62742b 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -73,6 +73,10 @@
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
             AString *errorDetailMsg) override;
+    status_t queueSecureInputBuffers(
+            const sp<MediaCodecBuffer> &buffer,
+            bool secure,
+            AString *errorDetailMsg) override;
     status_t attachBuffer(
             const std::shared_ptr<C2Buffer> &c2Buffer,
             const sp<MediaCodecBuffer> &buffer) override;
@@ -88,10 +92,17 @@
             size_t numSubSamples,
             const sp<MediaCodecBuffer> &buffer,
             AString* errorDetailMsg) override;
+    status_t attachEncryptedBuffers(
+            const sp<hardware::HidlMemory> &memory,
+            size_t offset,
+            const sp<MediaCodecBuffer> &buffer,
+            bool secure,
+            AString* errorDetailMsg) override;
     status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
     void pollForRenderedBuffers() override;
     void onBufferReleasedFromOutputSurface(uint32_t generation) override;
+    void onBufferAttachedToOutputSurface(uint32_t generation) override;
     status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
     void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
     void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
@@ -217,6 +228,14 @@
 
     void resetBuffersPixelFormat(bool isEncoder);
 
+    /**
+     * Queue a C2 info buffer that will be sent to the codec with the subsequent
+     * queueInputBuffer() call.
+     *
+     * @param buffer C2 info buffer
+     */
+    void setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer);
+
 private:
     uint32_t getInputBuffersPixelFormat();
 
@@ -390,6 +409,8 @@
     std::atomic_bool mSendEncryptedInfoBuffer;
 
     std::atomic_bool mTunneled;
+
+    std::vector<std::shared_ptr<C2InfoBuffer>> mInfoBuffers;
 };
 
 // Conversion of a c2_status_t value to a status_t value may depend on the
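Hedged usage sketch for the new setInfoBuffer() declaration above: a hypothetical caller (not from the patch) stages a C2InfoBuffer so that the next queueInputBuffer() call carries it to the component. Include paths are assumptions.

#include <memory>
#include <C2Buffer.h>
#include "CCodecBufferChannel.h"  // assumed include for CCodecBufferChannel

static void queueWithInfoBuffer(
        const std::shared_ptr<android::CCodecBufferChannel> &channel,
        const android::sp<android::MediaCodecBuffer> &inputBuffer,
        const C2InfoBuffer &info) {
    // Stored in mInfoBuffers (or wrapped into a C2Work when an input surface is
    // in use) and delivered together with the next queued input buffer.
    channel->setInfoBuffer(std::make_shared<C2InfoBuffer>(info));
    (void)channel->queueInputBuffer(inputBuffer);
}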
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 670923b..3eec0f3 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -18,11 +18,15 @@
 #define LOG_TAG "CCodecBuffers"
 #include <utils/Log.h>
 
+#include <numeric>
+
 #include <C2AllocatorGralloc.h>
 #include <C2PlatformSupport.h>
 
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/CodecBase.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/SkipCutBuffer.h>
 #include <mediadrm/ICrypto.h>
@@ -54,7 +58,7 @@
 
     std::shared_ptr<C2GraphicBlock> block;
     c2_status_t err = pool->fetchGraphicBlock(
-            width, height, pixelFormat, fullUsage, &block);
+            align(width, 2), align(height, 2), pixelFormat, fullUsage, &block);
     if (err != C2_OK) {
         ALOGD("fetch graphic block failed: %d", err);
         return nullptr;
@@ -147,6 +151,171 @@
     return copy;
 }
 
+// MultiAccessUnitSkipCutBuffer for buffer and bufferInfos
+
+class MultiAccessUnitSkipCutBuffer : public SkipCutBuffer {
+
+public:
+    explicit MultiAccessUnitSkipCutBuffer(
+            int32_t skip, int32_t cut, size_t num16BitChannels):
+        SkipCutBuffer(skip, cut, num16BitChannels),
+        mFrontPaddingDelay(0), mSize(0) {
+    }
+    void clearAll() {
+        mInfos.clear();
+        mFrontPaddingDelay = 0;
+        mSize = 0;
+        SkipCutBuffer::clear();
+    }
+
+    virtual ~MultiAccessUnitSkipCutBuffer() {
+
+    }
+
+    void submitMultiAccessUnits(
+            const sp<MediaCodecBuffer>& buffer,
+            int32_t sampleRate, size_t num16BitChannels,
+            std::shared_ptr<const C2AccessUnitInfos::output> &infos) {
+        if (infos == nullptr) {
+            // there is nothing to do more.
+            SkipCutBuffer::submit(buffer);
+            return;
+        }
+        typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+        CHECK_EQ(mSize, SkipCutBuffer::size());
+        sp<BufferInfosWrapper> bufferInfos{new BufferInfosWrapper(decltype(bufferInfos->value)())};
+        uint32_t availableSize = buffer->size() + SkipCutBuffer::size();
+        uint32_t frontPadding = mFrontPadding;
+        int32_t lastEmptyAccessUnitIndex = -1;
+        int64_t byteInUs = 0;
+        if (sampleRate > 0 && num16BitChannels > 0) {
+            byteInUs = (1000000u / (sampleRate * num16BitChannels * 2));
+        }
+        if (frontPadding > 0) {
+            mInfos.clear();
+            mSize = 0;
+        }
+        for (int i = 0 ; i < infos->flexCount() && frontPadding > 0; i++) {
+            uint32_t flagsInPadding = 0;
+            int64_t timeInPadding = 0;
+            if (infos->m.values[i].size <= frontPadding) {
+                // we have more front padding so this buffer is not going to be used.
+                int32_t consumed = infos->m.values[i].size;
+                frontPadding -= consumed;
+                mFrontPaddingDelay += byteInUs * (consumed);
+                availableSize -= consumed;
+                flagsInPadding |= toMediaCodecFlags(infos->m.values[i].flags);
+                timeInPadding = infos->m.values[i].timestamp;
+            } else {
+                C2AccessUnitInfosStruct info = infos->m.values[i];
+                mFrontPaddingDelay +=  byteInUs * (frontPadding);
+                info.size -= frontPadding;
+                info.timestamp -= mFrontPaddingDelay;
+                availableSize -= frontPadding;
+                flagsInPadding |= toMediaCodecFlags(infos->m.values[i].flags);
+                timeInPadding = infos->m.values[i].timestamp;
+                frontPadding = 0;
+                mInfos.push_back(info);
+                mSize += info.size;
+            }
+            if (flagsInPadding != 0) {
+                bufferInfos->value.emplace_back(
+                        flagsInPadding, 0, timeInPadding);
+            }
+            lastEmptyAccessUnitIndex = i;
+        }
+        if (frontPadding <= 0) {
+            // process what's already in the buffer first
+            auto it = mInfos.begin();
+            while (it != mInfos.end() && availableSize > mBackPadding) {
+                // we have samples to send out.
+                if ((availableSize - it->size) >= mBackPadding) {
+                    // this is totally used here.
+                    int32_t consumed = it->size;
+                    bufferInfos->value.emplace_back(
+                            toMediaCodecFlags(it->flags), consumed, it->timestamp);
+                    availableSize -= consumed;
+                    mSize -= consumed;
+                    it = mInfos.erase(it);
+                } else {
+                    int32_t consumed = availableSize - mBackPadding;
+                    bufferInfos->value.emplace_back(
+                            toMediaCodecFlags(it->flags),
+                            consumed,
+                            it->timestamp);
+                    it->size -= consumed;
+                    it->timestamp += consumed * byteInUs;
+                    availableSize -= consumed;
+                    mSize -= consumed;
+                    it++;
+                }
+            }
+            // if the buffer has more data, process all of it and keep the remaining info.
+            for (int i = (lastEmptyAccessUnitIndex + 1) ; i < infos->flexCount() ; i++) {
+                // update bufferInfos and mInfos
+                if (availableSize > mBackPadding) {
+                    // we have to take data from the new buffer.
+                    if (availableSize - infos->m.values[i].size >= mBackPadding) {
+                        // we are using this info
+                        int32_t consumed = infos->m.values[i].size;
+                        bufferInfos->value.emplace_back(
+                                toMediaCodecFlags(infos->m.values[i].flags),
+                                consumed,
+                                infos->m.values[i].timestamp - mFrontPaddingDelay);
+                        availableSize -= consumed;
+                    } else {
+                        // if we need to update the size
+                        C2AccessUnitInfosStruct info = infos->m.values[i];
+                        int32_t consumed = availableSize - mBackPadding;
+                        bufferInfos->value.emplace_back(
+                                toMediaCodecFlags(infos->m.values[i].flags),
+                                consumed,
+                                infos->m.values[i].timestamp - mFrontPaddingDelay);
+                        info.size -= consumed;
+                        info.timestamp = info.timestamp - mFrontPaddingDelay +
+                                consumed * byteInUs;
+                        mInfos.push_back(info);
+                        availableSize -= consumed;
+                        mSize += info.size;
+                    }
+                } else {
+                    // we have to maintain infos
+                    C2AccessUnitInfosStruct info = infos->m.values[i];
+                    info.timestamp -= mFrontPaddingDelay;
+                    mInfos.push_back(info);
+                    mSize += info.size;
+                }
+            }
+        }
+        SkipCutBuffer::submit(buffer);
+        infos = nullptr;
+        if (!bufferInfos->value.empty()) {
+            buffer->meta()->setObject("accessUnitInfo", bufferInfos);
+        }
+    }
+protected:
+    // Flags that can accompany individual BufferInfos
+    // when large-frame audio is in use
+    constexpr static std::initializer_list<std::pair<uint32_t, uint32_t>> flagList = {
+            {BUFFER_FLAG_CODEC_CONFIG, C2FrameData::FLAG_CODEC_CONFIG},
+            {BUFFER_FLAG_END_OF_STREAM, C2FrameData::FLAG_END_OF_STREAM},
+            {BUFFER_FLAG_DECODE_ONLY, C2FrameData::FLAG_DROP_FRAME}
+    };
+
+    static uint32_t toMediaCodecFlags(uint32_t flags) {
+        return std::transform_reduce(
+                flagList.begin(), flagList.end(),
+                0u,
+                std::bit_or{},
+                [flags](const std::pair<uint32_t, uint32_t> &entry) {
+                    return (flags & entry.second) ? entry.first : 0;
+                });
+    }
+    std::list<C2AccessUnitInfosStruct> mInfos;
+    int64_t mFrontPaddingDelay;
+    size_t mSize;
+};
+
 // OutputBuffers
 
 OutputBuffers::OutputBuffers(const char *componentName, const char *name)
@@ -201,6 +370,15 @@
     }
 }
 
+bool OutputBuffers::submit(const sp<MediaCodecBuffer> &buffer, int32_t sampleRate,
+            int32_t channelCount, std::shared_ptr<const C2AccessUnitInfos::output> &infos) {
+    if (mSkipCutBuffer == nullptr) {
+        return false;
+    }
+    mSkipCutBuffer->submitMultiAccessUnits(buffer, sampleRate, channelCount, infos);
+    return true;
+}
+
 void OutputBuffers::setSkipCutBuffer(int32_t skip, int32_t cut) {
     if (mSkipCutBuffer != nullptr) {
         size_t prevSize = mSkipCutBuffer->size();
@@ -208,7 +386,7 @@
             ALOGD("[%s] Replacing SkipCutBuffer holding %zu bytes", mName, prevSize);
         }
     }
-    mSkipCutBuffer = new SkipCutBuffer(skip, cut, mChannelCount);
+    mSkipCutBuffer = new MultiAccessUnitSkipCutBuffer(skip, cut, mChannelCount);
 }
 
 bool OutputBuffers::convert(
@@ -1160,7 +1338,16 @@
         ALOGD("[%s] copy buffer failed", mName);
         return WOULD_BLOCK;
     }
-    submit(c2Buffer);
+    if (buffer && buffer->hasInfo(C2AccessUnitInfos::output::PARAM_TYPE)) {
+        std::shared_ptr<const C2AccessUnitInfos::output> bufferMetadata =
+                        std::static_pointer_cast<const C2AccessUnitInfos::output>(
+                        buffer->getInfo(C2AccessUnitInfos::output::PARAM_TYPE));
+        if (submit(c2Buffer, mSampleRate, mChannelCount, bufferMetadata)) {
+            buffer->removeInfo(C2AccessUnitInfos::output::PARAM_TYPE);
+        }
+    } else {
+        submit(c2Buffer);
+    }
     handleImageData(c2Buffer);
     *clientBuffer = c2Buffer;
     ALOGV("[%s] grabbed buffer %zu", mName, *index);
@@ -1198,7 +1385,7 @@
     (void)flushedWork;
     mImpl.flush();
     if (mSkipCutBuffer != nullptr) {
-        mSkipCutBuffer->clear();
+        mSkipCutBuffer->clearAll();
     }
 }
 
@@ -1356,26 +1543,30 @@
 void LinearOutputBuffers::flush(
         const std::list<std::unique_ptr<C2Work>> &flushedWork) {
     if (mSkipCutBuffer != nullptr) {
-        mSkipCutBuffer->clear();
+        mSkipCutBuffer->clearAll();
     }
     FlexOutputBuffers::flush(flushedWork);
 }
 
 sp<Codec2Buffer> LinearOutputBuffers::wrap(const std::shared_ptr<C2Buffer> &buffer) {
     if (buffer == nullptr) {
-        ALOGV("[%s] using a dummy buffer", mName);
+        ALOGD("[%s] received null buffer", mName);
         return new LocalLinearBuffer(mFormat, new ABuffer(0));
     }
     if (buffer->data().type() != C2BufferData::LINEAR) {
-        ALOGV("[%s] non-linear buffer %d", mName, buffer->data().type());
+        ALOGW("[%s] non-linear buffer %d", mName, buffer->data().type());
         // We expect linear output buffers from the component.
         return nullptr;
     }
     if (buffer->data().linearBlocks().size() != 1u) {
-        ALOGV("[%s] no linear buffers", mName);
+        ALOGW("[%s] no linear buffers", mName);
         // We expect one and only one linear block from the component.
         return nullptr;
     }
+    if (buffer->data().linearBlocks().front().size() == 0) {
+        ALOGD("[%s] received 0-sized buffer", mName);
+        return new LocalLinearBuffer(mFormat, new ABuffer(0));
+    }
     sp<Codec2Buffer> clientBuffer = ConstLinearBlockBuffer::Allocate(mFormat, buffer);
     if (clientBuffer == nullptr) {
         ALOGD("[%s] ConstLinearBlockBuffer::Allocate failed", mName);
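Stand-alone sketch of the flag-translation pattern used by MultiAccessUnitSkipCutBuffer::toMediaCodecFlags() above: fold a table of (SDK flag, Codec2 flag) pairs with std::transform_reduce. The flag values below are illustrative placeholders, not the real SDK/C2 constants.

#include <array>
#include <cstdint>
#include <functional>
#include <numeric>
#include <utility>

namespace {

constexpr uint32_t kSdkCodecConfig = 1u << 1;  // stand-in for BUFFER_FLAG_CODEC_CONFIG
constexpr uint32_t kSdkEos         = 1u << 2;  // stand-in for BUFFER_FLAG_END_OF_STREAM
constexpr uint32_t kC2CodecConfig  = 1u << 0;  // stand-in for C2FrameData::FLAG_CODEC_CONFIG
constexpr uint32_t kC2Eos          = 1u << 3;  // stand-in for C2FrameData::FLAG_END_OF_STREAM

constexpr std::array<std::pair<uint32_t, uint32_t>, 2> kFlagTable = {{
        {kSdkCodecConfig, kC2CodecConfig},
        {kSdkEos, kC2Eos},
}};

// OR together every SDK flag whose Codec2 counterpart is set in c2Flags.
uint32_t toSdkFlags(uint32_t c2Flags) {
    return std::transform_reduce(
            kFlagTable.begin(), kFlagTable.end(), 0u, std::bit_or<uint32_t>{},
            [c2Flags](const std::pair<uint32_t, uint32_t> &entry) {
                return (c2Flags & entry.second) ? entry.first : 0u;
            });
}

}  // namespace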
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index cbef644..f0936bc 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -20,6 +20,7 @@
 
 #include <optional>
 #include <string>
+#include <vector>
 
 #include <C2Config.h>
 #include <DataConverter.h>
@@ -33,6 +34,8 @@
 struct ICrypto;
 class MemoryDealer;
 class SkipCutBuffer;
+class MultiAccessUnitSkipCutBuffer;
+struct AccessUnitInfo;
 
 constexpr size_t kLinearBufferSize = 1048576;
 // This can fit an 8K frame.
@@ -382,13 +385,17 @@
             sp<MediaCodecBuffer>* outBuffer);
 
 protected:
-    sp<SkipCutBuffer> mSkipCutBuffer;
+
+    sp<MultiAccessUnitSkipCutBuffer> mSkipCutBuffer;
 
     /**
      * Update the SkipCutBuffer object. No-op if it's never initialized.
      */
     void updateSkipCutBuffer(int32_t sampleRate, int32_t channelCount);
 
+    bool submit(const sp<MediaCodecBuffer> &buffer, int32_t sampleRate,
+            int32_t channelCount, std::shared_ptr<const C2AccessUnitInfos::output> &infos);
+
     /**
      * Submit buffer to SkipCutBuffer object, if initialized.
      */
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 6d49fa8..36725ec 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -19,6 +19,8 @@
 
 #include <initializer_list>
 
+#include <android_media_codec.h>
+
 #include <cutils/properties.h>
 #include <log/log.h>
 #include <utils/NativeHandle.h>
@@ -402,10 +404,19 @@
 
     add(ConfigMapper(KEY_MAX_INPUT_SIZE, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE, "value")
         .limitTo(D::INPUT));
+
     // remove when codecs switch to PARAMKEY
     deprecated(ConfigMapper(KEY_MAX_INPUT_SIZE, "coded.max-frame-size", "value")
                .limitTo(D::INPUT));
 
+    // large frame params
+    add(ConfigMapper(KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE,
+            C2_PARAMKEY_OUTPUT_LARGE_FRAME, "max-size")
+        .limitTo(D::AUDIO & D::OUTPUT));
+    add(ConfigMapper(KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
+            C2_PARAMKEY_OUTPUT_LARGE_FRAME, "threshold-size")
+        .limitTo(D::AUDIO & D::OUTPUT));
+
     // Rotation
     // Note: SDK rotation is clock-wise, while C2 rotation is counter-clock-wise
     add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_VUI_ROTATION, "value")
@@ -582,6 +593,13 @@
             }
             return C2Value();
         }));
+
+    if (android::media::codec::provider_->region_of_interest()
+        && android::media::codec::provider_->region_of_interest_support()) {
+        add(ConfigMapper(C2_PARAMKEY_QP_OFFSET_RECTS, C2_PARAMKEY_QP_OFFSET_RECTS, "")
+            .limitTo(D::VIDEO & (D::CONFIG | D::PARAM) & D::ENCODER & D::INPUT));
+    }
+
     deprecated(ConfigMapper(PARAMETER_KEY_REQUEST_SYNC_FRAME,
                      "coding.request-sync", "value")
         .limitTo(D::PARAM & D::ENCODER)
@@ -1112,6 +1130,11 @@
     mParamUpdater->clear();
     mParamUpdater->supportWholeParam(
             C2_PARAMKEY_TEMPORAL_LAYERING, C2StreamTemporalLayeringTuning::CORE_INDEX);
+    if (android::media::codec::provider_->region_of_interest()
+        && android::media::codec::provider_->region_of_interest_support()) {
+        mParamUpdater->supportWholeParam(
+                C2_PARAMKEY_QP_OFFSET_RECTS, C2StreamQpOffsetRects::CORE_INDEX);
+    }
     mParamUpdater->addParamDesc(mReflector, mParamDescs);
 
     // TEMP: add some standard fields even if not reflected
@@ -1862,6 +1885,48 @@
         }
     }
 
+    if (android::media::codec::provider_->region_of_interest()
+        && android::media::codec::provider_->region_of_interest_support()) {
+        if (mDomain == (IS_VIDEO | IS_ENCODER)) {
+            AString qpOffsetRects;
+            if (params->findString(PARAMETER_KEY_QP_OFFSET_RECTS, &qpOffsetRects)) {
+                int width, height;
+                mInputFormat->findInt32("width", &width);
+                mInputFormat->findInt32("height", &height);
+                std::vector<C2QpOffsetRectStruct> c2QpOffsetRects;
+                char mutableStrQpOffsetRects[strlen(qpOffsetRects.c_str()) + 1];
+                strcpy(mutableStrQpOffsetRects, qpOffsetRects.c_str());
+                char* box = strtok(mutableStrQpOffsetRects, ";");
+                while (box != nullptr) {
+                    int top, left, bottom, right, offset;
+                    if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
+                        left = c2_max(0, left);
+                        top = c2_max(0, top);
+                        right = c2_min(right, width);
+                        bottom = c2_min(bottom, height);
+                        if (right > left && bottom > top) {
+                            C2Rect rect(right - left, bottom - top);
+                            rect.at(left, top);
+                            c2QpOffsetRects.push_back(C2QpOffsetRectStruct(rect, offset));
+                        } else {
+                            ALOGE("Rects configuration %s is not valid.", box);
+                        }
+                    } else {
+                        ALOGE("Rects configuration %s doesn't follow the string pattern.", box);
+                    }
+                    box = strtok(nullptr, ";");
+                }
+                if (c2QpOffsetRects.size() != 0) {
+                    const std::unique_ptr<C2StreamQpOffsetRects::output> regions =
+                            C2StreamQpOffsetRects::output::AllocUnique(
+                                    c2QpOffsetRects.size(), 0u, c2QpOffsetRects);
+                    params->setBuffer(C2_PARAMKEY_QP_OFFSET_RECTS,
+                                      ABuffer::CreateAsCopy(regions.get(), regions->size()));
+                }
+            }
+        }
+    }
+
     // this is to verify that we set proper signedness for standard parameters
     bool beVeryStrict = property_get_bool("debug.stagefright.ccodec_strict_type", false);
     // this is to allow vendors to use the wrong signedness for standard parameters
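Illustrative helper (not part of the patch): compose a QP-offset-rects string in the "top,left-bottom,right=offset;" form that the sscanf/strtok parsing above and the SetRegionOfInterestParams test below expect. The struct and function names are made up for the example.

#include <cstdio>
#include <string>
#include <vector>

struct QpOffsetRect {
    int top, left, bottom, right;  // rect corners in pixels; the parser clips them to the frame
    int offset;                    // signed QP offset applied within the rect
};

static std::string makeQpOffsetRectsString(const std::vector<QpOffsetRect> &rects) {
    std::string out;
    char entry[96];
    for (const QpOffsetRect &r : rects) {
        // One "top,left-bottom,right=offset" entry per rect, ';'-terminated.
        snprintf(entry, sizeof(entry), "%d,%d-%d,%d=%d;",
                 r.top, r.left, r.bottom, r.right, r.offset);
        out += entry;
    }
    return out;
}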
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 5c1755e..2550dcf 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -179,10 +179,17 @@
     if (!buffer
             || buffer->data().type() != C2BufferData::LINEAR
             || buffer->data().linearBlocks().size() != 1u) {
+        if (!buffer) {
+            ALOGD("ConstLinearBlockBuffer::Allocate: null buffer");
+        } else {
+            ALOGW("ConstLinearBlockBuffer::Allocate: type=%d # linear blocks=%zu",
+                  buffer->data().type(), buffer->data().linearBlocks().size());
+        }
         return nullptr;
     }
     C2ReadView readView(buffer->data().linearBlocks()[0].map().get());
     if (readView.error() != C2_OK) {
+        ALOGW("ConstLinearBlockBuffer::Allocate: readView.error()=%d", readView.error());
         return nullptr;
     }
     return new ConstLinearBlockBuffer(format, std::move(readView), buffer);
@@ -1036,6 +1043,37 @@
     return const_cast<native_handle_t *>(mBlock->handle());
 }
 
+void EncryptedLinearBlockBuffer::getMappedBlock(
+        std::unique_ptr<MappedBlock> * const mappedBlock) const {
+    if (mappedBlock) {
+        mappedBlock->reset(new EncryptedLinearBlockBuffer::MappedBlock(mBlock));
+    }
+    return;
+}
+
+EncryptedLinearBlockBuffer::MappedBlock::MappedBlock(
+        const std::shared_ptr<C2LinearBlock> &block) : mView(block->map().get()) {
+}
+
+bool EncryptedLinearBlockBuffer::MappedBlock::copyDecryptedContent(
+        const sp<IMemory> &decrypted, size_t length) {
+    if (mView.error() != C2_OK) {
+        return false;
+    }
+    if (mView.size() < length) {
+        ALOGE("View size(%d) less than decrypted length(%zu)",
+                mView.size(), length);
+        return false;
+    }
+    memcpy(mView.data(), decrypted->unsecurePointer(), length);
+    mView.setOffset(mView.offset() + length);
+    return true;
+}
+
+EncryptedLinearBlockBuffer::MappedBlock::~MappedBlock() {
+    mView.setOffset(0);
+}
+
 using ::aidl::android::hardware::graphics::common::Cta861_3;
 using ::aidl::android::hardware::graphics::common::Smpte2086;
 
@@ -1049,11 +1087,8 @@
         // Unwrap raw buffer handle from the C2Handle
         native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
         if (!nh) {
-            nh = UnwrapNativeCodec2AhwbHandle(handle);
-            if (!nh) {
-                ALOGE("handle is not compatible to neither C2HandleGralloc nor C2HandleAhwb");
-                return;
-            }
+            ALOGE("handle is not compatible with any gralloc C2Handle types");
+            return;
         }
         // Import the raw handle so IMapper can use the buffer. The imported
         // handle must be freed when the client is done with the buffer.
@@ -1109,7 +1144,7 @@
 
         std::optional<Smpte2086> smpte2086;
         status_t status = mapper.getSmpte2086(buffer.get(), &smpte2086);
-        if (status != OK) {
+        if (status != OK || !smpte2086) {
             err = C2_CORRUPTED;
         } else {
             if (smpte2086) {
@@ -1129,7 +1164,7 @@
 
         std::optional<Cta861_3> cta861_3;
         status = mapper.getCta861_3(buffer.get(), &cta861_3);
-        if (status != OK) {
+        if (status != OK || !cta861_3) {
             err = C2_CORRUPTED;
         } else {
             if (cta861_3) {
@@ -1148,7 +1183,7 @@
         dynamicInfo->reset();
         std::optional<std::vector<uint8_t>> vec;
         status_t status = mapper.getSmpte2094_40(buffer.get(), &vec);
-        if (status != OK) {
+        if (status != OK || !vec) {
             dynamicInfo->reset();
             err = C2_CORRUPTED;
         } else {
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index b73acab..5e96921 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -384,6 +384,17 @@
      */
     native_handle_t *handle() const;
 
+    class MappedBlock {
+    public:
+        explicit MappedBlock(const std::shared_ptr<C2LinearBlock> &block);
+        virtual ~MappedBlock();
+        bool copyDecryptedContent(const sp<IMemory> &decrypted, size_t length);
+    private:
+        C2WriteView mView;
+    };
+
+    void getMappedBlock(std::unique_ptr<MappedBlock> * const mappedBlock) const;
+
 private:
 
     std::shared_ptr<C2LinearBlock> mBlock;
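Hedged usage sketch for the MappedBlock API declared above: map the encrypted buffer's linear block once, then copy a decrypted range into it. The caller and the include set are illustrative; copyDecryptedContent() advances the internal write offset, so successive calls append consecutive ranges.

#include <memory>
#include <binder/IMemory.h>
#include "Codec2Buffer.h"  // assumed include for EncryptedLinearBlockBuffer

static bool copyOneDecryptedRange(
        const android::sp<android::EncryptedLinearBlockBuffer> &encrypted,
        const android::sp<android::IMemory> &decrypted,
        size_t length) {
    std::unique_ptr<android::EncryptedLinearBlockBuffer::MappedBlock> mapped;
    encrypted->getMappedBlock(&mapped);
    // Returns false if the write view is in error or smaller than |length|.
    return mapped != nullptr && mapped->copyDecryptedContent(decrypted, length);
}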
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 453a0d2..692f700 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -20,6 +20,9 @@
 
 #include <strings.h>
 
+#include <com_android_media_codec_flags.h>
+#include <android_media_codec.h>
+
 #include <C2Component.h>
 #include <C2Config.h>
 #include <C2Debug.h>
@@ -684,6 +687,11 @@
                 const MediaCodecsXmlParser::AttributeMap &attrMap = typeIt->second;
                 std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
                     codecInfo->addMediaType(mediaType.c_str());
+
+                // we could detect tunneled playback via the playback interface, but we never did
+                // that for the advertised feature, so for now use only the advertised feature.
+                bool canDoTunneledPlayback = false;
+
                 for (const auto &v : attrMap) {
                     std::string key = v.first;
                     std::string value = v.second;
@@ -704,6 +712,11 @@
                         // Ignore trailing bad characters and default to 0.
                         (void)sscanf(value.c_str(), "%d", &intValue);
                         caps->addDetail(key.c_str(), intValue);
+
+                        if (key.compare(
+                                MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK) == 0) {
+                            canDoTunneledPlayback = true;
+                        }
                     } else {
                         caps->addDetail(key.c_str(), value.c_str());
                     }
@@ -752,6 +765,36 @@
                 }
                 addSupportedColorFormats(
                         intf, caps.get(), trait, mediaType, it->second);
+
+                if (com::android::media::codec::flags::provider_->large_audio_frame()
+                        && android::media::codec::provider_->large_audio_frame_finish()) {
+                    // Adding feature-multiple-frames when C2LargeFrame param is present
+                    if (trait.domain == C2Component::DOMAIN_AUDIO) {
+                        std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+                        c2_status_t err = intf->querySupportedParams(&params);
+                        if (err == C2_OK) {
+                            for (const auto &paramDesc : params) {
+                                if (C2LargeFrame::output::PARAM_TYPE == paramDesc->index()) {
+                                    std::string featureMultipleFrames =
+                                            std::string(KEY_FEATURE_) + FEATURE_MultipleFrames;
+                                    caps->addDetail(featureMultipleFrames.c_str(), 0);
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                }
+
+                if (android::media::codec::provider_->null_output_surface_support() &&
+                        android::media::codec::provider_->null_output_surface()) {
+                    // all non-tunneled video decoders support detached surface mode
+                    if (trait.kind == C2Component::KIND_DECODER &&
+                            trait.domain == C2Component::DOMAIN_VIDEO &&
+                            !canDoTunneledPlayback) {
+                        caps->addDetail(
+                                MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+                    }
+                }
             }
         }
     }
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 4bf6cd0..c158c5b 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -102,6 +102,7 @@
     }
 
     /**
+     * Notify that the input buffer reference is no longer needed.
      * Clean up C2Work related references if necessary. No-op by default.
      *
      * \param index index of input work.
@@ -109,6 +110,12 @@
     virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
 
     /**
+     * Signal one input buffer as emptied.
+     * No-op by default.
+     */
+    virtual void onInputBufferEmptied() {}
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     virtual android_dataspace getDataspace() { return mDataSpace; }
diff --git a/media/codec2/sfplugin/tests/Android.bp b/media/codec2/sfplugin/tests/Android.bp
index 246e563..2739f44 100644
--- a/media/codec2/sfplugin/tests/Android.bp
+++ b/media/codec2/sfplugin/tests/Android.bp
@@ -42,6 +42,7 @@
     ],
 
     static_libs: [
+        "android.media.codec-aconfig-cc",
         "libcodec2_hidl@1.0",
         "libstagefright_bufferpool@2.0",
     ],
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index 3615289..508bec2 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -20,6 +20,8 @@
 
 #include <gtest/gtest.h>
 
+#include <android_media_codec.h>
+
 #include <codec2/hidl/1.0/Configurable.h>
 #include <codec2/hidl/client.h>
 #include <util/C2InterfaceHelper.h>
@@ -235,6 +237,22 @@
                             })
                             .withSetter(Setter<C2StreamProfileLevelInfo::output>)
                             .build());
+
+                    std::vector<C2QpOffsetRectStruct> c2QpOffsetRectsInfo;
+                    addParameter(
+                            DefineParam(mInputQpOffsetRects, C2_PARAMKEY_QP_OFFSET_RECTS)
+                                    .withDefault(C2StreamQpOffsetRects::output::AllocShared(
+                                            c2QpOffsetRectsInfo.size(), 0, c2QpOffsetRectsInfo))
+                                    .withFields({
+                                            C2F(mInputQpOffsetRects, m.values[0].qpOffset)
+                                                    .inRange(-128, 127),
+                                            C2F(mInputQpOffsetRects, m.values[0].left).any(),
+                                            C2F(mInputQpOffsetRects, m.values[0].top).any(),
+                                            C2F(mInputQpOffsetRects, m.values[0].width).any(),
+                                            C2F(mInputQpOffsetRects, m.values[0].height).any(),
+                                    })
+                                    .withSetter(Setter<C2StreamQpOffsetRects::output>)
+                                    .build());
                 }
 
                 // TODO: more SDK params
@@ -254,6 +272,7 @@
             std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
             std::shared_ptr<C2StreamProfileLevelInfo::input> mInputProfileLevel;
             std::shared_ptr<C2StreamProfileLevelInfo::output> mOutputProfileLevel;
+            std::shared_ptr<C2StreamQpOffsetRects::output> mInputQpOffsetRects;
 
             template<typename T>
             static C2R Setter(bool, C2P<T> &) {
@@ -636,4 +655,56 @@
         HdrProfilesTest,
         ::testing::ValuesIn(kHdrProfilesParams));
 
+TEST_F(CCodecConfigTest, SetRegionOfInterestParams) {
+    if (!android::media::codec::provider_->region_of_interest()
+        || !android::media::codec::provider_->region_of_interest_support()) {
+        GTEST_SKIP() << "Skipping the test as region_of_interest flags are not enabled.\n";
+    }
+
+    init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, MIMETYPE_VIDEO_VP9);
+
+    ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+
+    const int kWidth = 32;
+    const int kHeight = 32;
+    const int kNumBlocks = ((kWidth + 15) / 16) * ((kHeight + 15) / 16);
+    int8_t mapInfo[kNumBlocks] = {-1, 0, 1, 1};
+    int top[kNumBlocks] = {0, 0, 16, 16};
+    int left[kNumBlocks] = {0, 16, 0, 16};
+    int bottom[kNumBlocks] = {16, 16, 32, 32};
+    int right[kNumBlocks] = {16, 32, 16, 32};
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_WIDTH, kWidth);
+    format->setInt32(KEY_HEIGHT, kHeight);
+    AString val;
+    for (int i = 0; i < kNumBlocks; i++) {
+        val.append(AStringPrintf("%d,%d-%d,%d=%d;", top[i], left[i], bottom[i],
+                                 right[i], mapInfo[i]));
+    }
+    format->setString(PARAMETER_KEY_QP_OFFSET_RECTS, val);
+
+    std::vector<std::unique_ptr<C2Param>> configUpdate;
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(mConfigurable, format, D::CONFIG,
+                                                       C2_MAY_BLOCK, &configUpdate));
+
+    EXPECT_EQ(1u, configUpdate.size());
+
+    C2StreamQpOffsetRects::output* qpRectParam =
+            FindParam<std::remove_pointer<decltype(qpRectParam)>::type>(configUpdate);
+    ASSERT_NE(nullptr, qpRectParam);
+    ASSERT_EQ(kNumBlocks, qpRectParam->flexCount());
+    for (auto i = 0; i < kNumBlocks; i++) {
+        EXPECT_EQ(mapInfo[i], (int8_t)qpRectParam->m.values[i].qpOffset)
+                << "qp offset for index " << i << " is not as expected ";
+        EXPECT_EQ(left[i], qpRectParam->m.values[i].left)
+                << "left for index " << i << " is not as expected ";
+        EXPECT_EQ(top[i], qpRectParam->m.values[i].top)
+                << "top for index " << i << " is not as expected ";
+        EXPECT_EQ(right[i] - left[i], qpRectParam->m.values[i].width)
+                << "width for index " << i << " is not as expected ";
+        EXPECT_EQ(bottom[i] - top[i], qpRectParam->m.values[i].height)
+                << "height for index " << i << " is not as expected ";
+    }
+}
+
 } // namespace android
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 54a6fb1..bed594c 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -54,7 +54,7 @@
 
     static_libs: [
         "libarect",
-        "libyuv_static",
+        "libyuv",
     ],
 
     sanitize: {
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 261fd05..75e9bbc 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -593,8 +593,6 @@
         uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
         const C2GraphicView &src, C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
     CHECK(dstY != nullptr);
-    CHECK((src.width() & 1) == 0);
-    CHECK((src.height() & 1) == 0);
 
     if (dstStride * dstVStride * 3 / 2 > bufferSize) {
         ALOGD("conversion buffer is too small for converting from RGB to YUV");
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index ff72b1f..7a33af4 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -32,10 +32,15 @@
 namespace android {
 
 
-static bool isAtLeast(int version, const char *codeName) {
-    char deviceCodeName[PROP_VALUE_MAX];
-    __system_property_get("ro.build.version.codename", deviceCodeName);
-    return android_get_device_api_level() >= version || !strcmp(deviceCodeName, codeName);
+static bool isAtLeast(int version, const std::string codeName) {
+    static std::once_flag sCheckOnce;
+    static std::string sDeviceCodeName;
+    static int sDeviceApiLevel;
+    std::call_once(sCheckOnce, [&](){
+        sDeviceCodeName = base::GetProperty("ro.build.version.codename", "");
+        sDeviceApiLevel = android_get_device_api_level();
+    });
+    return sDeviceApiLevel >= version || sDeviceCodeName == codeName;
 }
 
 bool isAtLeastT() {
@@ -46,6 +51,10 @@
     return isAtLeast(__ANDROID_API_U__, "UpsideDownCake");
 }
 
+bool isAtLeastV() {
+    return isAtLeast(__ANDROID_API_V__, "VanillaIceCream");
+}
+
 static bool isP010Allowed() {
     // The Vendor API level which is min(ro.product.first_api_level, ro.board.[first_]api_level).
     // This is the api level to which VSR requirement the device conform.
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.h b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
index 9bb52bd..693b3db 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
@@ -25,6 +25,8 @@
 
 bool isAtLeastU();
 
+bool isAtLeastV();
+
 bool isVendorApiOrFirstApiAtLeastT();
 
 /**
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index 02c356c..7d671a7 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -45,6 +45,11 @@
         "C2SampleComponent_test.cpp",
         "C2UtilTest.cpp",
         "vndk/C2BufferTest.cpp",
+        "vndk/C2FenceTest.cpp",
+    ],
+
+    static_libs: [
+        "libgmock",
     ],
 
     shared_libs: [
@@ -52,6 +57,7 @@
         "libcodec2_vndk",
         "libcutils",
         "liblog",
+        "libui",
         "libutils",
     ],
 
diff --git a/media/codec2/tests/C2ComponentInterface_test.cpp b/media/codec2/tests/C2ComponentInterface_test.cpp
index 67f733d..d1844b0 100644
--- a/media/codec2/tests/C2ComponentInterface_test.cpp
+++ b/media/codec2/tests/C2ComponentInterface_test.cpp
@@ -235,7 +235,7 @@
     // |*heapParams[0]| is a parameter value. The size of |heapParams| has to be one.
     ASSERT_EQ(1u, heapParams.size());
     EXPECT_TRUE(heapParams[0]);
-    EXPECT_EQ(*heapParams[0], expected);
+    EXPECT_EQ(*heapParams[0], (C2Param &)(expected));
 }
 
 template <typename T> void C2CompIntfTest::querySupportedParam() {
diff --git a/media/codec2/tests/vndk/C2FenceTest.cpp b/media/codec2/tests/vndk/C2FenceTest.cpp
new file mode 100644
index 0000000..9292381
--- /dev/null
+++ b/media/codec2/tests/vndk/C2FenceTest.cpp
@@ -0,0 +1,455 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <C2Buffer.h>
+#include <C2FenceFactory.h>
+
+#include <unistd.h>
+
+#include <android-base/unique_fd.h>
+#include <linux/kcmp.h>       /* Definition of KCMP_* constants */
+#include <sys/mman.h>
+#include <sys/syscall.h>      /* Definition of SYS_* constants */
+#include <ui/Fence.h>
+
+namespace android {
+
+static int fd_kcmp(int fd1, int fd2) {
+    static pid_t pid = getpid();
+
+    return syscall(SYS_kcmp, pid, pid, KCMP_FILE, fd1, fd2);
+}
+
+// matcher to check if value (arg) and fd refers to the same file
+MATCHER_P(RefersToTheSameFile, fd, "") {
+    return fd_kcmp(fd, arg) == 0;
+}
+
+// matcher to check if value (arg) is a dup of an fd
+MATCHER_P(IsDupOf, fd, "") {
+    return (ExplainMatchResult(::testing::Ne(-1), arg, result_listener) &&
+            ExplainMatchResult(::testing::Ne(fd), arg, result_listener) &&
+            ExplainMatchResult(RefersToTheSameFile(fd), arg, result_listener));
+}
+
+class C2FenceTest : public ::testing::Test {
+public:
+    C2FenceTest() = default;
+
+    ~C2FenceTest() = default;
+
+
+protected:
+    enum : int32_t {
+        SYNC_FENCE_DEPRECATED_MAGIC     = 3,
+        SYNC_FENCE_UNORDERED_MAGIC      = '\302fsu',
+        SYNC_FENCE_MAGIC                = '\302fso',
+    };
+
+    // Validate a null fence
+    void validateNullFence(const C2Fence &fence);
+
+    // Validate a single fd sync fence
+    void validateSingleFdFence(const C2Fence &fence, int fd);
+
+    // Validate a two fd unordered sync fence
+    void validateTwoFdUnorderedFence(const C2Fence &fence, int fd1, int fd2, int mergeFd);
+
+    // Validate a three fd sync fence
+    void validateThreeFdFence(const C2Fence &fence, int fd1, int fd2, int fd3);
+};
+
+TEST_F(C2FenceTest, IsDupOf_sanity) {
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    EXPECT_THAT(fd1, ::testing::Not(IsDupOf(fd2)));
+    EXPECT_THAT(-1, ::testing::Not(IsDupOf(fd2)));
+    EXPECT_THAT(-1, ::testing::Not(IsDupOf(-1)));
+    EXPECT_THAT(fd3, ::testing::Not(IsDupOf(fd3)));
+
+    int fd4 = dup(fd3);
+    EXPECT_THAT(fd4, IsDupOf(fd3));
+    EXPECT_THAT(fd3, IsDupOf(fd4));
+
+    close(fd1);
+    close(fd2);
+    close(fd3);
+    close(fd4);
+}
+
+TEST_F(C2FenceTest, NullFence) {
+    validateNullFence(C2Fence());
+}
+
+void C2FenceTest::validateNullFence(const C2Fence &fence) {
+    // Verify that the fence is valid.
+    EXPECT_TRUE(fence.valid());
+    EXPECT_TRUE(fence.ready());
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_EQ(fenceFd.get(), -1);
+    EXPECT_FALSE(fence.isHW()); // perhaps this should be false for a null fence
+
+    // A null fence has no fds
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::IsEmpty());
+    for (int fd : fds) {
+        close(fd);
+    }
+
+    // A null fence has no native handle
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::IsNull());
+    if (handle) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, SyncFence_with_negative_fd) {
+    // Create a SyncFence with a negative fd.
+    C2Fence fence = _C2FenceFactory::CreateSyncFence(-1, false /* validate */);
+
+    validateNullFence(fence);
+}
+
+TEST_F(C2FenceTest, SyncFence_with_valid_fd) {
+    // Create a SyncFence with a valid fd. We cannot create an actual sync fd,
+    // so we cannot test wait(), but we can verify the ABI APIs
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateSyncFence(fd, false /* validate */);
+    validateSingleFdFence(fence, fd);
+}
+
+void C2FenceTest::validateSingleFdFence(const C2Fence &fence, int fd) {
+    // EXPECT_TRUE(fence.valid()); // need a valid sync fd to test this
+    // EXPECT_TRUE(fence.ready());
+    // Verify that the fence says it is a HW sync fence.
+    EXPECT_TRUE(fence.isHW()); // FIXME this may be an implementation detail
+
+    // Verify that the fd returned is a duped version of the initial fd
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_THAT(fenceFd.get(), IsDupOf(fd));
+
+    // Verify that fds returns a duped version of the initial fd
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::SizeIs(1));
+    EXPECT_THAT(fds, ::testing::ElementsAre(IsDupOf(fd)));
+    for (int fd_i : fds) {
+        close(fd_i);
+    }
+
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::NotNull());
+    if (handle) {
+        EXPECT_EQ(handle->numFds, 1);
+        EXPECT_EQ(handle->numInts, 1);
+        EXPECT_THAT(handle->data[0], IsDupOf(fd));
+        EXPECT_EQ(handle->data[1], SYNC_FENCE_MAGIC);
+
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_one_valid_test_fd) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { -1, fd, -1 }, &status);
+    // if we only have one valid fd, we are not merging fences, so the test fd is not validated
+    EXPECT_EQ(status, C2_OK);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_one_valid_test_fd_null_status) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { -1, fd, -1 });
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_merge_failure) {
+    // Create a multi SyncFence with multiple non-sync-fence fds. This should
+    // result in a fence being created, but also an error.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { fd1, fd2, fd3 }, &status);
+    EXPECT_EQ(status, C2_CORRUPTED);
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_merge_failure_null_status) {
+    // Create a multi SyncFence with multiple non-sync-fence fds. This should
+    // result in a fence being created, but also an error.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { fd1, fd2, fd3 });
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_multiple_fds) {
+    // We cannot create a true unordered multi sync fence as we can only
+    // create test fds and those cannot be merged. As such, we cannot
+    // test the factory method CreateUnorderedMultiSyncFence. We can however
+    // create a test fence from a constructed native handle.
+
+    // Technically, we need 3 fds: if we ended up with only 2, the 2nd (final
+    // fence) fd would be equivalent to the first and would not be needed. In
+    // fact we generate (and have always generated) a single-fd fence in that
+    // case.
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int mergeFd = memfd_create("test3", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(3 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd1;
+    handle->data[1] = fd2;
+    handle->data[2] = mergeFd;
+    handle->data[3] = SYNC_FENCE_UNORDERED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateTwoFdUnorderedFence(fence, fd1, fd2, mergeFd);
+}
+
+void C2FenceTest::validateTwoFdUnorderedFence(
+        const C2Fence &fence, int fd1, int fd2, int mergeFd) {
+    // EXPECT_TRUE(fence.valid()); // need a valid sync fd to test this
+    // EXPECT_TRUE(fence.ready());
+    // Verify that the fence says it is a HW sync fence.
+    EXPECT_TRUE(fence.isHW()); // FIXME this may be an implementation detail
+
+    // Verify that the fd returned is a duped version of the merge fd
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_THAT(fenceFd.get(), IsDupOf(mergeFd));
+
+    // Verify that fds returns a duped versions of the initial fds (but not the merge fd)
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::SizeIs(2));
+    EXPECT_THAT(fds, ::testing::ElementsAre(IsDupOf(fd1), IsDupOf(fd2)));
+    for (int fd_i : fds) {
+        close(fd_i);
+    }
+
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::NotNull());
+    if (handle) {
+        EXPECT_EQ(handle->numFds, 3);
+        EXPECT_EQ(handle->numInts, 1);
+        EXPECT_THAT(handle->data[0], IsDupOf(fd1));
+        EXPECT_THAT(handle->data[1], IsDupOf(fd2));
+        EXPECT_THAT(handle->data[2], IsDupOf(mergeFd));
+        EXPECT_EQ(handle->data[3], SYNC_FENCE_UNORDERED_MAGIC);
+
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, MultiSyncFence_with_one_valid_test_fd) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateMultiSyncFence(
+        { -1, fd, -1 }, &status);
+    // if we only have one valid fd, we are not merging fences, so the test fds are not validated
+    EXPECT_EQ(status, C2_OK);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, MultiSyncFence_with_one_valid_test_fd_null_status) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateMultiSyncFence(
+        { -1, fd, -1 });
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, MultiSyncFence_with_multiple_fds) {
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateMultiSyncFence(
+        { fd1, fd2, fd3 }, &status);
+    // test fds are not validated
+    EXPECT_EQ(status, C2_OK);
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+void C2FenceTest::validateThreeFdFence(const C2Fence &fence, int fd1, int fd2, int fd3) {
+    // EXPECT_TRUE(fence.valid()); // need a valid sync fd to test this
+    // EXPECT_TRUE(fence.ready());
+    // Verify that the fence says it is a HW sync fence.
+    EXPECT_TRUE(fence.isHW()); // FIXME this may be an implementation detail
+
+    // Verify that the fd returned is a duped version of the final fd
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_THAT(fenceFd.get(), IsDupOf(fd3));
+
+    // Verify that fds returns a duped versions of all 3 initial fds
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::SizeIs(3));
+    EXPECT_THAT(fds, ::testing::ElementsAre(IsDupOf(fd1), IsDupOf(fd2), IsDupOf(fd3)));
+    for (int fd_i : fds) {
+        close(fd_i);
+    }
+
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::NotNull());
+    if (handle) {
+        EXPECT_EQ(handle->numFds, 3);
+        EXPECT_EQ(handle->numInts, 1);
+        EXPECT_THAT(handle->data[0], IsDupOf(fd1));
+        EXPECT_THAT(handle->data[1], IsDupOf(fd2));
+        EXPECT_THAT(handle->data[2], IsDupOf(fd3));
+        EXPECT_EQ(handle->data[3], SYNC_FENCE_MAGIC);
+
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, BackwardCompat_UDC_sync_fence) {
+    // Create a single SyncFence from a UDC native handle
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(1 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd;
+    handle->data[1] = SYNC_FENCE_DEPRECATED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, BackwardCompat_24Q1_single_fd_fence) {
+    // Create a single SyncFence from a 24Q1 native handle
+    // This had the same (albeit separately duped) fd twice, and used the legacy
+    // magic number.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(2 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd;
+    handle->data[1] = dup(fd);
+    handle->data[2] = SYNC_FENCE_DEPRECATED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, BackwardCompat_24Q3_single_fd_fence) {
+    // Create a single SyncFence from the defined native handle
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(1 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd;
+    handle->data[1] = SYNC_FENCE_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, BackwardCompat_24Q1_multi_fd_fence) {
+    // Create a multi-fd SyncFence from a 24Q1 era native handle with
+    // the legacy magic number.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int mergeFd = memfd_create("test3", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(3 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd1;
+    handle->data[1] = fd2;
+    handle->data[2] = mergeFd;
+    handle->data[3] = SYNC_FENCE_DEPRECATED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateTwoFdUnorderedFence(fence, fd1, fd2, mergeFd);
+}
+
+// No need to create BackwardCompat_24Q3_unordered_multi_fd_fence because
+// we are creating that fence already from the 24Q3 native handle layout
+// in the UnorderedMultiSyncFence_with_multiple_fds test.
+
+TEST_F(C2FenceTest, BackwardCompat_24Q3_multi_fd_fence) {
+    // Create a multi-fd SyncFence from a 24Q3 era native handle with
+    // the current magic number.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(3 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd1;
+    handle->data[1] = fd2;
+    handle->data[2] = fd3;
+    handle->data[3] = SYNC_FENCE_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+} // namespace android
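Side note (illustrative, not part of the patch): the SYNC_FENCE_* magic numbers used by the tests above, and by C2Fence below, are multi-character literals, so '\302fso' packs the bytes 0xC2 'f' 's' 'o' into one int32_t stored as the last int of the native handle. A quick sanity check of that encoding; multi-character literal values are implementation-defined, and the byte order asserted here assumes the GCC/Clang behavior of AOSP toolchains.

#include <cassert>
#include <cstdint>

int main() {
    constexpr int32_t kSyncFenceMagic = '\302fso';
    assert(((kSyncFenceMagic >> 24) & 0xff) == 0xc2);  // '\302'
    assert(((kSyncFenceMagic >> 16) & 0xff) == 'f');
    assert(((kSyncFenceMagic >> 8) & 0xff) == 's');
    assert((kSyncFenceMagic & 0xff) == 'o');
    return 0;
}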
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 9f57bfd..dc06ee6 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -53,6 +53,7 @@
     ],
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 60b5b29..971b5a5 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -318,6 +318,14 @@
         return reinterpret_cast<C2HandleAhwb *>(res);
     }
 
+    static uint32_t getPixelFormat(const C2Handle *const handle) {
+        if (handle == nullptr) {
+            return 0;
+        }
+        const ExtraData *xd = GetExtraData(handle);
+        return xd->format;
+    }
+
     static C2HandleAhwb* WrapNativeHandle(
             const native_handle_t *const handle,
             uint32_t width, uint32_t height, uint32_t format, uint64_t usage,
@@ -899,7 +907,17 @@
 
 
 native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
-    return C2HandleGralloc::UnwrapNativeHandle(handle);
+    if (handle == nullptr) {
+        return nullptr;
+    }
+    if (C2AllocatorGralloc::CheckHandle(handle)) {
+        return C2HandleGralloc::UnwrapNativeHandle(handle);
+    }
+    if (C2AllocatorAhwb::CheckHandle(handle)) {
+        return C2HandleAhwb::UnwrapNativeHandle(handle);
+    }
+    ALOGE("tried to unwrap non c2 compatible handle");
+    return nullptr;
 }
 
 C2Handle *WrapNativeCodec2GrallocHandle(
@@ -911,7 +929,38 @@
 }
 
 uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle) {
-    return C2HandleGralloc::getPixelFormat(handle);
+    if (C2AllocatorGralloc::CheckHandle(handle)) {
+        return C2HandleGralloc::getPixelFormat(handle);
+    }
+    if (C2AllocatorAhwb::CheckHandle(handle)) {
+        return C2HandleAhwb::getPixelFormat(handle);
+    }
+    ALOGE("tried to extract pixelformat from non c2 compatible handle");
+    return 0;
+}
+
+bool ExtractMetadataFromCodec2GrallocHandle(
+        const C2Handle *const handle,
+        uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage, uint32_t *stride) {
+    if (handle == nullptr) {
+        ALOGE("ExtractMetadata from nullptr");
+        return false;
+    }
+    if (C2AllocatorGralloc::CheckHandle(handle)) {
+        uint32_t generation;
+        uint64_t igbp_id;
+        uint32_t igbp_slot;
+        (void)C2HandleGralloc::Import(handle, width, height, format, usage, stride,
+                                      &generation, &igbp_id, &igbp_slot);
+        return true;
+    }
+    if (C2AllocatorAhwb::CheckHandle(handle)) {
+        uint64_t origId;
+        (void)C2HandleAhwb::Import(handle, width, height, format, usage, stride, &origId);
+        return true;
+    }
+    ALOGE("ExtractMetadata from non compatible handle");
+    return false;
 }
 
 bool MigrateNativeCodec2GrallocHandle(
@@ -1137,8 +1186,17 @@
         const C2Handle *const handle,
         uint32_t *width, uint32_t *height, uint32_t *format,uint64_t *usage, uint32_t *stride,
         uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot) {
-    (void)C2HandleGralloc::Import(handle, width, height, format, usage, stride,
-                                  generation, igbp_id, igbp_slot);
+    if (C2AllocatorGralloc::CheckHandle(handle)) {
+        (void)C2HandleGralloc::Import(handle, width, height, format, usage, stride,
+                                      generation, igbp_id, igbp_slot);
+        return;
+    }
+    if (C2AllocatorAhwb::CheckHandle(handle)) {
+        uint64_t origId;
+        (void)C2HandleAhwb::Import(handle, width, height, format, usage, stride, &origId);
+        return;
+    }
+    ALOGE("Tried to extract metadata from non c2 compatible handle");
 }
 
 C2AllocatorGralloc::Impl::Impl(id_t id, bool bufferQueue)
@@ -1250,10 +1308,6 @@
 }
 
 
-native_handle_t *UnwrapNativeCodec2AhwbHandle(const C2Handle *const handle) {
-    return C2HandleAhwb::UnwrapNativeHandle(handle);
-}
-
 C2Handle *WrapNativeCodec2AhwbHandle(
         const native_handle_t *const handle,
         uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride,
@@ -1477,13 +1531,6 @@
     c2_status_t mInit;
 };
 
-void _UnwrapNativeCodec2AhwbMetadata(
-        const C2Handle *const handle,
-        uint32_t *width, uint32_t *height, uint32_t *format,uint64_t *usage, uint32_t *stride,
-        uint64_t *origId) {
-    (void)C2HandleAhwb::Import(handle, width, height, format, usage, stride, origId);
-}
-
 C2AllocatorAhwb::Impl::Impl(id_t id)
     : mInit(C2_OK) {
     // TODO: get this from allocator
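
For illustration, a minimal caller-side sketch of the dispatch-by-handle-type helpers added above, assuming the codec2 VNDK include path; the function name and logging are hypothetical and not part of this change:

#define LOG_TAG "C2HandleExample"

#include <inttypes.h>

#include <C2AllocatorGralloc.h>
#include <log/log.h>

// Reads back buffer metadata regardless of whether the C2Handle came from
// C2AllocatorGralloc or C2AllocatorAhwb (hypothetical helper).
static void logCodec2GraphicHandle(const C2Handle *const handle) {
    uint32_t width = 0, height = 0, format = 0, stride = 0;
    uint64_t usage = 0;
    if (!android::ExtractMetadataFromCodec2GrallocHandle(
            handle, &width, &height, &format, &usage, &stride)) {
        ALOGW("not a recognizable codec2 graphic handle");
        return;
    }
    ALOGI("handle: %ux%u format=%u stride=%u usage=0x%" PRIx64,
          width, height, format, stride, usage);
}
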
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 5d50fc3..d28f926 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -28,16 +28,27 @@
 
 #include <utility>
 
-#define MAX_FENCE_FDS 1
+// support up to 32 sync fds (and an optional merged fd), and 1 int
+#define MAX_FENCE_FDS  33
+#define MAX_FENCE_INTS 1
 
 class C2Fence::Impl {
 public:
-    enum type_t : uint32_t {
-        INVALID_FENCE,
-        NULL_FENCE,
-        SURFACE_FENCE,
-        SYNC_FENCE,
-        PIPE_FENCE,
+    // These enums are not part of the ABI, so can be changed.
+    enum type_t : int32_t {
+        INVALID_FENCE     = -1,
+        NULL_FENCE        = 0,
+        SURFACE_FENCE     = 2,
+
+        SYNC_FENCE        = 3,
+        PIPE_FENCE        = 4,
+    };
+
+    // magic numbers for native handles
+    enum : int32_t {
+        SYNC_FENCE_DEPRECATED_MAGIC     = 3,
+        SYNC_FENCE_UNORDERED_MAGIC      = '\302fsu',
+        SYNC_FENCE_MAGIC                = '\302fso',
     };
 
     virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
@@ -54,7 +65,8 @@
 
     /**
      * Create a native handle for the fence so it can be marshalled.
-     * The native handle must store fence type in the first integer.
+     * All native handles must store fence type in the last integer.
+     * The created native handle (if not null) must be closed by the caller.
      *
      * \return a valid native handle if the fence can be marshalled, otherwise return null.
      */
@@ -64,11 +76,29 @@
 
     Impl() = default;
 
+    /**
+     * Get the type of the fence from the native handle.
+     *
+     * \param nh the native handle to get the type from.
+     * \return the type of the fence, or INVALID_FENCE if the native handle is
+     * invalid or malformed.
+     */
     static type_t GetTypeFromNativeHandle(const native_handle_t* nh) {
-        if (nh && nh->numFds >= 0 && nh->numFds <= MAX_FENCE_FDS && nh->numInts > 0) {
-            return static_cast<type_t>(nh->data[nh->numFds]);
+        if (!nh || nh->numFds < 0 || nh->numFds > MAX_FENCE_FDS
+                || nh->numInts < 1 || nh->numInts > MAX_FENCE_INTS) {
+            return INVALID_FENCE;
         }
-        return INVALID_FENCE;
+
+        // the magic number for Codec 2.0 native handles is the last integer
+        switch (nh->data[nh->numFds + nh->numInts - 1]) {
+            case SYNC_FENCE_MAGIC:
+            case SYNC_FENCE_UNORDERED_MAGIC:
+            case SYNC_FENCE_DEPRECATED_MAGIC:
+                return SYNC_FENCE;
+
+            default:
+                return INVALID_FENCE;
+        }
     }
 };
 
@@ -189,6 +219,53 @@
 
 using namespace android;
 
+/**
+ * Implementation for a sync fence.
+ *
+ * A sync fence is fundamentally a fence that is created from an android sync
+ * fd (which represents a HW fence).
+ *
+ * The native handle layout for a single sync fence is:
+ *   fd[0]  - sync fd
+ *   int[0] - magic (SYNC_FENCE_MAGIC (='\302fso'))
+ *
+ * Note: Between Android T and 24Q3, the magic number was erroneously
+ * SYNC_FENCE (=3).
+ *
+ * Multi(ple) Sync Fences
+ *
+ * Since Android 24Q3, this implementation also supports a sequence of
+ * sync fences. When this is the case, there is an expectation that the last
+ * sync fence being ready will guarantee that all other sync fences are
+ * also ready. (This guarantees backward compatibility to a single fd sync fence,
+ * and mFence will be that final fence.)
+ *
+ * It is furthermore recommended that the fences be in order - either by
+ * expected signaling time, or by the order in which they need to be ready. The
+ * specific ordering is not specified or enforced, but it could be an
+ * implementation requirement of the specific use case in the future.
+ *
+ * This implementation also supports an unordered set of sync fences. In this
+ * case, it will merge all the fences into a single merged fence, which will
+ * be the backward compatible singular fence (stored in mFence).
+ *
+ * The native handle layout for an unordered multi-fence sync fence (from Android
+ * 24Q3) is:
+ *
+ *   fd[0]   - sync fd 1
+ *   ...
+ *   fd[n-1] - sync fd N
+ *   fd[n]   - merged fence fd
+ *   int[0]  - magic (SYNC_FENCE_UNORDERED_MAGIC (='\302fsu'))
+ *
+ * The native handle layout for an ordered multi-fence sync fence (from Android
+ * 24Q3) is:
+ *
+ *   fd[0]   - sync fd 1
+ *   ...
+ *   fd[n-1] - sync fd N
+ *   int[0]  - magic (SYNC_FENCE_MAGIC (='\302fso'))
+ */
 class _C2FenceFactory::SyncFenceImpl : public C2Fence::Impl {
 public:
     virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
@@ -218,11 +295,19 @@
         return mFence->dup();
     }
 
+    /**
+     * Returns a duped list of fds used when creating this fence. It will
+     * not return the internally created merged fence fd.
+     */
     std::vector<int> fds() const {
         std::vector<int> retFds;
         for (int index = 0; index < mListFences.size(); index++) {
             retFds.push_back(mListFences[index]->dup());
         }
+        // ensure that at least one fd is returned
+        if (mListFences.empty()) {
+            retFds.push_back(mFence->dup());
+        }
         return retFds;
     }
 
@@ -236,7 +321,18 @@
 
     virtual native_handle_t *createNativeHandle() const {
         std::vector<int> nativeFds = fds();
-        nativeFds.push_back(fd());
+        int32_t magic = SYNC_FENCE_MAGIC;
+
+        // Also parcel the singular fence if it is not already part of the list.
+        // If this was a single-fd fence, mListFences will be empty, but fds()
+        // already returned a list containing that single fd.
+        if (!mListFences.empty() && mListFences.back() != mFence) {
+            nativeFds.push_back(fd());
+            if (!mListFences.empty()) {
+                magic = SYNC_FENCE_UNORDERED_MAGIC;
+            }
+        }
+
         native_handle_t* nh = native_handle_create(nativeFds.size(), 1);
         if (!nh) {
             ALOGE("Failed to allocate native handle for sync fence");
@@ -249,71 +345,122 @@
         for (int i = 0; i < nativeFds.size(); i++) {
             nh->data[i] = nativeFds[i];
         }
-        nh->data[nativeFds.size()] = type();
+        nh->data[nativeFds.size()] = magic;
         return nh;
     }
 
     virtual ~SyncFenceImpl() {};
 
+    /**
+     * Constructs a SyncFenceImpl from a single sync fd. No error checking is
+     * performed on the fd here as we cannot make this a null fence.
+     *
+     * \param fenceFd the fence fd to create the SyncFenceImpl from.
+     */
     SyncFenceImpl(int fenceFd) :
         mFence(sp<Fence>::make(fenceFd)) {
-        mListFences.clear();
-        if (mFence) {
-            mListFences.push_back(mFence);
-        }
     }
 
-    SyncFenceImpl(const std::vector<int>& fenceFds, int mergedFd) {
-        mListFences.clear();
-
-        for (int fenceFd : fenceFds) {
-            if (fenceFd < 0) {
-                continue;
-            } else {
-                mListFences.push_back(sp<Fence>::make(fenceFd));
-                if (!mListFences.back()) {
-                    mFence.clear();
-                    break;
-                }
-                if (mergedFd == -1) {
-                    mFence = (mFence == nullptr) ? (mListFences.back()) :
-                        (Fence::merge("syncFence", mFence, mListFences.back()));
-                }
-            }
-        }
-        if (mergedFd != -1)
-        {
-            mFence = sp<Fence>::make(mergedFd);
-        }
-        if (!mFence) {
-            mListFences.clear();
-        }
+    SyncFenceImpl(const sp<Fence> &fence) :
+        mFence(fence) {
     }
 
-    static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(const native_handle_t* nh) {
-        if (!nh || nh->numFds < 1 || nh->numInts < 1) {
-            ALOGE("Invalid handle for sync fence");
+    /**
+     * Constructs a SyncFenceImpl from a list of sync fds.
+     *
+     * \param fences the list of fences to create the SyncFenceImpl from.
+     * \param finalFence the singular fence for this multi-fd fence. This can
+     * be either the last fence in fences or a separate (merged) fence.
+     */
+    SyncFenceImpl(const std::vector<sp<Fence>>& fences, const sp<Fence> &finalFence) :
+        mListFences(fences),
+        mFence(finalFence) {
+    }
+
+    /**
+     * Creates a SyncFenceImpl from a native handle.
+     *
+     * \param nh the native handle to create the SyncFenceImpl from.
+     * \param takeOwnership if true, the SyncFenceImpl will take ownership of the
+     *                      file descriptors in the native handle. Otherwise,
+     *                      the SyncFenceImpl will dup the file descriptors.
+     *
+     * \return a shared_ptr to the SyncFenceImpl, or nullptr if the native
+     * handle is invalid or malformed.
+    */
+    static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(
+            const native_handle_t* nh, bool takeOwnership) {
+        // we should only call this method if _C2FenceFactory::GetTypeFromNativeHandle
+        // returned SYNC_FENCE, but do these checks anyway to avoid overflows
+        // in case that does not happen.
+        if (!nh) {
+            ALOGE("Invalid handle for a sync fence (nullptr)");
+            return nullptr;
+        } else if (nh->numFds < 1 || nh->numInts < 1
+                || nh->numFds > MAX_FENCE_FDS || nh->numInts > MAX_FENCE_INTS) {
+            ALOGE("Invalid handle for a sync fence (%d fds, %d ints)", nh->numFds, nh->numInts);
             return nullptr;
         }
-        std::vector<int> fds;
-        for (int i = 0; i < nh->numFds-1; i++) {
-            fds.push_back(dup(nh->data[i]));
-        }
-        std::shared_ptr<SyncFenceImpl> p = (nh->numFds == 1)?
-                (std::make_shared<SyncFenceImpl>(fds.back())):
-                (std::make_shared<SyncFenceImpl>(fds, (dup(nh->data[nh->numFds-1]))));
-        if (!p) {
-            ALOGE("Failed to allocate sync fence impl");
-            for (int fd : fds) {
-                close(fd);
+        std::vector<sp<Fence>> fences;
+        for (int i = 0; i < nh->numFds; i++) {
+            int fd = nh->data[i];
+            if (!takeOwnership && fd >= 0) {
+                fd = dup(fd);
+            }
+            if (fd >= 0) {
+                sp<Fence> fence = sp<Fence>::make(fd);
+                if (fence) {
+                    fences.push_back(fence);
+                } else {
+                    ALOGW("Failed to create fence from fd %d", fd);
+                }
             }
         }
+
+        std::shared_ptr<SyncFenceImpl> p;
+        if (fences.size() == 0) {
+            ALOGE("No valid fences found in handle for a sync fence");
+            return nullptr;
+        } else if (fences.size() == 1) {
+            p = std::make_shared<SyncFenceImpl>(fences[0]);
+        } else {
+            int32_t magic = nh->data[nh->numFds + nh->numInts - 1];
+            if (magic != SYNC_FENCE_MAGIC) {
+                // The last fence is the merged fence. Separate it.
+                sp<Fence> finalFence = fences.back();
+                fences.pop_back();
+
+                // Special case: if we end up with a single-element list plus
+                // a merged fence, that merged fence must be the same fence.
+                // This happened in an early version of multi-fd support for
+                // single-fd sync fences.
+                if (fences.size() == 1) {
+                    // For single-fd fence the sp-s must be equal
+                    finalFence = fences.back();
+                }
+                p = std::make_shared<SyncFenceImpl>(fences, finalFence);
+            } else {
+                // Use the last fence as the standalone fence.
+                p = std::make_shared<SyncFenceImpl>(fences, fences.back());
+            }
+        }
+
+        ALOGE_IF(!p, "Failed to allocate sync fence impl");
         return p;
     }
 
 private:
+    /**
+     * The list of fences in case of a multi-fence sync fence. Otherwise, this
+     * list is empty.
+     */
     std::vector<sp<Fence>> mListFences;
-    sp<Fence> mFence;  //merged fence in case mListFences size > 0
+
+    /**
+     * The singular fence for this sync fence. For multi-fence sync fences,
+     * this could be a merged fence, or simply the final fence.
+     */
+    sp<Fence> mFence;
 };
 
 std::vector<int> ExtractFdsFromCodec2SyncFence(const C2Fence& fence) {
@@ -324,39 +471,155 @@
     return retFds;
 }
 
-C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd) {
+C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd, bool validate) {
     std::shared_ptr<C2Fence::Impl> p;
     if (fenceFd >= 0) {
         p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFd);
         if (!p) {
             ALOGE("Failed to allocate sync fence impl");
             close(fenceFd);
-        } else if (!p->valid()) {
+        } else if (validate && (!p->valid() || p->ready())) {
+            // don't create a fence object if the sync fd already signaled or is invalid
             p.reset();
         }
     } else {
-        ALOGV("Create sync fence from invalid fd");
-        return C2Fence();
+        ALOGV("Won't create sync fence from invalid fd");
     }
     return C2Fence(p);
 }
 
-C2Fence _C2FenceFactory::CreateMultipleFdSyncFence(const std::vector<int>& fenceFds) {
-    std::shared_ptr<C2Fence::Impl> p;
-    if (fenceFds.size() > 0) {
-        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFds, -1);
-        if (!p) {
-            ALOGE("Failed to allocate sync fence impl closing FDs");
-            for (int fenceFd : fenceFds) {
-                close(fenceFd);
-            }
-        } else if (!p->valid()) {
-            ALOGE("Invalid sync fence created");
-            p.reset();
-        }
-    } else {
-        ALOGE("Create sync fence from invalid fd list of size 0");
+C2Fence _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        const std::vector<int>& fenceFds, c2_status_t *status) {
+    if (status) {
+        *status = C2_OK;
     }
+
+    sp<Fence> finalFence;
+    std::vector<sp<Fence>> fences;
+
+    bool mergeFailed = false;
+    for (int fenceFd : fenceFds) {
+        if (fenceFd < 0) {
+            // ignore invalid fences
+            continue;
+        }
+        sp<Fence> fence = sp<Fence>::make(fenceFd);
+
+        // If we could not create an sp, further sp-s will also fail.
+        if (fence == nullptr) {
+            if (status) {
+                *status = C2_NO_MEMORY;
+            }
+            break;
+        }
+        fences.push_back(fence);
+
+        if (finalFence == nullptr) {
+            finalFence = fence;
+        } else {
+            sp<Fence> mergedFence = Fence::merge("syncFence", finalFence, fence);
+            if (mergedFence == nullptr || mergedFence == Fence::NO_FENCE) {
+                ALOGE_IF(!mergeFailed, "Could not merge fences for sync fence.");
+                mergeFailed = true;
+                if (status) {
+                    *status = (mergedFence == nullptr) ? C2_NO_MEMORY : C2_CORRUPTED;
+                }
+
+                if (mergedFence == nullptr) {
+                    break;
+                }
+                // If we cannot merge one of the fences, the best course of action
+                // is to keep going, as the alternative would be to clear all fences
+                // (making this a null fence) but that will always be ready.
+            } else {
+                finalFence = mergedFence;
+            }
+        }
+    }
+
+    // we may have ended up with a single or no fence due to merging failures or
+    // invalid fds.
+    if (fences.size() == 0) {
+        // we have no fds, we have a null fence.
+        return C2Fence();
+    }
+
+    std::shared_ptr<C2Fence::Impl> p;
+
+    if (fences.size() == 1) {
+        // We have a single sync fd. We don't need the merged fence, which is
+        // already simply that sole fence.
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(finalFence);
+    } else {
+        // if we couldn't merge any fences just use the last one
+        if (finalFence == fences[0]) {
+            finalFence = fences.back();
+        }
+
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fences, finalFence);
+    }
+
+    if (!p) {
+        ALOGE("Failed to allocate sync fence impl closing FDs");
+        // all fds were moved into Fence objects which will close them.
+        if (status) {
+            *status = C2_NO_MEMORY;
+        }
+        return C2Fence();
+    }
+
+    return C2Fence(p);
+}
+
+C2Fence _C2FenceFactory::CreateMultiSyncFence(
+        const std::vector<int>& fenceFds, c2_status_t *status) {
+    if (status) {
+        *status = C2_OK;
+    }
+
+    std::vector<sp<Fence>> fences;
+
+    for (int fenceFd : fenceFds) {
+        if (fenceFd < 0) {
+            // ignore invalid fences
+            continue;
+        }
+        sp<Fence> fence = sp<Fence>::make(fenceFd);
+
+        // If we could not create an sp, keep going with the existing fences.
+        if (fence == nullptr) {
+            if (status) {
+                *status = C2_NO_MEMORY;
+            }
+            break;
+        }
+        fences.push_back(fence);
+    }
+
+    // we may have ended up with a single or no fence due to invalid fds.
+    if (fences.size() == 0) {
+        // we have no fds, we have a null fence.
+        return C2Fence();
+    }
+
+    std::shared_ptr<C2Fence::Impl> p;
+
+    if (fences.size() == 1) {
+        // We have a single sync fd, this is a simple sync fence.
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fences[0]);
+    } else {
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fences, fences.back());
+    }
+
+    if (!p) {
+        ALOGE("Failed to allocate sync fence impl closing FDs");
+        // all fds were moved into Fence objects which will close them.
+        if (status) {
+            *status = C2_NO_MEMORY;
+        }
+        return C2Fence();
+    }
+
     return C2Fence(p);
 }
 
@@ -521,7 +784,8 @@
     return fence.mImpl? fence.mImpl->createNativeHandle() : nullptr;
 }
 
-C2Fence _C2FenceFactory::CreateFromNativeHandle(const native_handle_t* handle) {
+C2Fence _C2FenceFactory::CreateFromNativeHandle(
+        const native_handle_t* handle, bool takeOwnership) {
     if (!handle) {
         return C2Fence();
     }
@@ -529,12 +793,14 @@
     std::shared_ptr<C2Fence::Impl> p;
     switch (type) {
         case C2Fence::Impl::SYNC_FENCE:
-            p = SyncFenceImpl::CreateFromNativeHandle(handle);
+            p = SyncFenceImpl::CreateFromNativeHandle(handle, takeOwnership);
             break;
         default:
             ALOGV("Unsupported fence type %d", type);
-            // If this is malformed-handle close the handle here.
-            (void) native_handle_close(handle);
+            // Still close the handle here if taking ownership.
+            if (takeOwnership) {
+                (void) native_handle_close(handle);
+            }
             // return a null-fence in this case
             break;
     }
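
As a rough illustration of the ownership rules documented above, here is a hedged single-process round trip through a native handle, assuming the existing _C2FenceFactory::CreateNativeHandle() helper; the function and its wiring are illustrative, not part of this change:

#include <C2FenceFactory.h>
#include <cutils/native_handle.h>

// Marshals a sync C2Fence to a native handle and recreates it, as a receiving
// process would. With takeOwnership == true the recreated fence adopts the fds,
// so the caller only deletes the handle and must not close the fds again.
C2Fence roundTripSyncFence(int syncFd) {
    C2Fence sent = _C2FenceFactory::CreateSyncFence(syncFd);  // owns syncFd
    native_handle_t *nh = _C2FenceFactory::CreateNativeHandle(sent);
    if (nh == nullptr) {
        return C2Fence();  // null fence (e.g. the fd was invalid or already signaled)
    }
    // ... nh would normally be parcelled across the process boundary here ...
    C2Fence received =
            _C2FenceFactory::CreateFromNativeHandle(nh, true /* takeOwnership */);
    native_handle_delete(nh);  // fds are now owned by `received`
    return received;
}
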
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index e7fd14f..0987da2 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -478,13 +478,25 @@
 
 class _C2BlockPoolCache {
 public:
-    _C2BlockPoolCache() : mBlockPoolSeqId(C2BlockPool::PLATFORM_START + 1) {}
+    _C2BlockPoolCache() : mBlockPoolSeqId(C2BlockPool::PLATFORM_START + 1) {
+        mBqPoolDeferDeallocAfterStop = false;
+#ifdef __ANDROID_APEX__
+        bool stopHalBeforeSurface = ::android::base::GetBoolProperty(
+                "debug.codec2.stop_hal_before_surface", false);
+        if (!stopHalBeforeSurface) {
+            mBqPoolDeferDeallocAfterStop =
+                    ::android::base::GetIntProperty(
+                            "debug.codec2.bqpool_dealloc_after_stop", 0) != 0;
+        }
+#endif
+    }
 
 private:
     c2_status_t _createBlockPool(
             C2PlatformAllocatorDesc &allocatorParam,
             std::vector<std::shared_ptr<const C2Component>> components,
             C2BlockPool::local_id_t poolId,
+            bool deferDeallocAfterStop,
             std::shared_ptr<C2BlockPool> *pool) {
         std::shared_ptr<C2AllocatorStore> allocatorStore =
                 GetCodec2PlatformAllocatorStore();
@@ -548,6 +560,11 @@
                 if (res == C2_OK) {
                     std::shared_ptr<C2BlockPool> ptr(
                             new C2BufferQueueBlockPool(allocator, poolId), deleter);
+                    if (deferDeallocAfterStop) {
+                        std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                            std::static_pointer_cast<C2BufferQueueBlockPool>(ptr);
+                        bqPool->setDeferDeallocationAfterStop();
+                    }
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
                     mComponents[poolId].insert(
@@ -603,7 +620,8 @@
             std::vector<std::shared_ptr<const C2Component>> components,
             std::shared_ptr<C2BlockPool> *pool) {
         std::unique_lock lock(mMutex);
-        return _createBlockPool(allocator, components, mBlockPoolSeqId++, pool);
+        return _createBlockPool(allocator, components, mBlockPoolSeqId++,
+                                mBqPoolDeferDeallocAfterStop, pool);
     }
 
 
@@ -638,7 +656,7 @@
             C2PlatformAllocatorDesc allocator;
             allocator.allocatorId = C2PlatformAllocatorStore::BUFFERQUEUE;
             return _createBlockPool(
-                    allocator, {component}, blockPoolId, pool);
+                    allocator, {component}, blockPoolId, mBqPoolDeferDeallocAfterStop, pool);
         }
         return C2_NOT_FOUND;
     }
@@ -651,6 +669,8 @@
 
     std::map<C2BlockPool::local_id_t, std::weak_ptr<C2BlockPool>> mBlockPools;
     std::map<C2BlockPool::local_id_t, std::vector<std::weak_ptr<const C2Component>>> mComponents;
+
+    bool mBqPoolDeferDeallocAfterStop;
 };
 
 static std::unique_ptr<_C2BlockPoolCache> sBlockPoolCache =
diff --git a/media/codec2/vndk/include/C2AllocatorGralloc.h b/media/codec2/vndk/include/C2AllocatorGralloc.h
index 1a34c30..53b6262 100644
--- a/media/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/codec2/vndk/include/C2AllocatorGralloc.h
@@ -22,8 +22,25 @@
 #include <C2Buffer.h>
 
 namespace android {
+// VNDK
+/**
+ * Extract pixel format from the extra data of gralloc handle.
+ *
+ * @return 0 when no valid pixel format exists.
+ */
+uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle);
 
 /**
+ * Extract metadata from the extra data of gralloc handle.
+ *
+ * @return {@code false} if extraction failed, {@code true} otherwise.
+ */
+bool ExtractMetadataFromCodec2GrallocHandle(
+    const C2Handle *const handle,
+    uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage, uint32_t *stride);
+
+// Not for VNDK (system partition and inside vndk only)
+/**
  * Unwrap the native handle from a Codec2 handle allocated by C2AllocatorGralloc.
  *
  * @param handle a handle allocated by C2AllocatorGralloc. This includes handles returned for a
@@ -46,13 +63,6 @@
         uint32_t generation = 0, uint64_t igbp_id = 0, uint32_t igbp_slot = 0);
 
 /**
- * Extract pixel format from the extra data of gralloc handle.
- *
- * @return 0 when no valid pixel format exists.
- */
-uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle);
-
-/**
  * When the gralloc handle is migrated to another bufferqueue, update
  * bufferqueue information.
  *
@@ -71,16 +81,6 @@
         uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot);
 
 /**
- * Unwrap the native handle from a Codec2 handle allocated by C2AllocatorAhwb.
- *
- * @param handle a handle allocated by C2AllocatorAhwb. This includes handles returned for a
- * graphic block allocation handle based on an AHardwareBuffer.
- *
- * @return a new NON-OWNING native handle that must be deleted using native_handle_delete.
- */
-native_handle_t *UnwrapNativeCodec2AhwbHandle(const C2Handle *const handle);
-
-/**
  * Wrap the gralloc handle and metadata based on AHardwareBuffer into Codec2 handle
  * recognized by C2AllocatorAhwb.
  *
@@ -92,14 +92,6 @@
         uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride,
         uint64_t origId);
 
-/**
- * \todo Get this from the buffer
- */
-void _UnwrapNativeCodec2AhwbMetadata(
-        const C2Handle *const handle,
-        uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage, uint32_t *stride,
-        uint64_t *origId);
-
 class C2AllocatorGralloc : public C2Allocator {
 public:
     virtual id_t getId() const override;
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index 320b192..806932c 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -28,6 +28,94 @@
 class GraphicBuffer;
 }  // namespace android
 
+/**
+ * BufferQueue based BlockPool.
+ *
+ * This creates graphic blocks from BufferQueue. BufferQueue here is HIDL-ized IGBP.
+ * HIDL-ized IGBP enables vendor HAL to call IGBP interfaces via HIDL over process boundary.
+ * HIDL-ized IGBP is referred to as HGBP. HGBP used to be used in multiple places in Android,
+ * but now this is the only place HGBP is still used.
+ *
+ * Initially there is no HGBP configured, in which case graphic blocks are allocated
+ * from gralloc directly upon \fetchGraphicBlock() requests.
+ *
+ * HGBP can also be configured as null, in which case graphic blocks are likewise
+ * allocated from gralloc directly upon \fetchGraphicBlock() requests.
+ *
+ * If a specific HGBP is configured, the HGBP acts as an allocator for creating graphic blocks.
+ *
+ *
+ * HGBP/IGBP and the BlockPool
+ *
+ * GraphicBuffer(s) from BufferQueue(IGBP/IGBC) are based on slot id.
+ * A created GraphicBuffer occupies a slot(so the GraphicBuffer has a slot-id).
+ * A GraphicBuffer is produced, consumed and recycled based on the slot-id
+ * w.r.t. BufferQueue.
+ *
+ * HGBP::dequeueBuffer() returns a slot id where the slot has an available GraphicBuffer.
+ * If necessary, HGBP allocates a new GraphicBuffer for the slot and indicates
+ * via a return flag that a new buffer was allocated.
+ * To retrieve the GraphicBuffer, HGBP::requestBuffer() along with the slot id
+ * is required. In order to save HGBP remote calls, the blockpool caches the
+ * allocated GraphicBuffer(s) along with the slot information.
+ *
+ * The blockpool provides C2GraphicBlock upon \fetchGraphicBlock().
+ * The C2GraphicBlock has a native handle, which is extracted from a GraphicBuffer
+ * and then cloned so its life-cycle is independent of the GraphicBuffer. The GraphicBuffer
+ * is allocated by HGBP::dequeueBuffer() and retrieved by HGBP::requestBuffer()
+ * if there is a HGBP configured.
+ *
+ *
+ * Life-cycle of C2GraphicBlock
+ *
+ * The decoder HAL writes a decoded frame into C2GraphicBlock. Upon
+ * completion, the component sends the block to the client in the remote process
+ * (i.e. to MediaCodec). The remote process renders the frame into the output surface
+ * via IGBP::queueBuffer() (Note: this is not hidlized.).
+ *
+ * If the decoder HAL destroys the C2GraphicBlock without transferring it to the
+ * client, the destroy request goes to the BlockPool. The BlockPool then
+ * returns the associated GraphicBuffer from its slot to
+ * HGBP for recycling via HGBP::cancelBuffer().
+ *
+ *
+ * Clearing the Cache(GraphicBuffer)
+ *
+ * When the output surface is switched to a new surface, the GraphicBuffers from
+ * the old surface are either migrated or cleared.
+ *
+ * The GraphicBuffer(s) still in use are migrated to a new surface during
+ * configuration via HGBP::attachBuffer(). The GraphicBuffer(s) not in use are
+ * cleared from the cache inside the BlockPool.
+ *
+ * When the surface is switched to a null surface, all the
+ * GraphicBuffers in the cache are cleared.
+ *
+ *
+ * Workaround w.r.t. b/322731059 (Deferring cleaning the cache)
+ *
+ * Some vendor devices have issues with graphic buffer lifecycle management,
+ * where the graphic buffers get released even when the cloned native handles
+ * in the remote process are not closed yet. This issue led to rare crashes
+ * for those devices when the cache is cleared early.
+ *
+ * We worked around the crash by deferring the clearing of the cache.
+ * The workaround is not enabled by default, and can be enabled via a
+ * system property as shown below:
+ *
+ *        'debug.codec2.bqpool_dealloc_after_stop' = 1
+ *
+ * Setting the debug flag causes \::setDeferDeallocationAfterStop() to be called
+ * after the blockpool is created, which enables the deferral.
+ *
+ * Once deferral is enabled, clearing the GraphicBuffers is delayed until
+ *  1) \::clearDeferredBlocks() is called
+ *        (typically after the HAL processes a stop() request), or
+ *  2) a new \::fetchGraphicBlock() is called.
+ *
+ * Since deallocation is deferred, memory consumption is higher during that
+ * brief period.
+ */
 class C2BufferQueueBlockPool : public C2BlockPool {
 public:
     C2BufferQueueBlockPool(const std::shared_ptr<C2Allocator> &allocator, const local_id_t localId);
@@ -77,6 +165,8 @@
      * is configured as nullptr, unique id which is bundled in native_handle is zero.
      *
      * \param producer      the IGBP, which will be used to fetch blocks
+     *                      This could be null, in which case this blockpool will
+     *                      allocate the backing GraphicBuffers via the allocator (gralloc).
      */
     virtual void configureProducer(const android::sp<HGraphicBufferProducer> &producer);
 
@@ -89,6 +179,8 @@
      * is configured as nullptr, unique id which is bundled in native_handle is zero.
      *
      * \param producer      the IGBP, which will be used to fetch blocks
+     *                      This could be null, in which case this blockpool will
+     *                      allocate the backing GraphicBuffers via the allocator (gralloc).
      * \param syncMemory    Shared memory for synchronization of allocation & deallocation.
      * \param bqId          Id of IGBP
      * \param generationId  Generation Id for rendering output
@@ -110,6 +202,26 @@
      */
     virtual void invalidate();
 
+    /**
+     * Defer deallocation of cached blocks.
+     *
+     * Deallocation of cached blocks will be deferred until
+     * \clearDeferredBlocks() is called, or until a new block allocation is
+     * requested by \fetchGraphicBlock().
+     */
+    void setDeferDeallocationAfterStop();
+
+
+    /**
+     * Clear deferred blocks.
+     *
+     * Deallocation of cached blocks can be deferred by
+     * \setDeferDeallocationAfterStop(); this method clears (deallocates)
+     * those deferred cached blocks explicitly.
+     * Use this interface if the blockpool could be inactive indefinitely.
+     */
+    void clearDeferredBlocks();
+
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
     const local_id_t mLocalId;
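
A minimal sketch of driving the workaround described above directly, assuming a C2BufferQueueBlockPool instance is at hand; the wiring is hypothetical, since in practice the debug property enables this from the block pool cache:

#include <memory>

#include <C2BqBufferPriv.h>

// Defer freeing cached GraphicBuffers when the surface is later switched to null.
void enableDeferredDealloc(const std::shared_ptr<C2BufferQueueBlockPool> &pool) {
    pool->setDeferDeallocationAfterStop();
}

// After the HAL has processed stop(), release the deferred buffers explicitly so
// an indefinitely inactive pool does not keep holding them.
void releaseDeferredBuffers(const std::shared_ptr<C2BufferQueueBlockPool> &pool) {
    pool->clearDeferredBlocks();
}
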
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index 4f974ca..cabd5d9 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -23,13 +23,19 @@
 #include <android-base/unique_fd.h>
 
 /*
- * Create a list of fds from fence
+ * Extract a list of sync fence fds from a potentially multi-sync C2Fence.
+ * This will return dupped file descriptors of the fences used to create the
+ * sync fence. Specifically, for an unordered multi-sync fence, the merged
+ * singular fence will not be returned even though it is created as part of
+ * constructing the C2Fence object. On the other hand, for a single fd sync
+ * fence, the returned list will contain the sole file descriptor.
  *
  * \param fence   C2Fence object from which associated
  *                file descriptors need to be extracted
- * \return a vector of fds otherwise return vector of size 0
+ * \return a vector of sync fence fds. This will be a vector of size 0 if C2Fence
+ *         is not a sync fence. The caller is responsible for closing the
+ *         fds in the returned vector.
  */
-
 std::vector<int> ExtractFdsFromCodec2SyncFence(const C2Fence& fence);
 
 class C2SurfaceSyncMemory;
@@ -54,20 +60,76 @@
             uint32_t waitId);
 
     /*
-     * Create C2Fence from a fence file fd.
+     * Create C2Fence from a sync fence fd.
      *
-     * \param fenceFd           Fence file descriptor.
+     * \param fenceFd           Sync fence file descriptor.
      *                          It will be owned and closed by the returned fence object.
+     * \param validate          If true, the fence fd will be validated to ensure
+     *                          it is a valid pending sync fence fd.
      */
-    static C2Fence CreateSyncFence(int fenceFd);
+    static C2Fence CreateSyncFence(int fenceFd, bool validate = true);
 
     /*
-     * Create C2Fence from list of fence file fds.
+     * Create C2Fence from list of sync fence fds, while also merging them to
+     * create a singular fence, which can be used as a backward compatible sync
+     * fence.
      *
-     * \param fenceFds          Vector of file descriptor for fence.
-     *                          It will be owned and closed by the returned fence object.
+     * \param fenceFds   Vector of sync fence file descriptors.
+     *                   All file descriptors will be owned (and closed) by
+     *                   the returned fence object.
      */
-    static C2Fence CreateMultipleFdSyncFence(const std::vector<int>& fenceFds);
+    [[deprecated("Use CreateUnorderedMultiSyncFence instead.")]]
+    static C2Fence CreateMultipleFdSyncFence(const std::vector<int>& fenceFds) {
+        return CreateUnorderedMultiSyncFence(fenceFds);
+    }
+
+    /*
+     * Create C2Fence from a list of unordered sync fence fds, while also merging
+     * them to create a singular fence, which can be used as a backward compatible
+     * sync fence.
+     *
+     * \param fenceFds   Vector of sync fence file descriptors.
+     *                   All file descriptors will be owned (and closed) by
+     *                   the returned fence object.
+     * \param status     Optional pointer to a status field. If not null, it will be
+     *                   updated with the status of the operation. Possible values
+     *                   are:
+     *                   - C2_OK: The operation succeeded.
+     *                   - C2_NO_MEMORY: The operation failed because of lack of
+     *                     memory.
+     *                   - C2_CORRUPTED: The operation failed because the sync
+     *                     fence fds could not be merged.
+     * \return           A C2Fence object representing the sync fence fds, or
+     *                   an empty C2Fence if no C2Fence could be created.
+     *                   It is possible for the operation to fail but still return
+     *                   a possibly viable C2Fence object, e.g. if the merge
+     *                   operation failed only partially. Similarly, it is possible
+     *                   for the operation to succeed but still return an empty
+     *                   C2Fence object, e.g. if all fence fds were invalid.
+     */
+    static C2Fence CreateUnorderedMultiSyncFence(
+            const std::vector<int>& fenceFds, c2_status_t *status = nullptr /* nullable */);
+
+    /*
+     * Create C2Fence from a list of sync fence fds. Waiting for the last fence
+     * must guarantee that all other fences are also signaled.
+     *
+     * \param fenceFds   Vector of sync fence file descriptors.
+     *                   All file descriptors will be owned (and closed) by
+     *                   the returned fence object.
+     * \param status     Optional pointer to a status field. If not null, it will be
+     *                   updated with the status of the operation. Possible values
+     *                   are:
+     *                   - C2_OK: The operation succeeded.
+     *                   - C2_NO_MEMORY: The operation failed because of lack of
+     *                     memory.
+     * \return           A C2Fence object representing the sync fence fds, or
+     *                   an empty C2Fence if the operation failed.  It is possible
+     *                   for the operation to succeed but still return an empty
+     *                   C2Fence object, e.g. if all fence fds were invalid.
+     */
+    static C2Fence CreateMultiSyncFence(
+            const std::vector<int>& fenceFds, c2_status_t *status = nullptr /* nullable */);
 
     /*
      * Create C2Fence from an fd created by pipe()/pipe2() syscall.
@@ -97,13 +159,18 @@
 
     /*
      * Create C2Fence from a native handle.
-
+     *
      * \param handle           A native handle representing a fence
-     *                         The fd in the native handle will be duplicated, so the caller will
-     *                         still own the handle and have to close it.
+     * \param takeOwnership    If true, the native handle and the file descriptors
+     *                         within will be owned by the returned fence object.
+     *                         If false (default), the caller will still own the
+     *                         handle and its file descriptors and will have to
+     *                         close it.
+     *                         In either case the caller is responsible for
+     *                         deleting the native handle.
      */
-    static C2Fence CreateFromNativeHandle(const native_handle_t* handle);
+    static C2Fence CreateFromNativeHandle(
+            const native_handle_t* handle, bool takeOwnership = false);
 };
 
-
 #endif // STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
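
For illustration, a hedged sketch of the unordered multi-fence factory declared above; the surrounding function is hypothetical and not part of this change:

#include <unistd.h>

#include <vector>

#include <C2FenceFactory.h>

// Combines sync fds whose signaling order is unknown.
C2Fence combineUnordered(const std::vector<int> &fds) {
    c2_status_t status = C2_OK;
    // The factory takes ownership of the valid fds; invalid (< 0) entries are ignored.
    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(fds, &status);
    // status is C2_CORRUPTED if some fds could not be merged; the fence is still
    // usable then, but its merged backward-compatible fd may only cover a subset
    // of the inputs.

    // ExtractFdsFromCodec2SyncFence returns duped fds that the caller must close.
    for (int fd : ExtractFdsFromCodec2SyncFence(fence)) {
        close(fd);
    }
    return fence;
}
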
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 48157c8..665f9fc 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -588,11 +588,22 @@
         return C2_BAD_VALUE;
     }
 
+    void clearDeferredBlocks_l() {
+        if (mHavingDeallocationDeferred) {
+            mHavingDeallocationDeferred = false;
+            for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
+                mBuffersWithDeallocationDeferred[i].clear();
+            }
+        }
+    }
+
 public:
     Impl(const std::shared_ptr<C2Allocator> &allocator)
         : mInit(C2_OK), mProducerId(0), mGeneration(0),
           mConsumerUsage(0), mDqFailure(0), mLastDqTs(0),
-          mLastDqLogTs(0), mAllocator(allocator), mIgbpValidityToken(std::make_shared<int>(0)) {
+          mLastDqLogTs(0), mAllocator(allocator),
+          mDeferDeallocationAfterStop(false),
+          mHavingDeallocationDeferred(false), mIgbpValidityToken(std::make_shared<int>(0)) {
     }
 
     ~Impl() {
@@ -634,6 +645,7 @@
             }
         }
         if (mProducerId == 0) {
+            clearDeferredBlocks_l();
             std::shared_ptr<C2GraphicAllocation> alloc;
             c2_status_t err = mAllocator->newGraphicAllocation(
                     width, height, format, usage, &alloc);
@@ -692,6 +704,7 @@
                            uint32_t generation,
                            uint64_t usage,
                            bool bqInformation) {
+        bool toNullSurface = false;
         std::shared_ptr<C2SurfaceSyncMemory> c2SyncMem;
         if (syncHandle) {
             if (!producer) {
@@ -714,6 +727,9 @@
                 mProducerId = producerId;
                 mGeneration = bqInformation ? generation : 0;
             } else {
+                if (mProducer) {
+                    toNullSurface = true;
+                }
                 mProducer = nullptr;
                 mProducerId = 0;
                 mGeneration = 0;
@@ -760,6 +776,17 @@
                 // old buffers should not be cancelled since the associated IGBP
                 // is no longer valid.
                 mIgbpValidityToken = std::make_shared<int>(0);
+                if (mDeferDeallocationAfterStop) {
+                    if (toNullSurface) {
+                        mHavingDeallocationDeferred = true;
+                        for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
+                            mBuffersWithDeallocationDeferred[i] = mBuffers[i];
+                        }
+                    }
+                }
+            }
+            if (!toNullSurface) {
+                clearDeferredBlocks_l();
             }
             if (mInvalidated) {
                 mIgbpValidityToken = std::make_shared<int>(0);
@@ -811,6 +838,16 @@
         }
     }
 
+    void setDeferDeallocationAfterStop() {
+        std::scoped_lock<std::mutex> lock(mMutex);
+        mDeferDeallocationAfterStop = true;
+    }
+
+    void clearDeferredBlocks() {
+        std::scoped_lock<std::mutex> lock(mMutex);
+        clearDeferredBlocks_l();
+    }
+
 private:
     friend struct C2BufferQueueBlockPoolData;
 
@@ -833,6 +870,14 @@
     sp<GraphicBuffer> mBuffers[NUM_BUFFER_SLOTS];
     std::weak_ptr<C2BufferQueueBlockPoolData> mPoolDatas[NUM_BUFFER_SLOTS];
 
+    // In order to workaround b/322731059,
+    // deallocating buffers due to stop using the current surface
+    // could be deferred until the component calling stop or a
+    // new allocation being requested.
+    bool mDeferDeallocationAfterStop;
+    bool mHavingDeallocationDeferred;
+    sp<GraphicBuffer> mBuffersWithDeallocationDeferred[NUM_BUFFER_SLOTS];
+
     std::mutex mSyncMemMutex;
     std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
     std::shared_ptr<C2SurfaceSyncMemory> mOldMem;
@@ -1178,3 +1223,15 @@
     }
 }
 
+void C2BufferQueueBlockPool::setDeferDeallocationAfterStop() {
+    if (mImpl) {
+        mImpl->setDeferDeallocationAfterStop();
+    }
+}
+
+void C2BufferQueueBlockPool::clearDeferredBlocks() {
+    if (mImpl) {
+        mImpl->clearDeferredBlocks();
+    }
+}
+
diff --git a/media/codec2/vndk/platform/C2IgbaBuffer.cpp b/media/codec2/vndk/platform/C2IgbaBuffer.cpp
index eafdb22..3622d5e 100644
--- a/media/codec2/vndk/platform/C2IgbaBuffer.cpp
+++ b/media/codec2/vndk/platform/C2IgbaBuffer.cpp
@@ -192,28 +192,25 @@
     c2_status_t res = _fetchGraphicBlock(
             width, height, format, usage, kBlockingFetchTimeoutNs, &origId, block, &fence);
 
-    if (res == C2_BLOCKING) {
+    if (res == C2_TIMED_OUT) {
+        // Waiting on the SyncFence timed out.
+        // HALs usually treat C2_TIMED_OUT as an irrecoverable error,
+        // so return C2_BLOCKING to make the HAL retry.
         return C2_BLOCKING;
     }
-    if (res != C2_OK) {
-        return res;
-    }
-    // TODO: bundle the fence to the block. Are API changes required?
-    res = fence.wait(kSyncFenceWaitNs);
-    if (res != C2_OK) {
-        bool aidlRet = true;
-        ::ndk::ScopedAStatus status = mIgba->deallocate(origId, &aidlRet);
-        ALOGE("Waiting a sync fence failed %d aidl(%d: %d)",
-              res, status.isOk(), aidlRet);
-    }
-    return C2_OK;
+    return res;
 }
 
 c2_status_t C2IgbaBlockPool::fetchGraphicBlock(
         uint32_t width, uint32_t height, uint32_t format, C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block, C2Fence *fence) {
     uint64_t origId;
-    return _fetchGraphicBlock(width, height, format, usage, 0LL, &origId, block, fence);
+    c2_status_t res = _fetchGraphicBlock(width, height, format, usage, 0LL, &origId, block, fence);
+    if (res == C2_TIMED_OUT) {
+        *fence = C2Fence();
+        return C2_BLOCKING;
+    }
+    return res;
 }
 
 c2_status_t C2IgbaBlockPool::_fetchGraphicBlock(
@@ -263,10 +260,27 @@
             }
         }
 
-        *fence = _C2FenceFactory::CreateSyncFence(allocation.fence.release());
+        C2Fence syncFence = _C2FenceFactory::CreateSyncFence(allocation.fence.release());
         AHardwareBuffer *ahwb = allocation.buffer.release(); // This is acquired.
         CHECK(AHardwareBuffer_getId(ahwb, origId) == ::android::OK);
-        c2_status_t res = CreateGraphicBlockFromAhwb(ahwb, mAllocator, mIgba, block);
+
+        // We wait on the SyncFence here for backward compatibility.
+        // A H/W based sync fence could be returned instead to improve pipeline latency.
+        //
+        // TODO: Add a component configuration for returning the sync fence
+        // from fetchGraphicBlock() as the C2Fence output param (b/322283520).
+        // In that case C2_OK must be returned together with the GraphicBlock.
+        c2_status_t res = syncFence.wait(kSyncFenceWaitNs);
+        if (res != C2_OK) {
+            AHardwareBuffer_release(ahwb);
+            bool aidlRet = true;
+            ::ndk::ScopedAStatus status = mIgba->deallocate(*origId, &aidlRet);
+            ALOGE("Waiting a sync fence failed %d aidl(%d: %d)",
+                  res, status.isOk(), aidlRet);
+            return C2_TIMED_OUT;
+        }
+
+        res = CreateGraphicBlockFromAhwb(ahwb, mAllocator, mIgba, block);
         AHardwareBuffer_release(ahwb);
         if (res != C2_OK) {
             bool aidlRet = true;
diff --git a/media/libaaudio/examples/Android.bp b/media/libaaudio/examples/Android.bp
index e2c1878..aa3ae5e 100644
--- a/media/libaaudio/examples/Android.bp
+++ b/media/libaaudio/examples/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/examples/input_monitor/Android.bp b/media/libaaudio/examples/input_monitor/Android.bp
index 72adfd7..52a5914 100644
--- a/media/libaaudio/examples/input_monitor/Android.bp
+++ b/media/libaaudio/examples/input_monitor/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -11,7 +12,10 @@
     name: "input_monitor",
     gtest: false,
     srcs: ["src/input_monitor.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
@@ -20,7 +24,10 @@
     name: "input_monitor_callback",
     gtest: false,
     srcs: ["src/input_monitor_callback.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index b18aeec..6552113 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -11,13 +12,16 @@
     name: "aaudio_loopback",
     gtest: false,
     srcs: ["src/loopback.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     static_libs: ["libsndfile"],
     include_dirs: ["external/oboe/apps/OboeTester/app/src/main/cpp"],
     shared_libs: [
         "libaaudio",
         "libaudioutils",
-        "liblog"
-        ],
+        "liblog",
+    ],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 4affaed..6a35ced 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -323,7 +323,6 @@
     printf("      -C{channels}      number of input channels\n");
     printf("      -D{deviceId}      input device ID\n");
     printf("      -F{0,1,2}         input format, 1=I16, 2=FLOAT\n");
-    printf("      -g{gain}          recirculating loopback gain\n");
     printf("      -h{hangMillis}    occasionally hang in the callback\n");
     printf("      -P{inPerf}        set input AAUDIO_PERFORMANCE_MODE*\n");
     printf("          n for _NONE\n");
@@ -436,7 +435,6 @@
     int                   written                    = 0;
 
     int                   testMode                   = TEST_LATENCY;
-    double                gain                       = 1.0;
     int                   hangTimeMillis             = 0;
     std::string           report;
 
@@ -468,9 +466,6 @@
                     case 'F':
                         requestedInputFormat = atoi(&arg[2]);
                         break;
-                    case 'g':
-                        gain = atof(&arg[2]);
-                        break;
                     case 'h':
                         // Was there a number after the "-h"?
                         if (arg[2]) {
diff --git a/media/libaaudio/examples/write_sine/Android.bp b/media/libaaudio/examples/write_sine/Android.bp
index 1c7e0f1..fe78112 100644
--- a/media/libaaudio/examples/write_sine/Android.bp
+++ b/media/libaaudio/examples/write_sine/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -10,7 +11,10 @@
 cc_test {
     name: "write_sine",
     srcs: ["src/write_sine.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
@@ -18,7 +22,10 @@
 cc_test {
     name: "write_sine_callback",
     srcs: ["src/write_sine_callback.cpp"],
-    cflags: ["-Wall", "-Werror"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index 46c4148..a1551f8 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -35,37 +36,38 @@
         "libaaudio_headers",
     ],
     shared_libs: [
-        "libbinder",
+        "com.android.media.aaudio-aconfig-cc",
+        "libaudio_aidl_conversion_common_cpp",
+        "libaudioclient_aidl_conversion",
         "libaudiomanager",
         "libaudiopolicy",
-        "libaudioclient_aidl_conversion",
-        "libaudio_aidl_conversion_common_cpp",
+        "libbinder",
         "libutils",
-        "com.android.media.aaudio-aconfig-cc",
     ],
     static_libs: [
-        "liblog",
-        "libcutils",
+        "aaudio-aidl-cpp",
+        "audio-permission-aidl-cpp",
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "framework-permission-aidl-cpp",
         "libaaudio",
-        "libjsoncpp",
+        "libaaudio_internal",
+        "libaudioclient",
+        "libaudioutils",
         "libbase_ndk",
         "libcgrouprc",
-        "libaudioutils",
-        "libaudioclient",
-        "aaudio-aidl-cpp",
+        "libcgrouprc_format",
+        "libcutils",
+        "libjsoncpp",
+        "liblog",
         "libmedia_helper",
         "libmediametrics",
         "libprocessgroup",
-        "av-types-aidl-cpp",
-        "libaaudio_internal",
-        "libcgrouprc_format",
-        "audiopolicy-aidl-cpp",
-        "audioflinger-aidl-cpp",
-        "audiopolicy-types-aidl-cpp",
-        "audioclient-types-aidl-cpp",
-        "shared-file-region-aidl-cpp",
-        "framework-permission-aidl-cpp",
         "mediametricsservice-aidl-cpp",
+        "shared-file-region-aidl-cpp",
     ],
     fuzz_config: {
         cc: [
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 9d9b574..e19d526 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -574,7 +574,7 @@
      * For privacy, the following usages can not be recorded: AAUDIO_VOICE_COMMUNICATION*,
      * AAUDIO_USAGE_NOTIFICATION*, AAUDIO_USAGE_ASSISTANCE* and {@link #AAUDIO_USAGE_ASSISTANT}.
      *
-     * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Build.VERSION_CODES</a>,
+     * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Q</a>,
      * this means only {@link #AAUDIO_USAGE_MEDIA} and {@link #AAUDIO_USAGE_GAME} may be captured.
      *
      * See <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_ALL">
@@ -1115,6 +1115,17 @@
  *
  * The default, if you do not call this function, is {@link #AAUDIO_USAGE_MEDIA}.
  *
+ * If you set Usage then you will need to associate the volume keys with the resulting stream.
+ * Otherwise the volume keys may not work correctly.
+ * This is done in Java with the following code block.
+ *
+ * <pre><code>if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ *     AudioAttributes attributes = new AudioAttributes.Builder().setUsage(usage)
+ *             .setContentType(contentType).build();
+ *     setVolumeControlStream(attributes.getVolumeControlStream());
+ * }
+ * </code></pre>
+ *
  * Available since API level 28.
  *
  * @param builder reference provided by AAudio_createStreamBuilder()
@@ -1132,6 +1143,17 @@
  *
  * The default, if you do not call this function, is {@link #AAUDIO_CONTENT_TYPE_MUSIC}.
  *
+ * If you set ContentType then you will need to associate the volume keys with the resulting stream.
+ * Otherwise the volume keys may not work correctly.
+ * This is done in Java with the following code block.
+ *
+ * <pre><code>if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ *     AudioAttributes attributes = new AudioAttributes.Builder().setUsage(usage)
+ *             .setContentType(contentType).build();
+ *     setVolumeControlStream(attributes.getVolumeControlStream());
+ * }
+ * </code></pre>
+ *
  * Available since API level 28.
  *
  * @param builder reference provided by AAudio_createStreamBuilder()
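The doc comments above describe the native side of the same pattern as the Java snippet: usage and content type are set on the builder before the stream is opened. A minimal sketch using the public AAudio C API follows; the values AAUDIO_USAGE_GAME and AAUDIO_CONTENT_TYPE_SONIFICATION are example placeholders, not values required by this patch.

#include <aaudio/AAudio.h>

// Sketch: configure usage and content type, then open the stream.
aaudio_result_t openGameStream(AAudioStream **streamOut) {
    AAudioStreamBuilder *builder = nullptr;
    aaudio_result_t result = AAudio_createStreamBuilder(&builder);
    if (result != AAUDIO_OK) return result;
    // Both setters are available since API level 28 (see the docs above).
    AAudioStreamBuilder_setUsage(builder, AAUDIO_USAGE_GAME);
    AAudioStreamBuilder_setContentType(builder, AAUDIO_CONTENT_TYPE_SONIFICATION);
    result = AAudioStreamBuilder_openStream(builder, streamOut);
    AAudioStreamBuilder_delete(builder);
    return result;
}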
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index 01d97b6..d67ec70 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -49,12 +49,6 @@
 };
 typedef int32_t aaudio_policy_t;
 
-// Internal error codes. Only used by the framework.
-enum {
-    AAUDIO_INTERNAL_ERROR_BASE = -1000,
-    AAUDIO_ERROR_STANDBY,
-};
-
 /**
  * Control whether AAudioStreamBuilder_openStream() will use the new MMAP data path
  * or the older "Legacy" data path.
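The comment above refers to the test-only MMAP policy control declared in AAudioTesting.h. A minimal sketch of how a test might request the MMAP path and then check whether it was actually selected; AAUDIO_POLICY_AUTO is just one possible value.

#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>

// Sketch: ask for MMAP when the device policy allows it, then report whether
// the MMAP data path was actually used for the opened stream.
bool tryOpenMmapStream(AAudioStreamBuilder *builder, AAudioStream **streamOut) {
    AAudio_setMMapPolicy(AAUDIO_POLICY_AUTO);
    if (AAudioStreamBuilder_openStream(builder, streamOut) != AAUDIO_OK) {
        return false;
    }
    return AAudioStream_isMMapUsed(*streamOut);
}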
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index fcb376c..ebb7637 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -56,10 +57,10 @@
     "-bugprone-macro-parentheses", // found in SharedMemoryParcelable.h
     "-bugprone-narrowing-conversions", // found in several interface from size_t to int32_t
 
-    "-google-readability-casting", // C++ casts not always necessary and may be verbose
-    "-google-readability-todo", // do not require TODO(info)
     "-google-build-using-namespace", // Reenable and fix later.
     "-google-global-names-in-headers", // found in several files
+    "-google-readability-casting", // C++ casts not always necessary and may be verbose
+    "-google-readability-todo", // do not require TODO(info)
 
     "-misc-non-private-member-variables-in-classes", // found in aidl generated files
 
@@ -89,28 +90,27 @@
     ],
 
     cflags: [
-        "-Wthread-safety",
-        "-Wno-unused-parameter",
         "-Wall",
         "-Werror",
-        // By default, all symbols are hidden.
-        // "-fvisibility=hidden",
+        "-Wno-unused-parameter",
+        "-Wthread-safety",
+
         // AAUDIO_API is used to explicitly export a function or a variable as a visible symbol.
         "-DAAUDIO_API=__attribute__((visibility(\"default\")))",
     ],
 
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "libaaudio_internal",
         "libaudioclient",
         "libaudioutils",
+        "libbinder",
+        "libcutils",
+        "liblog",
         "libmedia_helper",
         "libmediametrics",
         "libmediautils",
-        "liblog",
-        "libcutils",
         "libutils",
-        "libbinder",
-        "framework-permission-aidl-cpp",
     ],
 
     sanitize: {
@@ -128,7 +128,7 @@
     tidy_checks_as_errors: tidy_errors,
     tidy_flags: [
         "-format-style=file",
-    ]
+    ],
 }
 
 cc_library {
@@ -160,56 +160,49 @@
     ],
 
     shared_libs: [
+        "aaudio-aidl-cpp",
+        "audioclient-types-aidl-cpp",
+        "com.android.media.aaudio-aconfig-cc",
+        "framework-permission-aidl-cpp",
         "libaudioclient",
+        "libaudioclient_aidl_conversion",
         "libaudioutils",
+        "libbinder",
+        "libcutils",
+        "liblog",
         "libmedia_helper",
         "libmediametrics",
         "libmediautils",
-        "liblog",
-        "libcutils",
         "libutils",
-        "libbinder",
-        "framework-permission-aidl-cpp",
-        "aaudio-aidl-cpp",
-        "audioclient-types-aidl-cpp",
-        "libaudioclient_aidl_conversion",
-        "com.android.media.aaudio-aconfig-cc",
     ],
 
     cflags: [
-        "-Wno-unused-parameter",
         "-Wall",
         "-Werror",
+        "-Wno-unused-parameter",
     ],
 
     srcs: [
-        "core/AudioGlobal.cpp",
-        "core/AudioStream.cpp",
-        "core/AudioStreamBuilder.cpp",
-        "core/AAudioStreamParameters.cpp",
-        "legacy/AudioStreamLegacy.cpp",
-        "legacy/AudioStreamRecord.cpp",
-        "legacy/AudioStreamTrack.cpp",
-        "utility/AAudioUtilities.cpp",
-        "utility/FixedBlockAdapter.cpp",
-        "utility/FixedBlockReader.cpp",
-        "utility/FixedBlockWriter.cpp",
-        "fifo/FifoBuffer.cpp",
-        "fifo/FifoControllerBase.cpp",
+        "binding/AAudioBinderAdapter.cpp",
+        "binding/AAudioBinderClient.cpp",
+        "binding/AAudioStreamConfiguration.cpp",
+        "binding/AAudioStreamRequest.cpp",
+        "binding/AudioEndpointParcelable.cpp",
+        "binding/RingBufferParcelable.cpp",
+        "binding/SharedMemoryParcelable.cpp",
+        "binding/SharedRegionParcelable.cpp",
         "client/AAudioFlowGraph.cpp",
         "client/AudioEndpoint.cpp",
         "client/AudioStreamInternal.cpp",
         "client/AudioStreamInternalCapture.cpp",
         "client/AudioStreamInternalPlay.cpp",
         "client/IsochronousClockModel.cpp",
-        "binding/AudioEndpointParcelable.cpp",
-        "binding/AAudioBinderAdapter.cpp",
-        "binding/AAudioBinderClient.cpp",
-        "binding/AAudioStreamRequest.cpp",
-        "binding/AAudioStreamConfiguration.cpp",
-        "binding/RingBufferParcelable.cpp",
-        "binding/SharedMemoryParcelable.cpp",
-        "binding/SharedRegionParcelable.cpp",
+        "core/AAudioStreamParameters.cpp",
+        "core/AudioGlobal.cpp",
+        "core/AudioStream.cpp",
+        "core/AudioStreamBuilder.cpp",
+        "fifo/FifoBuffer.cpp",
+        "fifo/FifoControllerBase.cpp",
         "flowgraph/ChannelCountConverter.cpp",
         "flowgraph/ClipToRange.cpp",
         "flowgraph/FlowGraphNode.cpp",
@@ -217,20 +210,20 @@
         "flowgraph/ManyToMultiConverter.cpp",
         "flowgraph/MonoBlend.cpp",
         "flowgraph/MonoToMultiConverter.cpp",
-        "flowgraph/MultiToMonoConverter.cpp",
         "flowgraph/MultiToManyConverter.cpp",
+        "flowgraph/MultiToMonoConverter.cpp",
         "flowgraph/RampLinear.cpp",
         "flowgraph/SampleRateConverter.cpp",
         "flowgraph/SinkFloat.cpp",
+        "flowgraph/SinkI8_24.cpp",
         "flowgraph/SinkI16.cpp",
         "flowgraph/SinkI24.cpp",
         "flowgraph/SinkI32.cpp",
-        "flowgraph/SinkI8_24.cpp",
         "flowgraph/SourceFloat.cpp",
+        "flowgraph/SourceI8_24.cpp",
         "flowgraph/SourceI16.cpp",
         "flowgraph/SourceI24.cpp",
         "flowgraph/SourceI32.cpp",
-        "flowgraph/SourceI8_24.cpp",
         "flowgraph/resampler/IntegerRatio.cpp",
         "flowgraph/resampler/LinearResampler.cpp",
         "flowgraph/resampler/MultiChannelResampler.cpp",
@@ -239,6 +232,13 @@
         "flowgraph/resampler/PolyphaseResamplerStereo.cpp",
         "flowgraph/resampler/SincResampler.cpp",
         "flowgraph/resampler/SincResamplerStereo.cpp",
+        "legacy/AudioStreamLegacy.cpp",
+        "legacy/AudioStreamRecord.cpp",
+        "legacy/AudioStreamTrack.cpp",
+        "utility/AAudioUtilities.cpp",
+        "utility/FixedBlockAdapter.cpp",
+        "utility/FixedBlockReader.cpp",
+        "utility/FixedBlockWriter.cpp",
     ],
     sanitize: {
         integer_overflow: true,
@@ -250,7 +250,7 @@
     tidy_checks_as_errors: tidy_errors,
     tidy_flags: [
         "-format-style=file",
-    ]
+    ],
 }
 
 aidl_interface {
@@ -262,20 +262,19 @@
     ],
     srcs: [
         "binding/aidl/aaudio/Endpoint.aidl",
+        "binding/aidl/aaudio/IAAudioClient.aidl",
+        "binding/aidl/aaudio/IAAudioService.aidl",
         "binding/aidl/aaudio/RingBuffer.aidl",
         "binding/aidl/aaudio/SharedRegion.aidl",
         "binding/aidl/aaudio/StreamParameters.aidl",
         "binding/aidl/aaudio/StreamRequest.aidl",
-        "binding/aidl/aaudio/IAAudioClient.aidl",
-        "binding/aidl/aaudio/IAAudioService.aidl",
     ],
     imports: [
         "audioclient-types-aidl",
-        "shared-file-region-aidl",
         "framework-permission-aidl",
+        "shared-file-region-aidl",
     ],
-    backend:
-    {
+    backend: {
         java: {
             sdk_version: "module_current",
         },
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index 5f34a75..439d5af 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -71,18 +71,10 @@
     {
         Mutex::Autolock _l(mServiceLock);
         if (mAdapter == nullptr) {
-            sp<IBinder> binder;
             sp<IServiceManager> sm = defaultServiceManager();
-            // Try several times to get the service.
-            int retries = 4;
-            do {
-                binder = sm->getService(String16(AAUDIO_SERVICE_NAME)); // This will wait a while.
-                if (binder.get() != nullptr) {
-                    break;
-                }
-            } while (retries-- > 0);
+            sp<IBinder> binder = sm->waitForService(String16(AAUDIO_SERVICE_NAME));
 
-            if (binder.get() != nullptr) {
+            if (binder != nullptr) {
                 // Ask for notification if the service dies.
                 status_t status = binder->linkToDeath(mAAudioClient);
                 // TODO review what we should do if this fails
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index e780f4f..cd7679c 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -278,3 +278,9 @@
         mDataQueue->eraseMemory();
     }
 }
+
+void AudioEndpoint::eraseEmptyDataMemory(int32_t numFrames) {
+    if (mDataQueue != nullptr) {
+        mDataQueue->eraseEmptyMemory(numFrames);
+    }
+}
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index b117572..7e97c6a 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -107,6 +107,8 @@
      */
     void eraseDataMemory();
 
+    void eraseEmptyDataMemory(int32_t numFrames);
+
     void freeDataQueue() { mDataQueue.reset(); }
 
     void dump() const;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 7648e25..fa3f5a0 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -575,10 +575,20 @@
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    // For playback, sleep until all the audio data has played.
+    // Then clear the buffer to prevent noise.
+    prepareBuffersForStop();
+
     mClockModel.stop(AudioClock::getNanoseconds());
     setState(AAUDIO_STREAM_STATE_STOPPING);
     mAtomicInternalTimestamp.clear();
 
+#if 0
+    // Simulate very slow CPU, force race condition where the
+    // DSP keeps playing after we stop writing.
+    AudioClock::sleepForNanos(800 * AAUDIO_NANOS_PER_MILLISECOND);
+#endif
+
     result = mServiceInterface.stopStream(mServiceStreamHandleInfo);
     if (result == AAUDIO_ERROR_INVALID_HANDLE) {
         ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
@@ -610,17 +620,19 @@
                                                  audio_port_handle_t *portHandle) {
     ALOGV("%s() called", __func__);
     if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
+        ALOGE("%s() getServiceHandle() is invalid", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
     aaudio_result_t result =  mServiceInterface.startClient(mServiceStreamHandleInfo,
                                                             client, attr, portHandle);
-    ALOGV("%s(%d) returning %d", __func__, *portHandle, result);
+    ALOGV("%s(), got %d, returning %d", __func__, *portHandle, result);
     return result;
 }
 
 aaudio_result_t AudioStreamInternal::stopClient(audio_port_handle_t portHandle) {
     ALOGV("%s(%d) called", __func__, portHandle);
     if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
+        ALOGE("%s(%d) getServiceHandle() is invalid", __func__, portHandle);
         return AAUDIO_ERROR_INVALID_STATE;
     }
    aaudio_result_t result = mServiceInterface.stopClient(mServiceStreamHandleInfo, portHandle);
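The new prepareBuffersForStop() call above implements the contract that requestStop() lets already-written playback data drain before the hardware shuts down. From the application side, a minimal sketch of that sequence (illustrative only; the one-second timeout is an arbitrary example value):

#include <aaudio/AAudio.h>

// Sketch: stop a playback stream and wait until the STOPPING state has been
// left, so that all previously written frames have been played.
aaudio_result_t stopAndDrain(AAudioStream *stream) {
    aaudio_result_t result = AAudioStream_requestStop(stream);
    if (result != AAUDIO_OK) return result;
    aaudio_stream_state_t nextState = AAUDIO_STREAM_STATE_UNINITIALIZED;
    constexpr int64_t kTimeoutNanos = 1000LL * 1000 * 1000; // 1 second, example value
    return AAudioStream_waitForStateChange(stream, AAUDIO_STREAM_STATE_STOPPING,
                                           &nextState, kTimeoutNanos);
}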
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index a5981b1..20d55f9 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -123,6 +123,8 @@
 
     virtual void prepareBuffersForStart() {}
 
+    virtual void prepareBuffersForStop() {}
+
     virtual void advanceClientToMatchServerPosition(int32_t serverMargin) = 0;
 
     virtual void onFlushFromServer() {}
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 5d4c3d4..0427777 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -19,6 +19,8 @@
 
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 
+#include <algorithm>
+
 #include <media/MediaMetricsItem.h>
 #include <utils/Trace.h>
 
@@ -108,6 +110,61 @@
     mAudioEndpoint->eraseDataMemory();
 }
 
+void AudioStreamInternalPlay::prepareBuffersForStop() {
+    // If this is a shared stream and the FIFO is being read by the mixer then
+    // we don't have to worry about the DSP reading past the valid data. We can skip all this.
+    if (!mAudioEndpoint->isFreeRunning()) {
+        return;
+    }
+    // Sleep until the DSP has read all of the data written.
+    int64_t validFramesInBuffer = getFramesWritten() - getFramesRead();
+    if (validFramesInBuffer >= 0) {
+        int64_t emptyFramesInBuffer = ((int64_t) getBufferCapacity()) - validFramesInBuffer;
+
+        // Prevent stale data from being played if the DSP is still running.
+        // Erase some of the FIFO memory in front of the DSP read cursor.
+        // Subtract one burst so we do not accidentally erase data that the DSP might be using.
+        int64_t framesToErase = std::max((int64_t) 0,
+                                         emptyFramesInBuffer - getFramesPerBurst());
+        mAudioEndpoint->eraseEmptyDataMemory(framesToErase);
+
+        // Sleep until we are confident the DSP has consumed all of the valid data.
+        // Sleep for one extra burst as a safety margin because the IsochronousClockModel
+        // is not perfectly accurate.
+        int64_t positionInEmptyMemory = getFramesWritten() + getFramesPerBurst();
+        int64_t timeAllConsumed = mClockModel.convertPositionToTime(positionInEmptyMemory);
+        int64_t durationAllConsumed = timeAllConsumed - AudioClock::getNanoseconds();
+        // Prevent sleeping for too long.
+        durationAllConsumed = std::min(200 * AAUDIO_NANOS_PER_MILLISECOND, durationAllConsumed);
+        AudioClock::sleepForNanos(durationAllConsumed);
+    }
+
+    // Erase all of the memory in case the DSP keeps going and wraps around.
+    mAudioEndpoint->eraseDataMemory();
+
+    // Wait for the last buffer to reach the DAC.
+    // This is because the expected behavior of stop() is that all data written to the stream
+    // should be played before the hardware actually shuts down.
+    // This is different than pause(), where we just end as soon as possible.
+    // This can be important when, for example, playing car navigation and
+    // you want the user to hear the complete instruction.
+    if (mAtomicInternalTimestamp.isValid()) {
+        // Use timestamps to calculate the latency between the DSP reading
+        // a frame and when it reaches the DAC.
+        // This code assumes that timestamps are accurate.
+        Timestamp timestamp = mAtomicInternalTimestamp.read();
+        int64_t dacPosition = timestamp.getPosition();
+        int64_t hardwareReadTime = mClockModel.convertPositionToTime(dacPosition);
+        int64_t hardwareLatencyNanos = timestamp.getNanoseconds() - hardwareReadTime;
+        ALOGD("%s() hardwareLatencyNanos = %lld", __func__,
+              (long long) hardwareLatencyNanos);
+        // Prevent sleeping for too long.
+        hardwareLatencyNanos = std::min(30 * AAUDIO_NANOS_PER_MILLISECOND,
+                                        hardwareLatencyNanos);
+        AudioClock::sleepForNanos(hardwareLatencyNanos);
+    }
+}
+
 void AudioStreamInternalPlay::advanceClientToMatchServerPosition(int32_t serverMargin) {
     int64_t readCounter = mAudioEndpoint->getDataReadCounter() + serverMargin;
     int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
@@ -353,20 +410,26 @@
         // Call application using the AAudio callback interface.
         callbackResult = maybeCallDataCallback(mCallbackBuffer.get(), mCallbackFrames);
 
-        if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
-            // Write audio data to stream. This is a BLOCKING WRITE!
-            result = write(mCallbackBuffer.get(), mCallbackFrames, timeoutNanos);
-            if ((result != mCallbackFrames)) {
-                if (result >= 0) {
-                    // Only wrote some of the frames requested. The stream can be disconnected
-                    // or timed out.
-                    processCommands();
-                    result = isDisconnected() ? AAUDIO_ERROR_DISCONNECTED : AAUDIO_ERROR_TIMEOUT;
-                }
-                maybeCallErrorCallback(result);
-                break;
+        // Write audio data to stream. This is a BLOCKING WRITE!
+        // Write data regardless of the callbackResult because we assume the data
+        // is valid even when the callback returns AAUDIO_CALLBACK_RESULT_STOP.
+        // Imagine a callback that is playing a large sound in memory.
+        // When it gets to the end of the sound it can partially fill
+        // the last buffer with the end of the sound, then zero pad the buffer, then return STOP.
+        // If the callback has no valid data then it should zero-fill the entire buffer.
+        result = write(mCallbackBuffer.get(), mCallbackFrames, timeoutNanos);
+        if ((result != mCallbackFrames)) {
+            if (result >= 0) {
+                // Only wrote some of the frames requested. The stream can be disconnected
+                // or timed out.
+                processCommands();
+                result = isDisconnected() ? AAUDIO_ERROR_DISCONNECTED : AAUDIO_ERROR_TIMEOUT;
             }
-        } else if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
+            maybeCallErrorCallback(result);
+            break;
+        }
+
+        if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
             ALOGD("%s(): callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
             result = systemStopInternal();
             break;
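The reasoning above (write the callback buffer even on AAUDIO_CALLBACK_RESULT_STOP, because the callback may have partially filled and zero-padded it) corresponds to a data callback written roughly like the following sketch. SoundData and its fields are hypothetical names used only for this illustration, and a single float channel is assumed.

#include <algorithm>
#include <cstring>
#include <aaudio/AAudio.h>

// Hypothetical container for a sound held in memory (illustration only).
struct SoundData {
    const float *samples;   // mono float samples
    int32_t totalFrames;
    int32_t nextFrame;
};

// Sketch: copy the remaining frames, zero-pad the tail of the last buffer,
// then return STOP once the whole sound has been delivered.
aaudio_data_callback_result_t playSoundCallback(AAudioStream * /*stream*/,
                                                void *userData,
                                                void *audioData,
                                                int32_t numFrames) {
    auto *sound = static_cast<SoundData *>(userData);
    auto *out = static_cast<float *>(audioData);
    const int32_t framesLeft = sound->totalFrames - sound->nextFrame;
    const int32_t framesToCopy = std::min(numFrames, std::max(framesLeft, 0));
    std::memcpy(out, sound->samples + sound->nextFrame, framesToCopy * sizeof(float));
    std::memset(out + framesToCopy, 0, (numFrames - framesToCopy) * sizeof(float));
    sound->nextFrame += framesToCopy;
    return (sound->nextFrame >= sound->totalFrames) ? AAUDIO_CALLBACK_RESULT_STOP
                                                    : AAUDIO_CALLBACK_RESULT_CONTINUE;
}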
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index b51b5d0..4e14f18 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -66,6 +66,8 @@
 
     void prepareBuffersForStart() override;
 
+    void prepareBuffersForStop() override;
+
     void advanceClientToMatchServerPosition(int32_t serverMargin) override;
 
     void onFlushFromServer() override;
diff --git a/media/libaaudio/src/client/IsochronousClockModel.cpp b/media/libaaudio/src/client/IsochronousClockModel.cpp
index a39e90e..430ba83 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.cpp
+++ b/media/libaaudio/src/client/IsochronousClockModel.cpp
@@ -18,7 +18,6 @@
 //#define LOG_NDEBUG 0
 #include <log/log.h>
 
-#define __STDC_FORMAT_MACROS
 #include <inttypes.h>
 #include <stdint.h>
 #include <algorithm>
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 3e51575..67fc668 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -23,13 +23,6 @@
 
 using namespace aaudio;
 
-// TODO These defines should be moved to a central place in audio.
-#define SAMPLES_PER_FRAME_MIN        1
-#define SAMPLES_PER_FRAME_MAX        FCC_LIMIT
-#define SAMPLE_RATE_HZ_MIN           8000
-// HDMI supports up to 32 channels at 1536000 Hz.
-#define SAMPLE_RATE_HZ_MAX           1600000
-
 void AAudioStreamParameters::copyFrom(const AAudioStreamParameters &other) {
     mSamplesPerFrame      = other.mSamplesPerFrame;
     mSampleRate           = other.mSampleRate;
@@ -73,8 +66,8 @@
 }
 
 aaudio_result_t AAudioStreamParameters::validate() const {
-    if (mSamplesPerFrame != AAUDIO_UNSPECIFIED
-        && (mSamplesPerFrame < SAMPLES_PER_FRAME_MIN || mSamplesPerFrame > SAMPLES_PER_FRAME_MAX)) {
+    if (mSamplesPerFrame != AAUDIO_UNSPECIFIED && (mSamplesPerFrame < CHANNEL_COUNT_MIN_AAUDIO ||
+                                                   mSamplesPerFrame > CHANNEL_COUNT_MAX_AAUDIO)) {
         ALOGD("channelCount out of range = %d", mSamplesPerFrame);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
@@ -105,8 +98,8 @@
     aaudio_result_t result = isFormatValid (mAudioFormat);
     if (result != AAUDIO_OK) return result;
 
-    if (mSampleRate != AAUDIO_UNSPECIFIED
-        && (mSampleRate < SAMPLE_RATE_HZ_MIN || mSampleRate > SAMPLE_RATE_HZ_MAX)) {
+    if (mSampleRate != AAUDIO_UNSPECIFIED &&
+        (mSampleRate < SAMPLE_RATE_HZ_MIN_AAUDIO || mSampleRate > SAMPLE_RATE_HZ_MAX_IEC610937)) {
         ALOGD("sampleRate out of range = %d", mSampleRate);
         return AAUDIO_ERROR_INVALID_RATE;
     }
diff --git a/media/libaaudio/src/core/AudioGlobal.h b/media/libaaudio/src/core/AudioGlobal.h
index 6c22744..8af49b4 100644
--- a/media/libaaudio/src/core/AudioGlobal.h
+++ b/media/libaaudio/src/core/AudioGlobal.h
@@ -22,6 +22,14 @@
 
 namespace aaudio {
 
+// Internal error codes. Only used by the framework.
+enum {
+    AAUDIO_INTERNAL_ERROR_BASE = -1000,
+    AAUDIO_ERROR_STANDBY,
+    AAUDIO_ERROR_ALREADY_CLOSED,
+
+};
+
 aaudio_policy_t AudioGlobal_getMMapPolicy();
 aaudio_result_t AudioGlobal_setMMapPolicy(aaudio_policy_t policy);
 
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index ac4e2b3..c9d8b35 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -24,6 +24,7 @@
 
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
+#include <android/media/audio/common/AudioMMapPolicy.h>
 #include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <android/media/audio/common/AudioMMapPolicyType.h>
 #include <media/AudioSystem.h>
@@ -40,21 +41,15 @@
 
 using namespace aaudio;
 
+using android::media::audio::common::AudioMMapPolicy;
 using android::media::audio::common::AudioMMapPolicyInfo;
 using android::media::audio::common::AudioMMapPolicyType;
 
 #define AAUDIO_MMAP_POLICY_DEFAULT             AAUDIO_POLICY_NEVER
 #define AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT   AAUDIO_POLICY_NEVER
+#define AAUDIO_MMAP_POLICY_DEFAULT_AIDL        AudioMMapPolicy::NEVER
+#define AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT_AIDL AudioMMapPolicy::NEVER
 
-// These values are for a pre-check before we ask the lower level service to open a stream.
-// So they are just outside the maximum conceivable range of value,
-// on the edge of being ridiculous.
-// TODO These defines should be moved to a central place in audio.
-#define SAMPLES_PER_FRAME_MIN        1
-#define SAMPLES_PER_FRAME_MAX        FCC_LIMIT
-#define SAMPLE_RATE_HZ_MIN           8000
-// HDMI supports up to 32 channels at 1536000 Hz.
-#define SAMPLE_RATE_HZ_MAX           1600000
 #define FRAMES_PER_DATA_CALLBACK_MIN 1
 #define FRAMES_PER_DATA_CALLBACK_MAX (1024 * 1024)
 
@@ -116,7 +111,8 @@
     aaudio_policy_t mmapPolicy = AudioGlobal_getMMapPolicy();
     if (android::AudioSystem::getMmapPolicyInfo(
             AudioMMapPolicyType::DEFAULT, &policyInfos) == NO_ERROR) {
-        aaudio_policy_t systemMmapPolicy = AAudio_getAAudioPolicy(policyInfos);
+        aaudio_policy_t systemMmapPolicy = AAudio_getAAudioPolicy(
+                policyInfos, AAUDIO_MMAP_POLICY_DEFAULT_AIDL);
         if (mmapPolicy == AAUDIO_POLICY_ALWAYS && systemMmapPolicy == AAUDIO_POLICY_NEVER) {
             // No need to try as AAudioService is not created and the client only wants MMAP path.
             return AAUDIO_ERROR_NO_SERVICE;
@@ -145,7 +141,8 @@
     aaudio_policy_t mmapExclusivePolicy = AAUDIO_UNSPECIFIED;
     if (android::AudioSystem::getMmapPolicyInfo(
             AudioMMapPolicyType::EXCLUSIVE, &policyInfos) == NO_ERROR) {
-        mmapExclusivePolicy = AAudio_getAAudioPolicy(policyInfos);
+        mmapExclusivePolicy = AAudio_getAAudioPolicy(
+                policyInfos, AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT_AIDL);
     }
     if (mmapExclusivePolicy == AAUDIO_UNSPECIFIED) {
         mmapExclusivePolicy = AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT;
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index 5c11882..f3e3bbd 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -150,7 +150,7 @@
 
     getEmptyRoomAvailable(&wrappingBuffer);
 
-    // Read data in one or two parts.
+    // Write data in one or two parts.
     int partIndex = 0;
     while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
         fifo_frames_t framesToWrite = framesLeft;
@@ -192,3 +192,29 @@
         memset(getStorage(), 0, (size_t) numBytes);
     }
 }
+
+fifo_frames_t FifoBuffer::eraseEmptyMemory(fifo_frames_t numFrames) {
+    WrappingBuffer wrappingBuffer;
+    fifo_frames_t framesLeft = numFrames;
+
+    getEmptyRoomAvailable(&wrappingBuffer);
+
+    // Erase data in one or two parts.
+    int partIndex = 0;
+    while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
+        fifo_frames_t framesToWrite = framesLeft;
+        fifo_frames_t framesAvailable = wrappingBuffer.numFrames[partIndex];
+        if (framesAvailable > 0) {
+            if (framesToWrite > framesAvailable) {
+                framesToWrite = framesAvailable;
+            }
+            int32_t numBytes = convertFramesToBytes(framesToWrite);
+            memset(wrappingBuffer.data[partIndex], 0, numBytes);
+            framesLeft -= framesToWrite;
+        } else {
+            break;
+        }
+        partIndex++;
+    }
+    return numFrames - framesLeft; // number erased
+}
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index 7b0aca1..860ccad 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -115,6 +115,13 @@
      */
     void eraseMemory();
 
+    /**
+     * Clear some memory after the write pointer.
+     * This can be used to prevent the reader from accidentally reading stale data
+     * in case it is reading asynchronously.
+     *
+     * @return the number of frames actually erased
+     */
+    fifo_frames_t eraseEmptyMemory(fifo_frames_t numFrames);
+
 protected:
 
     virtual uint8_t *getStorage() const = 0;
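To make the intent of eraseEmptyMemory() concrete, here is a small worked example with made-up numbers, mirroring the computation that prepareBuffersForStop() performs before calling it:

// Example values: capacity = 1024 frames, burst = 96 frames,
// framesWritten = 900, framesRead = 880.
//   validFramesInBuffer = 900 - 880          = 20
//   emptyFramesInBuffer = 1024 - 20          = 1004
//   framesToErase       = max(0, 1004 - 96)  = 908
// eraseEmptyMemory(908) zeroes up to 908 frames starting at the write pointer;
// if that region wraps past the end of the FIFO it is cleared in two parts,
// exactly as in FifoBuffer::eraseEmptyMemory() above.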
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
index a15fcb8..890057d 100644
--- a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
@@ -28,7 +28,8 @@
 
 void SampleRateConverter::reset() {
     FlowGraphNode::reset();
-    mInputCursor = kInitialCallCount;
+    mInputCallCount = kInitialCallCount;
+    mInputCursor = 0;
 }
 
 // Return true if there is a sample available.
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.h b/media/libaaudio/src/flowgraph/SampleRateConverter.h
index f883e6c..a4318f0 100644
--- a/media/libaaudio/src/flowgraph/SampleRateConverter.h
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.h
@@ -54,7 +54,7 @@
     int32_t mNumValidInputFrames = 0; // number of valid frames currently in the input port buffer
     // We need our own callCount for upstream calls because calls occur at a different rate.
     // This means we cannot have cyclic graphs or merges that contain an SRC.
-    int64_t mInputCallCount = 0;
+    int64_t mInputCallCount = kInitialCallCount;
 
 };
 
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index 8595308..255bd0f 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -261,6 +261,11 @@
 
 void AudioStreamLegacy::onAudioDeviceUpdate(audio_io_handle_t /* audioIo */,
             audio_port_handle_t deviceId) {
+    // Check for an invalid deviceId. It is unclear why it would change to UNSPECIFIED.
+    if (deviceId == AAUDIO_UNSPECIFIED) {
+        ALOGE("%s(, deviceId = AAUDIO_UNSPECIFIED)! Why?", __func__);
+        return;
+    }
     // Device routing is a common source of errors and DISCONNECTS.
     // Please leave this log in place. If there is a bug then this might
     // get called after the stream has been deleted so log before we
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 0cbf79d..3df23ee 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -680,12 +680,16 @@
 
 } // namespace
 
-aaudio_policy_t AAudio_getAAudioPolicy(const std::vector<AudioMMapPolicyInfo>& policyInfos) {
-    if (policyInfos.empty()) return AAUDIO_POLICY_AUTO;
-    for (size_t i = 1; i < policyInfos.size(); ++i) {
-        if (policyInfos.at(i).mmapPolicy != policyInfos.at(0).mmapPolicy) {
+aaudio_policy_t AAudio_getAAudioPolicy(const std::vector<AudioMMapPolicyInfo>& policyInfos,
+                                       AudioMMapPolicy defaultPolicy) {
+    AudioMMapPolicy policy = defaultPolicy;
+    for (const auto& policyInfo : policyInfos) {
+        if (policyInfo.mmapPolicy == AudioMMapPolicy::NEVER) {
+            policy = policyInfo.mmapPolicy;
+        } else if (policyInfo.mmapPolicy == AudioMMapPolicy::AUTO ||
+                   policyInfo.mmapPolicy == AudioMMapPolicy::ALWAYS) {
             return AAUDIO_POLICY_AUTO;
         }
     }
-    return aidl2legacy_aaudio_policy(policyInfos.at(0).mmapPolicy);
+    return aidl2legacy_aaudio_policy(policy);
 }
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index d44bbab..7c351e1 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -348,9 +348,19 @@
     AAUDIO_CHANNEL_INDEX_MASK_24 = AAUDIO_CHANNEL_BIT_INDEX | (1 << 24) - 1,
 };
 
-// The aaudio policy will be ALWAYS, NEVER, UNSPECIFIED only when all policy info are
-// ALWAYS, NEVER or UNSPECIFIED. Otherwise, the aaudio policy will be AUTO.
+/**
+ * Returns the aaudio mmap policy based on the given vector of mmap policy infos.
+ * The rules are as follows:
+ * 1. Returns AUTO if any of the policies is AUTO or ALWAYS.
+ * 2. Returns NEVER if at least one policy is NEVER and the others are UNSPECIFIED.
+ * 3. Returns the default policy if all of the policies are UNSPECIFIED.
+ *
+ * @param policyInfos the mmap policy infos to aggregate
+ * @param defaultPolicy the policy to use when all entries are UNSPECIFIED
+ * @return the corresponding aaudio_policy_t value
+ */
 aaudio_policy_t AAudio_getAAudioPolicy(
-        const std::vector<android::media::audio::common::AudioMMapPolicyInfo>& policyInfos);
+        const std::vector<android::media::audio::common::AudioMMapPolicyInfo>& policyInfos,
+        android::media::audio::common::AudioMMapPolicy defaultPolicy =
+                android::media::audio::common::AudioMMapPolicy::NEVER);
 
 #endif //UTILITY_AAUDIO_UTILITIES_H
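As a quick illustration of the three rules above (the sample vectors are made up; the results follow the implementation in AAudioUtilities.cpp):

// policyInfos = { UNSPECIFIED, UNSPECIFIED }  -> default policy       (rule 3)
// policyInfos = { NEVER, UNSPECIFIED }        -> AAUDIO_POLICY_NEVER  (rule 2)
// policyInfos = { NEVER, AUTO }               -> AAUDIO_POLICY_AUTO   (rule 1)
// policyInfos = { ALWAYS, UNSPECIFIED }       -> AAUDIO_POLICY_AUTO   (rule 1)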
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index d59afef..6aa04a8 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -183,9 +184,9 @@
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_full_queue.cpp"],
     shared_libs: [
-		"libaaudio",
-		"liblog"
-	],
+        "libaaudio",
+        "liblog",
+    ],
 }
 
 cc_test {
@@ -205,9 +206,9 @@
     srcs: ["test_steal_exclusive.cpp"],
     shared_libs: [
         "libaaudio",
-        "liblog",
         "libbinder",
         "libcutils",
+        "liblog",
         "libutils",
     ],
 }
@@ -248,3 +249,30 @@
     srcs: ["test_idle_disconnected_shared_stream.cpp"],
     shared_libs: ["libaaudio"],
 }
+
+cc_test {
+    name: "test_multiple_close_simultaneously",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+        "libaaudio_tests_defaults",
+    ],
+    srcs: ["test_multiple_close_simultaneously.cpp"],
+    shared_libs: [
+        "aaudio-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaaudio",
+        "libbinder",
+        "liblog",
+        "libutils",
+    ],
+    // This test runs for 1 minute to make sure that no crash happens.
+    // Set the timeout to 2 minutes to allow the test to complete.
+    test_options: {
+        test_runner_options: [
+            {
+                name: "native-test-timeout",
+                value: "2m",
+            },
+        ],
+    },
+}
diff --git a/media/libaaudio/tests/test_multiple_close_simultaneously.cpp b/media/libaaudio/tests/test_multiple_close_simultaneously.cpp
new file mode 100644
index 0000000..f6351b6
--- /dev/null
+++ b/media/libaaudio/tests/test_multiple_close_simultaneously.cpp
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "test_multiple_close_simultaneously"
+
+#include <chrono>
+#include <condition_variable>
+#include <shared_mutex>
+#include <string>
+#include <thread>
+
+#include <gtest/gtest.h>
+
+#include <binder/IBinder.h>
+#include <binder/IServiceManager.h>
+#include <utils/Log.h>
+
+#include <aaudio/AAudio.h>
+#include <aaudio/IAAudioService.h>
+#include <aaudio/StreamRequest.h>
+#include <aaudio/StreamParameters.h>
+
+using namespace android;
+using namespace aaudio;
+
+#define AAUDIO_SERVICE_NAME "media.aaudio"
+
+static constexpr int THREAD_NUM = 2;
+static constexpr auto TEST_DURATION = std::chrono::minutes(1);
+
+static std::string sError;
+static bool sTestPassed = true;
+
+struct Signal {
+    std::atomic_int value{0};
+    std::shared_mutex lock;
+    std::condition_variable_any cv;
+};
+
+class AAudioServiceDeathRecipient : public IBinder::DeathRecipient {
+public:
+    void binderDied(const wp<IBinder>& who __unused) override {
+        sError = "AAudioService is dead";
+        ALOGE("%s", sError.c_str());
+        sTestPassed = false;
+    }
+};
+
+sp<IAAudioService> getAAudioService(const sp<IBinder::DeathRecipient>& recipient) {
+    auto sm = defaultServiceManager();
+    if (sm == nullptr) {
+        sError = "Cannot get service manager";
+        ALOGE("%s", sError.c_str());
+        return nullptr;
+    }
+    sp<IBinder> binder = sm->waitForService(String16(AAUDIO_SERVICE_NAME));
+    if (binder == nullptr) {
+        sError = "Cannot get aaudio service";
+        ALOGE("%s", sError.c_str());
+        return nullptr;
+    }
+    if (binder->linkToDeath(recipient) != NO_ERROR) {
+        sError = "Cannot link to binder death";
+        ALOGE("%s", sError.c_str());
+        return nullptr;
+    }
+    return interface_cast<IAAudioService>(binder);
+}
+
+void openAndMultipleClose(const sp<IAAudioService>& aaudioService) {
+    auto start = std::chrono::system_clock::now();
+    bool hasFailedOpening = false;
+    while (sTestPassed && std::chrono::system_clock::now() - start < TEST_DURATION) {
+        StreamRequest inRequest;
+        StreamParameters outParams;
+        int32_t handle = 0;
+        inRequest.attributionSource.uid = getuid();
+        inRequest.attributionSource.pid = getpid();
+        inRequest.attributionSource.token = sp<BBinder>::make();
+        auto status = aaudioService->openStream(inRequest, &outParams, &handle);
+        if (!status.isOk()) {
+            sError = "Cannot open stream, it can be caused by service death";
+            ALOGE("%s", sError.c_str());
+            sTestPassed = false;
+            break;
+        }
+        if (handle <= 0) {
+            sError = "Cannot get stream handle after open, returned handle"
+                    + std::to_string(handle);
+            ALOGE("%s", sError.c_str());
+            sTestPassed = false;
+            break;
+        }
+        hasFailedOpening = false;
+
+        Signal isReady;
+        Signal startWork;
+        Signal isCompleted;
+        std::unique_lock readyLock(isReady.lock);
+        std::unique_lock completedLock(isCompleted.lock);
+        for (int i = 0; i < THREAD_NUM; ++i) {
+            std::thread closeStream([aaudioService, handle, &isReady, &startWork, &isCompleted] {
+                isReady.value++;
+                isReady.cv.notify_one();
+                {
+                    std::shared_lock<std::shared_mutex> _l(startWork.lock);
+                    startWork.cv.wait(_l, [&startWork] { return startWork.value.load() == 1; });
+                }
+                int32_t result;
+                aaudioService->closeStream(handle, &result);
+                isCompleted.value++;
+                isCompleted.cv.notify_one();
+            });
+            closeStream.detach();
+        }
+        isReady.cv.wait(readyLock, [&isReady] { return isReady.value == THREAD_NUM; });
+        {
+            std::unique_lock startWorkLock(startWork.lock);
+            startWork.value.store(1);
+        }
+        startWork.cv.notify_all();
+        isCompleted.cv.wait_for(completedLock,
+                                std::chrono::milliseconds(1000),
+                                [&isCompleted] { return isCompleted.value == THREAD_NUM; });
+        if (isCompleted.value != THREAD_NUM) {
+            sError = "Close is not completed within 1 second";
+            ALOGE("%s", sError.c_str());
+            sTestPassed = false;
+            break;
+        }
+    }
+}
+
+TEST(test_multiple_close_simultaneously, open_multiple_close) {
+    const auto recipient = sp<AAudioServiceDeathRecipient>::make();
+    auto aaudioService = getAAudioService(recipient);
+    ASSERT_NE(nullptr, aaudioService) << sError;
+    openAndMultipleClose(aaudioService);
+    ASSERT_TRUE(sTestPassed) << sError;
+}
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 51a679b..5785537 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -29,14 +30,14 @@
     static_libs: [
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
-        "spatializer-aidl-cpp",
         "av-types-aidl-cpp",
+        "spatializer-aidl-cpp",
     ],
     export_static_lib_headers: [
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
-        "spatializer-aidl-cpp",
         "av-types-aidl-cpp",
+        "spatializer-aidl-cpp",
     ],
     target: {
         darwin: {
@@ -48,11 +49,11 @@
 cc_library {
     name: "libaudiopolicy",
     srcs: [
-        "VolumeGroupAttributes.cpp",
         "AudioPolicy.cpp",
         "AudioProductStrategy.cpp",
         "AudioVolumeGroup.cpp",
-        "PolicyAidlConversion.cpp"
+        "PolicyAidlConversion.cpp",
+        "VolumeGroupAttributes.cpp",
     ],
     defaults: [
         "latest_android_media_audio_common_types_cpp_export_shared",
@@ -63,8 +64,9 @@
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
-        "libaudiofoundation",
+        "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
         "libaudioutils",
         "libbinder",
         "libcutils",
@@ -72,8 +74,8 @@
         "libutils",
     ],
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
     include_dirs: ["system/media/audio_utils/include"],
     export_include_dirs: ["include"],
@@ -83,8 +85,8 @@
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
-        "libaudiofoundation",
         "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
     ],
     header_libs: ["libaudioclient_headers"],
 }
@@ -112,28 +114,30 @@
         "AudioTrack.cpp",
         "AudioTrackShared.cpp",
         "IAudioFlinger.cpp",
-        "ToneGenerator.cpp",
         "PlayerBase.cpp",
         "RecordingActivityTracker.cpp",
+        "ToneGenerator.cpp",
         "TrackPlayerBase.cpp",
     ],
     defaults: [
         "latest_android_media_audio_common_types_cpp_shared",
     ],
     shared_libs: [
+        "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
-        "spatializer-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "av-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
+        "com.android.media.audioclient-aconfig-cc",
+        "framework-permission-aidl-cpp",
         "libaudio_aidl_conversion_common_cpp",
         "libaudioclient_aidl_conversion",
         "libaudiofoundation",
-        "libaudioutils",
-        "libaudiopolicy",
         "libaudiomanager",
+        "libaudiopolicy",
+        "libaudioutils",
         "libbinder",
         "libcutils",
         "libdl",
@@ -145,24 +149,24 @@
         "libprocessgroup",
         "libshmemcompat",
         "libutils",
-        "framework-permission-aidl-cpp",
         "packagemanager_aidl-cpp",
+        "spatializer-aidl-cpp",
     ],
     export_shared_lib_headers: [
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
-        "spatializer-aidl-cpp",
         "framework-permission-aidl-cpp",
         "libbinder",
         "libmediametrics",
+        "spatializer-aidl-cpp",
     ],
 
     include_dirs: [
         "frameworks/av/media/libnbaio/include_mono/",
     ],
     local_include_dirs: [
-        "include/media",
         "aidl",
+        "include/media",
     ],
     header_libs: [
         "libaudioclient_headers",
@@ -185,11 +189,12 @@
         "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
+        "-Wthread-safety",
     ],
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
     },
 }
@@ -226,8 +231,8 @@
 filegroup {
     name: "libaudioclient_aidl",
     srcs: [
-        "aidl/android/media/IPlayer.aidl",
         "aidl/android/media/AudioHalVersion.aidl",
+        "aidl/android/media/IPlayer.aidl",
     ],
     path: "aidl",
 }
@@ -293,14 +298,14 @@
         "aidl/android/media/AudioIoDescriptor.aidl",
         "aidl/android/media/AudioPatchFw.aidl",
         "aidl/android/media/AudioPolicyConfig.aidl",
-        "aidl/android/media/AudioPortFw.aidl",
-        "aidl/android/media/AudioPortSys.aidl",
         "aidl/android/media/AudioPortConfigFw.aidl",
         "aidl/android/media/AudioPortConfigSys.aidl",
         "aidl/android/media/AudioPortDeviceExtSys.aidl",
         "aidl/android/media/AudioPortExtSys.aidl",
+        "aidl/android/media/AudioPortFw.aidl",
         "aidl/android/media/AudioPortMixExtSys.aidl",
         "aidl/android/media/AudioPortRole.aidl",
+        "aidl/android/media/AudioPortSys.aidl",
         "aidl/android/media/AudioPortType.aidl",
         "aidl/android/media/AudioProfileSys.aidl",
         "aidl/android/media/AudioRoute.aidl",
@@ -309,8 +314,9 @@
         "aidl/android/media/AudioVibratorInfo.aidl",
         "aidl/android/media/DeviceConnectedState.aidl",
         "aidl/android/media/EffectDescriptor.aidl",
-        "aidl/android/media/TrackSecondaryOutputInfo.aidl",
         "aidl/android/media/SurroundSoundConfig.aidl",
+        "aidl/android/media/TrackInternalMuteInfo.aidl",
+        "aidl/android/media/TrackSecondaryOutputInfo.aidl",
     ],
     defaults: [
         "latest_android_media_audio_common_types_import_interface",
@@ -331,6 +337,7 @@
         },
     },
 }
+
 aidl_interface {
     name: "audiopolicy-types-aidl",
     unstable: true,
@@ -341,14 +348,14 @@
     srcs: [
         "aidl/android/media/AudioAttributesEx.aidl",
         "aidl/android/media/AudioMix.aidl",
-        "aidl/android/media/AudioMixUpdate.aidl",
-        "aidl/android/media/AudioMixerAttributesInternal.aidl",
-        "aidl/android/media/AudioMixerBehavior.aidl",
         "aidl/android/media/AudioMixCallbackFlag.aidl",
         "aidl/android/media/AudioMixMatchCriterion.aidl",
         "aidl/android/media/AudioMixMatchCriterionValue.aidl",
         "aidl/android/media/AudioMixRouteFlag.aidl",
         "aidl/android/media/AudioMixType.aidl",
+        "aidl/android/media/AudioMixUpdate.aidl",
+        "aidl/android/media/AudioMixerAttributesInternal.aidl",
+        "aidl/android/media/AudioMixerBehavior.aidl",
         "aidl/android/media/AudioOffloadMode.aidl",
         "aidl/android/media/AudioPolicyDeviceState.aidl",
         "aidl/android/media/AudioPolicyForceUse.aidl",
@@ -363,6 +370,7 @@
     ],
     imports: [
         "audioclient-types-aidl",
+        "framework-permission-aidl",
     ],
     backend: {
         cpp: {
@@ -397,8 +405,8 @@
         "aidl/android/media/OpenOutputResponse.aidl",
         "aidl/android/media/RenderPosition.aidl",
 
-        "aidl/android/media/IAudioFlingerService.aidl",
         "aidl/android/media/IAudioFlingerClient.aidl",
+        "aidl/android/media/IAudioFlingerService.aidl",
         "aidl/android/media/IAudioRecord.aidl",
         "aidl/android/media/IAudioTrack.aidl",
         "aidl/android/media/IAudioTrackCallback.aidl",
@@ -414,8 +422,8 @@
         "audioclient-types-aidl",
         "av-types-aidl",
         "effect-aidl",
-        "shared-file-region-aidl",
         "framework-permission-aidl",
+        "shared-file-region-aidl",
     ],
     double_loadable: true,
     backend: {
@@ -442,14 +450,15 @@
         "aidl/android/media/GetInputForAttrResponse.aidl",
         "aidl/android/media/GetOutputForAttrResponse.aidl",
         "aidl/android/media/GetSpatializerResponse.aidl",
-        "aidl/android/media/RecordClientInfo.aidl",
         "aidl/android/media/IAudioPolicyService.aidl",
         "aidl/android/media/IAudioPolicyServiceClient.aidl",
+        "aidl/android/media/RecordClientInfo.aidl",
     ],
     defaults: [
         "latest_android_media_audio_common_types_import_interface",
     ],
     imports: [
+        "audio-permission-aidl",
         "audioclient-types-aidl",
         "audiopolicy-types-aidl",
         "capture_state_listener-aidl",
diff --git a/media/libaudioclient/AudioProductStrategy.cpp b/media/libaudioclient/AudioProductStrategy.cpp
index d9fd58c..1417182 100644
--- a/media/libaudioclient/AudioProductStrategy.cpp
+++ b/media/libaudioclient/AudioProductStrategy.cpp
@@ -60,9 +60,13 @@
 }
 
 // Keep in sync with android/media/audiopolicy/AudioProductStrategy#attributeMatches
-int AudioProductStrategy::attributesMatchesScore(const audio_attributes_t refAttributes,
-                                                 const audio_attributes_t clientAttritubes)
+int AudioProductStrategy::attributesMatchesScore(audio_attributes_t refAttributes,
+                                                 audio_attributes_t clientAttritubes)
 {
+    refAttributes.flags = static_cast<audio_flags_mask_t>(
+            refAttributes.flags & AUDIO_FLAGS_AFFECT_STRATEGY_SELECTION);
+    clientAttritubes.flags = static_cast<audio_flags_mask_t>(
+            clientAttritubes.flags & AUDIO_FLAGS_AFFECT_STRATEGY_SELECTION);
     if (refAttributes == clientAttritubes) {
         return MATCH_EQUALS;
     }
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 91bc700..f729e1b 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -130,11 +130,7 @@
 }
 
 AudioRecord::AudioRecord(const AttributionSourceState &client)
-    : mActive(false), mStatus(NO_INIT), mClientAttributionSource(client),
-      mSessionId(AUDIO_SESSION_ALLOCATE), mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT), mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE), mSelectedMicDirection(MIC_DIRECTION_UNSPECIFIED),
-      mSelectedMicFieldDimension(MIC_FIELD_DIMENSION_DEFAULT)
+    : mClientAttributionSource(client)
 {
 }
 
@@ -154,13 +150,7 @@
         audio_port_handle_t selectedDeviceId,
         audio_microphone_direction_t selectedMicDirection,
         float microphoneFieldDimension)
-    : mActive(false),
-      mStatus(NO_INIT),
-      mClientAttributionSource(client),
-      mSessionId(AUDIO_SESSION_ALLOCATE),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mProxy(nullptr)
+    : mClientAttributionSource(client)
 {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mClientAttributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid));
@@ -199,9 +189,6 @@
 }
 
 void AudioRecord::stopAndJoinCallbacks() {
-    // Prevent nullptr crash if it did not open properly.
-    if (mStatus != NO_ERROR) return;
-
     // Make sure that callback function exits in the case where
     // it is looping on buffer empty condition in obtainBuffer().
     // Otherwise the callback thread will never exit.
@@ -693,16 +680,27 @@
     AutoMutex lock(mLock);
     ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
             __func__, mPortId, deviceId, mSelectedDeviceId);
+
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
-            // stop capture so that audio policy manager does not reject the new instance start request
-            // as only one capture can be active at a time.
-            if (mAudioRecord != 0 && mActive) {
-                mAudioRecord->stop();
+            if (mActive) {
+                if (mSelectedDeviceId != mRoutedDeviceId) {
+                    // stop capture so that audio policy manager does not reject the new instance
+                    // start request as only one capture can be active at a time.
+                    if (mAudioRecord != 0) {
+                        mAudioRecord->stop();
+                    }
+                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                    mProxy->interrupt();
+                }
+            } else {
+                // if the track is idle, try to restore now and
+                // defer to next start if not possible
+                if (restoreRecord_l("setInputDevice") != OK) {
+                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                }
             }
-            android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
-            mProxy->interrupt();
         }
     }
     return NO_ERROR;
@@ -1521,7 +1519,7 @@
             .set(AMEDIAMETRICS_PROP_WHERE, from)
             .record(); });
 
-    ALOGW("%s(%d): dead IAudioRecord, creating a new one from %s()", __func__, mPortId, from);
+    ALOGW("%s(%d) called from %s()", __func__, mPortId, from);
     ++mSequence;
 
     const int INITIAL_RETRIES = 3;
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 5bfdd5f..3602e94 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -22,6 +22,7 @@
 #include <android/media/IAudioPolicyService.h>
 #include <android/media/AudioMixUpdate.h>
 #include <android/media/BnCaptureStateListener.h>
+#include <android_media_audiopolicy.h>
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
 #include <binder/IPCThreadState.h>
@@ -44,6 +45,8 @@
 
 // ----------------------------------------------------------------------------
 
+namespace audio_flags = android::media::audiopolicy;
+
 namespace android {
 using aidl_utils::statusTFromBinderStatus;
 using binder::Status;
@@ -62,115 +65,185 @@
 using media::audio::common::AudioUsage;
 using media::audio::common::Int;
 
-// client singleton for AudioFlinger binder interface
-Mutex AudioSystem::gLock;
-Mutex AudioSystem::gLockErrorCallbacks;
-Mutex AudioSystem::gLockAPS;
-sp<IAudioFlinger> AudioSystem::gAudioFlinger;
-sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
-std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
+std::mutex AudioSystem::gMutex;
 dynamic_policy_callback AudioSystem::gDynPolicyCallback = NULL;
 record_config_callback AudioSystem::gRecordConfigCallback = NULL;
 routing_callback AudioSystem::gRoutingCallback = NULL;
 vol_range_init_req_callback AudioSystem::gVolRangeInitReqCallback = NULL;
 
-// Required to be held while calling into gSoundTriggerCaptureStateListener.
-class CaptureStateListenerImpl;
+std::mutex AudioSystem::gApsCallbackMutex;
+std::mutex AudioSystem::gErrorCallbacksMutex;
+std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
 
-Mutex gSoundTriggerCaptureStateListenerLock;
-sp<CaptureStateListenerImpl> gSoundTriggerCaptureStateListener = nullptr;
+std::mutex AudioSystem::gSoundTriggerMutex;
+sp<CaptureStateListenerImpl> AudioSystem::gSoundTriggerCaptureStateListener;
 
-// Binder for the AudioFlinger service that's passed to this client process from the system server.
+// Sets the Binder for the AudioFlinger service, passed to this client process
+// from the system server.
 // This allows specific isolated processes to access the audio system. Currently used only for the
 // HotwordDetectionService.
-static sp<IBinder> gAudioFlingerBinder = nullptr;
+template <typename ServiceInterface, typename Client, typename AidlInterface,
+        typename ServiceTraits>
+class ServiceHandler {
+public:
+    sp<ServiceInterface> getService()
+            EXCLUDES(mMutex) NO_THREAD_SAFETY_ANALYSIS {  // std::unique_ptr
+        sp<ServiceInterface> service;
+        sp<Client> client;
 
-void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
-    if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
-        ALOGE("setAudioFlingerBinder: received a binder of type %s",
-              String8(audioFlinger->getInterfaceDescriptor()).c_str());
-        return;
-    }
-    Mutex::Autolock _l(gLock);
-    if (gAudioFlinger != nullptr) {
-        ALOGW("setAudioFlingerBinder: ignoring; AudioFlinger connection already established.");
-        return;
-    }
-    gAudioFlingerBinder = audioFlinger;
-}
-
-static sp<IAudioFlinger> gLocalAudioFlinger; // set if we are local.
-
-status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
-    Mutex::Autolock _l(gLock);
-    if (gAudioFlinger != nullptr) return INVALID_OPERATION;
-    gLocalAudioFlinger = af;
-    return OK;
-}
-
-// establish binder interface to AudioFlinger service
-const sp<IAudioFlinger> AudioSystem::getAudioFlingerImpl(bool canStartThreadPool = true) {
-    sp<IAudioFlinger> af;
-    sp<AudioFlingerClient> afc;
-    bool reportNoError = false;
-    {
-        Mutex::Autolock _l(gLock);
-        if (gAudioFlinger != nullptr) {
-            return gAudioFlinger;
+        bool reportNoError = false;
+        {
+            std::lock_guard _l(mMutex);
+            if (mService != nullptr) {
+                return mService;
+            }
         }
 
-        if (gAudioFlingerClient == nullptr) {
-            gAudioFlingerClient = sp<AudioFlingerClient>::make();
+        std::unique_lock ul_only1thread(mSingleGetter);
+        std::unique_lock ul(mMutex);
+        if (mService != nullptr) {
+            return mService;
+        }
+        if (mClient == nullptr) {
+            mClient = sp<Client>::make();
         } else {
             reportNoError = true;
         }
+        while (true) {
+            mService = mLocalService;
+            if (mService != nullptr) break;
 
-        if (gLocalAudioFlinger != nullptr) {
-            gAudioFlinger = gLocalAudioFlinger;
-        } else {
-            sp<IBinder> binder;
-            if (gAudioFlingerBinder != nullptr) {
-                binder = gAudioFlingerBinder;
-            } else {
-                sp<IServiceManager> sm = defaultServiceManager();
-                binder = sm->waitForService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+            sp<IBinder> binder = mBinder;
+            if (binder == nullptr) {
+                sp<IServiceManager> sm = defaultServiceManager();
+                binder = sm->checkService(String16(ServiceTraits::SERVICE_NAME));
                 if (binder == nullptr) {
-                    return nullptr;
+                    ALOGD("%s: waiting for %s", __func__, ServiceTraits::SERVICE_NAME);
+
+                    // If the condition variable is present, setLocalService() and
+                    // setBinder() are allowed to use it to notify us.
+                    if (mCvGetter == nullptr) {
+                        mCvGetter = std::make_shared<std::condition_variable>();
+                    }
+                    mCvGetter->wait_for(ul, std::chrono::seconds(1));
+                    continue;
                 }
             }
-            binder->linkToDeath(gAudioFlingerClient);
-            const auto afs = interface_cast<media::IAudioFlingerService>(binder);
-            LOG_ALWAYS_FATAL_IF(afs == nullptr);
-            gAudioFlinger = sp<AudioFlingerClientAdapter>::make(afs);
+            binder->linkToDeath(mClient);
+            auto aidlInterface = interface_cast<AidlInterface>(binder);
+            LOG_ALWAYS_FATAL_IF(aidlInterface == nullptr);
+            if constexpr (std::is_same_v<ServiceInterface, AidlInterface>) {
+                mService = std::move(aidlInterface);
+            } else /* constexpr */ {
+                mService = ServiceTraits::createServiceAdapter(aidlInterface);
+            }
+            break;
         }
-        afc = gAudioFlingerClient;
-        af = gAudioFlinger;
-        // Make sure callbacks can be received by gAudioFlingerClient
-        if(canStartThreadPool) {
+        if (mCvGetter) mCvGetter.reset();  // remove condition variable.
+        client = mClient;
+        service = mService;
+        // Make sure callbacks can be received by the client
+        if (mCanStartThreadPool) {
             ProcessState::self()->startThreadPool();
         }
+        ul.unlock();
+        ul_only1thread.unlock();
+        ServiceTraits::onServiceCreate(service, client);
+        if (reportNoError) AudioSystem::reportError(NO_ERROR);
+        return service;
     }
-    const int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    af->registerClient(afc);
-    IPCThreadState::self()->restoreCallingIdentity(token);
-    if (reportNoError) reportError(NO_ERROR);
-    return af;
+
+    status_t setLocalService(const sp<ServiceInterface>& service) EXCLUDES(mMutex) {
+        std::lock_guard _l(mMutex);
+        // we allow clearing once set, but not a double non-null set.
+        if (mService != nullptr && service != nullptr) return INVALID_OPERATION;
+        mLocalService = service;
+        if (mCvGetter) mCvGetter->notify_one();
+        return OK;
+    }
+
+    sp<Client> getClient() EXCLUDES(mMutex) {
+        const auto service = getService();
+        if (service == nullptr) return nullptr;
+        std::lock_guard _l(mMutex);
+        return mClient;
+    }
+
+    void setBinder(const sp<IBinder>& binder) EXCLUDES(mMutex) {
+        std::lock_guard _l(mMutex);
+        if (mService != nullptr) {
+            ALOGW("%s: ignoring; %s connection already established.",
+                    __func__, ServiceTraits::SERVICE_NAME);
+            return;
+        }
+        mBinder = binder;
+        if (mCvGetter) mCvGetter->notify_one();
+    }
+
+    void clearService() EXCLUDES(mMutex) {
+        std::lock_guard _l(mMutex);
+        mService.clear();
+        if (mClient) ServiceTraits::onClearService(mClient);
+    }
+
+    void disableThreadPool() {
+        mCanStartThreadPool = false;
+    }
+
+private:
+    std::mutex mSingleGetter;
+    std::mutex mMutex;
+    std::shared_ptr<std::condition_variable> mCvGetter GUARDED_BY(mMutex);
+    sp<IBinder> mBinder GUARDED_BY(mMutex);
+    sp<ServiceInterface> mLocalService GUARDED_BY(mMutex);
+    sp<ServiceInterface> mService GUARDED_BY(mMutex);
+    sp<Client> mClient GUARDED_BY(mMutex);
+    std::atomic<bool> mCanStartThreadPool = true;
+};
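
The core of ServiceHandler::getService() above is a bounded wait-and-retry loop: poll the service manager, and if the service is not registered yet, sleep on a condition variable that setBinder() or setLocalService() can signal to cut the wait short. A minimal standalone sketch of that idiom follows; the Service type and findService() are illustrative stand-ins for IBinder and IServiceManager::checkService(), not part of this patch.

    #include <chrono>
    #include <condition_variable>
    #include <memory>
    #include <mutex>

    // Illustrative stand-ins for the binder service and the service-manager lookup.
    struct Service {};
    std::shared_ptr<Service> findService() {
        static int polls = 0;                        // pretend the service registers on the 3rd poll
        return ++polls < 3 ? nullptr : std::make_shared<Service>();
    }

    std::mutex gServiceMutex;
    std::condition_variable gServiceCv;              // notified when another thread publishes the service
    std::shared_ptr<Service> gService;

    std::shared_ptr<Service> waitForService() {
        std::unique_lock lock(gServiceMutex);
        while (gService == nullptr) {
            if (auto s = findService()) { gService = std::move(s); break; }
            gServiceCv.wait_for(lock, std::chrono::seconds(1));  // re-poll at most once per second
        }
        return gService;
    }

As in the patch, the one-second timeout keeps the loop making progress even if a notification is missed, while mSingleGetter in the real code ensures only one thread performs the lookup at a time.
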
+
+struct AudioFlingerTraits {
+    static void onServiceCreate(
+            const sp<IAudioFlinger>& af, const sp<AudioSystem::AudioFlingerClient>& afc) {
+        const int64_t token = IPCThreadState::self()->clearCallingIdentity();
+        af->registerClient(afc);
+        IPCThreadState::self()->restoreCallingIdentity(token);
+    }
+
+    static sp<IAudioFlinger> createServiceAdapter(
+            const sp<media::IAudioFlingerService>& aidlInterface) {
+        return sp<AudioFlingerClientAdapter>::make(aidlInterface);
+    }
+
+    static void onClearService(const sp<AudioSystem::AudioFlingerClient>& afc) {
+        afc->clearIoCache();
+    }
+
+    static constexpr const char* SERVICE_NAME = IAudioFlinger::DEFAULT_SERVICE_NAME;
+};
+
+[[clang::no_destroy]] static constinit ServiceHandler<IAudioFlinger,
+        AudioSystem::AudioFlingerClient, media::IAudioFlingerService,
+        AudioFlingerTraits> gAudioFlingerServiceHandler;
+
+sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
+    return gAudioFlingerServiceHandler.getService();
 }
 
-const sp<IAudioFlinger> AudioSystem:: get_audio_flinger() {
-    return getAudioFlingerImpl();
+sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
+    return gAudioFlingerServiceHandler.getClient();
 }
 
-const sp<IAudioFlinger> AudioSystem:: get_audio_flinger_for_fuzzer() {
-    return getAudioFlingerImpl(false);
+void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
+    if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
+        ALOGE("%s: received a binder of type %s",
+                __func__, String8(audioFlinger->getInterfaceDescriptor()).c_str());
+        return;
+    }
+    gAudioFlingerServiceHandler.setBinder(audioFlinger);
 }
 
-const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
-    // calling get_audio_flinger() will initialize gAudioFlingerClient if needed
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
-    if (af == 0) return 0;
-    Mutex::Autolock _l(gLock);
-    return gAudioFlingerClient;
+status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
+    return gAudioFlingerServiceHandler.setLocalService(af);
 }
 
 sp<AudioIoDescriptor> AudioSystem::getIoDescriptor(audio_io_handle_t ioHandle) {
@@ -192,41 +265,41 @@
 // FIXME Declare in binder opcode order, similarly to IAudioFlinger.h and IAudioFlinger.cpp
 
 status_t AudioSystem::muteMicrophone(bool state) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setMicMute(state);
 }
 
 status_t AudioSystem::isMicrophoneMuted(bool* state) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *state = af->getMicMute();
     return NO_ERROR;
 }
 
 status_t AudioSystem::setMasterVolume(float value) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     af->setMasterVolume(value);
     return NO_ERROR;
 }
 
 status_t AudioSystem::setMasterMute(bool mute) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     af->setMasterMute(mute);
     return NO_ERROR;
 }
 
 status_t AudioSystem::getMasterVolume(float* volume) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *volume = af->masterVolume();
     return NO_ERROR;
 }
 
 status_t AudioSystem::getMasterMute(bool* mute) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *mute = af->masterMute();
     return NO_ERROR;
@@ -235,7 +308,7 @@
 status_t AudioSystem::setStreamVolume(audio_stream_type_t stream, float value,
                                       audio_io_handle_t output) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     af->setStreamVolume(stream, value, output);
     return NO_ERROR;
@@ -243,50 +316,33 @@
 
 status_t AudioSystem::setStreamMute(audio_stream_type_t stream, bool mute) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     af->setStreamMute(stream, mute);
     return NO_ERROR;
 }
 
-status_t AudioSystem::getStreamVolume(audio_stream_type_t stream, float* volume,
-                                      audio_io_handle_t output) {
-    if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
-    if (af == 0) return PERMISSION_DENIED;
-    *volume = af->streamVolume(stream, output);
-    return NO_ERROR;
-}
-
-status_t AudioSystem::getStreamMute(audio_stream_type_t stream, bool* mute) {
-    if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
-    if (af == 0) return PERMISSION_DENIED;
-    *mute = af->streamMute(stream);
-    return NO_ERROR;
-}
-
 status_t AudioSystem::setMode(audio_mode_t mode) {
     if (uint32_t(mode) >= AUDIO_MODE_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setMode(mode);
 }
 
 status_t AudioSystem::setSimulateDeviceConnections(bool enabled) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setSimulateDeviceConnections(enabled);
 }
 
 status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setParameters(ioHandle, keyValuePairs);
 }
 
 String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     String8 result = String8("");
     if (af == 0) return result;
 
@@ -305,23 +361,23 @@
 // convert volume steps to natural log scale
 
 // change this value to change volume scaling
-static const float dBPerStep = 0.5f;
+constexpr float kdBPerStep = 0.5f;
 // shouldn't need to touch these
-static const float dBConvert = -dBPerStep * 2.302585093f / 20.0f;
-static const float dBConvertInverse = 1.0f / dBConvert;
+constexpr float kdBConvert = -kdBPerStep * 2.302585093f / 20.0f;
+constexpr float kdBConvertInverse = 1.0f / kdBConvert;
 
 float AudioSystem::linearToLog(int volume) {
-    // float v = volume ? exp(float(100 - volume) * dBConvert) : 0;
+    // float v = volume ? exp(float(100 - volume) * kdBConvert) : 0;
     // ALOGD("linearToLog(%d)=%f", volume, v);
     // return v;
-    return volume ? exp(float(100 - volume) * dBConvert) : 0;
+    return volume ? exp(float(100 - volume) * kdBConvert) : 0;
 }
 
 int AudioSystem::logToLinear(float volume) {
-    // int v = volume ? 100 - int(dBConvertInverse * log(volume) + 0.5) : 0;
+    // int v = volume ? 100 - int(kdBConvertInverse * log(volume) + 0.5) : 0;
     // ALOGD("logTolinear(%d)=%f", v, volume);
     // return v;
-    return volume ? 100 - int(dBConvertInverse * log(volume) + 0.5) : 0;
+    return volume ? 100 - int(kdBConvertInverse * log(volume) + 0.5) : 0;
 }
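
The constants above encode 0.5 dB per UI volume step: 2.302585093 is ln(10), so exp((100 - volume) * kdBConvert) is simply 10^(-dB/20) with dB = (100 - volume) * 0.5, and logToLinear() is its inverse. A small self-contained check of that round trip (not part of this patch; values reproduced locally):

    #include <cmath>
    #include <cstdio>

    // Same values as the constants above, reproduced for a standalone check.
    constexpr float kStepDb = 0.5f;                              // 0.5 dB per UI step
    constexpr float kConvert = -kStepDb * 2.302585093f / 20.0f;  // -kStepDb * ln(10) / 20
    constexpr float kConvertInverse = 1.0f / kConvert;

    int main() {
        std::printf("%f\n", std::exp((100 - 100) * kConvert));  // 1.000000 (unity gain)
        std::printf("%f\n", std::exp((100 - 80) * kConvert));   // ~0.316228 (20 steps = -10 dB)
        // The inverse mapping recovers the UI step index.
        std::printf("%d\n", 100 - int(kConvertInverse * std::log(0.316228f) + 0.5f));  // 80
        return 0;
    }
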
 
 /* static */ size_t AudioSystem::calculateMinFrameCount(
@@ -366,7 +422,7 @@
 
 status_t AudioSystem::getSamplingRate(audio_io_handle_t ioHandle,
                                       uint32_t* samplingRate) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
     if (desc == 0) {
@@ -401,7 +457,7 @@
 
 status_t AudioSystem::getFrameCount(audio_io_handle_t ioHandle,
                                     size_t* frameCount) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
     if (desc == 0) {
@@ -436,7 +492,7 @@
 
 status_t AudioSystem::getLatency(audio_io_handle_t output,
                                  uint32_t* latency) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output);
     if (outputDesc == 0) {
@@ -460,21 +516,21 @@
 }
 
 status_t AudioSystem::setVoiceVolume(float value) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setVoiceVolume(value);
 }
 
 status_t AudioSystem::getRenderPosition(audio_io_handle_t output, uint32_t* halFrames,
                                         uint32_t* dspFrames) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
 
     return af->getRenderPosition(halFrames, dspFrames, output);
 }
 
 uint32_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     uint32_t result = 0;
     if (af == 0) return result;
     if (ioHandle == AUDIO_IO_HANDLE_NONE) return result;
@@ -485,46 +541,46 @@
 
 audio_unique_id_t AudioSystem::newAudioUniqueId(audio_unique_id_use_t use) {
     // Must not use AF as IDs will re-roll on audioserver restart, b/130369529.
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return AUDIO_UNIQUE_ID_ALLOCATE;
     return af->newAudioUniqueId(use);
 }
 
 void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af != 0) {
         af->acquireAudioSessionId(audioSession, pid, uid);
     }
 }
 
 void AudioSystem::releaseAudioSessionId(audio_session_t audioSession, pid_t pid) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af != 0) {
         af->releaseAudioSessionId(audioSession, pid);
     }
 }
 
 audio_hw_sync_t AudioSystem::getAudioHwSyncForSession(audio_session_t sessionId) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return AUDIO_HW_SYNC_INVALID;
     return af->getAudioHwSyncForSession(sessionId);
 }
 
 status_t AudioSystem::systemReady() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return NO_INIT;
     return af->systemReady();
 }
 
 status_t AudioSystem::audioPolicyReady() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return NO_INIT;
     return af->audioPolicyReady();
 }
 
 status_t AudioSystem::getFrameCountHAL(audio_io_handle_t ioHandle,
                                        size_t* frameCount) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
     if (desc == 0) {
@@ -546,7 +602,7 @@
 
 
 void AudioSystem::AudioFlingerClient::clearIoCache() {
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     mIoDescriptors.clear();
     mInBuffSize = 0;
     mInSamplingRate = 0;
@@ -555,14 +611,7 @@
 }
 
 void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused) {
-    {
-        Mutex::Autolock _l(AudioSystem::gLock);
-        AudioSystem::gAudioFlinger.clear();
-    }
-
-    // clear output handles and stream to output map caches
-    clearIoCache();
-
+    gAudioFlingerServiceHandler.clearService();
     reportError(DEAD_OBJECT);
 
     ALOGW("AudioFlinger server died!");
@@ -584,7 +633,7 @@
     audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
     std::vector<sp<AudioDeviceCallback>> callbacksToCall;
     {
-        Mutex::Autolock _l(mLock);
+        std::lock_guard _l(mMutex);
         auto callbacks = std::map<audio_port_handle_t, wp<AudioDeviceCallback>>();
 
         switch (event) {
@@ -592,13 +641,10 @@
             case AUDIO_OUTPUT_REGISTERED:
             case AUDIO_INPUT_OPENED:
             case AUDIO_INPUT_REGISTERED: {
-                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle());
-                if (oldDesc == 0) {
-                    mIoDescriptors.add(ioDesc->getIoHandle(), ioDesc);
-                } else {
+                if (sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle())) {
                     deviceId = oldDesc->getDeviceId();
-                    mIoDescriptors.replaceValueFor(ioDesc->getIoHandle(), ioDesc);
                 }
+                mIoDescriptors[ioDesc->getIoHandle()] = ioDesc;
 
                 if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
                     deviceId = ioDesc->getDeviceId();
@@ -627,7 +673,7 @@
                 ALOGV("ioConfigChanged() %s %d closed",
                       event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->getIoHandle());
 
-                mIoDescriptors.removeItem(ioDesc->getIoHandle());
+                mIoDescriptors.erase(ioDesc->getIoHandle());
                 mAudioDeviceCallbacks.erase(ioDesc->getIoHandle());
             }
                 break;
@@ -643,7 +689,7 @@
                 }
 
                 deviceId = oldDesc->getDeviceId();
-                mIoDescriptors.replaceValueFor(ioDesc->getIoHandle(), ioDesc);
+                mIoDescriptors[ioDesc->getIoHandle()] = ioDesc;
 
                 if (deviceId != ioDesc->getDeviceId()) {
                     deviceId = ioDesc->getDeviceId();
@@ -689,8 +735,8 @@
         }
     }
 
-    // Callbacks must be called without mLock held. May lead to dead lock if calling for
-    // example getRoutedDevice that updates the device and tries to acquire mLock.
+    // Callbacks must be invoked without mMutex held. Calling them while holding the lock
+    // could deadlock, e.g. getRoutedDevice updates the device and tries to acquire mMutex.
     for (auto cb  : callbacksToCall) {
         // If callbacksToCall is not empty, it implies ioDesc->getIoHandle() and deviceId are valid
         cb->onAudioDeviceUpdate(ioDesc->getIoHandle(), deviceId);
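
The snapshot-then-invoke pattern described in the comment above (copy the registered callbacks while holding mMutex, drop the lock, then call them) is what makes re-entrant callbacks such as getRoutedDevice safe. A generic sketch of the idiom, with illustrative types that are not part of this patch:

    #include <functional>
    #include <mutex>
    #include <vector>

    std::mutex gCallbackMutex;
    std::vector<std::function<void(int)>> gCallbacks;  // guarded by gCallbackMutex

    void notifyAll(int event) {
        std::vector<std::function<void(int)>> toCall;
        {
            std::lock_guard lock(gCallbackMutex);
            toCall = gCallbacks;                        // snapshot under the lock
        }
        // Invoke with the lock released: a callback may re-enter and take
        // gCallbackMutex again without deadlocking.
        for (const auto& cb : toCall) cb(event);
    }
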
@@ -709,7 +755,7 @@
 
     std::vector<sp<SupportedLatencyModesCallback>> callbacks;
     {
-        Mutex::Autolock _l(mLock);
+        std::lock_guard _l(mMutex);
         for (auto callback : mSupportedLatencyModesCallbacks) {
             if (auto ref = callback.promote(); ref != nullptr) {
                 callbacks.push_back(ref);
@@ -726,11 +772,11 @@
 status_t AudioSystem::AudioFlingerClient::getInputBufferSize(
         uint32_t sampleRate, audio_format_t format,
         audio_channel_mask_t channelMask, size_t* buffSize) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) {
         return PERMISSION_DENIED;
     }
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     // Do we have a stale mInBuffSize or are we requesting the input buffer size for new values
     if ((mInBuffSize == 0) || (sampleRate != mInSamplingRate) || (format != mInFormat)
         || (channelMask != mInChannelMask)) {
@@ -756,16 +802,15 @@
 
 sp<AudioIoDescriptor>
 AudioSystem::AudioFlingerClient::getIoDescriptor_l(audio_io_handle_t ioHandle) {
-    sp<AudioIoDescriptor> desc;
-    ssize_t index = mIoDescriptors.indexOfKey(ioHandle);
-    if (index >= 0) {
-        desc = mIoDescriptors.valueAt(index);
+    if (const auto it = mIoDescriptors.find(ioHandle);
+        it != mIoDescriptors.end()) {
+        return it->second;
     }
-    return desc;
+    return {};
 }
 
 sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor(audio_io_handle_t ioHandle) {
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     return getIoDescriptor_l(ioHandle);
 }
 
@@ -773,7 +818,7 @@
         const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo,
         audio_port_handle_t portId) {
     ALOGV("%s audioIo %d portId %d", __func__, audioIo, portId);
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     auto& callbacks = mAudioDeviceCallbacks.emplace(
             audioIo,
             std::map<audio_port_handle_t, wp<AudioDeviceCallback>>()).first->second;
@@ -788,7 +833,7 @@
         const wp<AudioDeviceCallback>& callback __unused, audio_io_handle_t audioIo,
         audio_port_handle_t portId) {
     ALOGV("%s audioIo %d portId %d", __func__, audioIo, portId);
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     auto it = mAudioDeviceCallbacks.find(audioIo);
     if (it == mAudioDeviceCallbacks.end()) {
         return INVALID_OPERATION;
@@ -804,7 +849,7 @@
 
 status_t AudioSystem::AudioFlingerClient::addSupportedLatencyModesCallback(
         const sp<SupportedLatencyModesCallback>& callback) {
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     if (std::find(mSupportedLatencyModesCallbacks.begin(),
                   mSupportedLatencyModesCallbacks.end(),
                   callback) != mSupportedLatencyModesCallbacks.end()) {
@@ -816,7 +861,7 @@
 
 status_t AudioSystem::AudioFlingerClient::removeSupportedLatencyModesCallback(
         const sp<SupportedLatencyModesCallback>& callback) {
-    Mutex::Autolock _l(mLock);
+    std::lock_guard _l(mMutex);
     auto it = std::find(mSupportedLatencyModesCallbacks.begin(),
                                  mSupportedLatencyModesCallbacks.end(),
                                  callback);
@@ -828,93 +873,83 @@
 }
 
 /* static */ uintptr_t AudioSystem::addErrorCallback(audio_error_callback cb) {
-    Mutex::Autolock _l(gLockErrorCallbacks);
+    std::lock_guard _l(gErrorCallbacksMutex);
     gAudioErrorCallbacks.insert(cb);
     return reinterpret_cast<uintptr_t>(cb);
 }
 
 /* static */ void AudioSystem::removeErrorCallback(uintptr_t cb) {
-    Mutex::Autolock _l(gLockErrorCallbacks);
+    std::lock_guard _l(gErrorCallbacksMutex);
     gAudioErrorCallbacks.erase(reinterpret_cast<audio_error_callback>(cb));
 }
 
 /* static */ void AudioSystem::reportError(status_t err) {
-    Mutex::Autolock _l(gLockErrorCallbacks);
+    std::lock_guard _l(gErrorCallbacksMutex);
     for (auto callback : gAudioErrorCallbacks) {
         callback(err);
     }
 }
 
 /*static*/ void AudioSystem::setDynPolicyCallback(dynamic_policy_callback cb) {
-    Mutex::Autolock _l(gLock);
+    std::lock_guard _l(gMutex);
     gDynPolicyCallback = cb;
 }
 
 /*static*/ void AudioSystem::setRecordConfigCallback(record_config_callback cb) {
-    Mutex::Autolock _l(gLock);
+    std::lock_guard _l(gMutex);
     gRecordConfigCallback = cb;
 }
 
 /*static*/ void AudioSystem::setRoutingCallback(routing_callback cb) {
-    Mutex::Autolock _l(gLock);
+    std::lock_guard _l(gMutex);
     gRoutingCallback = cb;
 }
 
 /*static*/ void AudioSystem::setVolInitReqCallback(vol_range_init_req_callback cb) {
-    Mutex::Autolock _l(gLock);
+    std::lock_guard _l(gMutex);
     gVolRangeInitReqCallback = cb;
 }
 
-// client singleton for AudioPolicyService binder interface
-// protected by gLockAPS
-sp<IAudioPolicyService> AudioSystem::gAudioPolicyService;
-sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient;
-
-
-// establish binder interface to AudioPolicy service
-const sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
-    sp<IAudioPolicyService> ap;
-    sp<AudioPolicyServiceClient> apc;
-    {
-        Mutex::Autolock _l(gLockAPS);
-        if (gAudioPolicyService == 0) {
-            sp<IServiceManager> sm = defaultServiceManager();
-            sp<IBinder> binder = sm->waitForService(String16("media.audio_policy"));
-            if (binder == nullptr) {
-                return nullptr;
-            }
-            if (gAudioPolicyServiceClient == NULL) {
-                gAudioPolicyServiceClient = new AudioPolicyServiceClient();
-            }
-            binder->linkToDeath(gAudioPolicyServiceClient);
-            gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
-            LOG_ALWAYS_FATAL_IF(gAudioPolicyService == 0);
-            apc = gAudioPolicyServiceClient;
-            // Make sure callbacks can be received by gAudioPolicyServiceClient
-            ProcessState::self()->startThreadPool();
-        }
-        ap = gAudioPolicyService;
-    }
-    if (apc != 0) {
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
+struct AudioPolicyTraits {
+    static void onServiceCreate(const sp<IAudioPolicyService>& ap,
+            const sp<AudioSystem::AudioPolicyServiceClient>& apc) {
+        const int64_t token = IPCThreadState::self()->clearCallingIdentity();
         ap->registerClient(apc);
         ap->setAudioPortCallbacksEnabled(apc->isAudioPortCbEnabled());
         ap->setAudioVolumeGroupCallbacksEnabled(apc->isAudioVolumeGroupCbEnabled());
         IPCThreadState::self()->restoreCallingIdentity(token);
     }
 
-    return ap;
+    static void onClearService(const sp<AudioSystem::AudioPolicyServiceClient>&) {}
+
+    static constexpr const char* SERVICE_NAME = "media.audio_policy";
+};
+
+[[clang::no_destroy]] static constinit ServiceHandler<IAudioPolicyService,
+        AudioSystem::AudioPolicyServiceClient, IAudioPolicyService,
+        AudioPolicyTraits> gAudioPolicyServiceHandler;
+
+status_t AudioSystem::setLocalAudioPolicyService(const sp<IAudioPolicyService>& aps) {
+    return gAudioPolicyServiceHandler.setLocalService(aps);
+}
+
+sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
+    return gAudioPolicyServiceHandler.getService();
 }
 
 void AudioSystem::clearAudioPolicyService() {
-    Mutex::Autolock _l(gLockAPS);
-    gAudioPolicyService.clear();
+    gAudioPolicyServiceHandler.clearService();
+}
+
+void AudioSystem::disableThreadPool() {
+    gAudioFlingerServiceHandler.disableThreadPool();
+    gAudioPolicyServiceHandler.disableThreadPool();
 }
 
 // ---------------------------------------------------------------------------
 
 void AudioSystem::onNewAudioModulesAvailable() {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return;
     aps->onNewAudioModulesAvailable();
 }
@@ -922,7 +957,7 @@
 status_t AudioSystem::setDeviceConnectionState(audio_policy_dev_state_t state,
                                                const android::media::audio::common::AudioPort& port,
                                                audio_format_t encodedFormat) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
 
     if (aps == 0) return PERMISSION_DENIED;
 
@@ -937,7 +972,7 @@
 
 audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device,
                                                                const char* device_address) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
 
     auto result = [&]() -> ConversionResult<audio_policy_dev_state_t> {
@@ -957,7 +992,7 @@
                                                const char* device_address,
                                                const char* device_name,
                                                audio_format_t encodedFormat) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     const char* address = "";
     const char* name = "";
 
@@ -980,7 +1015,7 @@
 
 status_t AudioSystem::setPhoneState(audio_mode_t state, uid_t uid) {
     if (uint32_t(state) >= AUDIO_MODE_CNT) return BAD_VALUE;
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     return statusTFromBinderStatus(aps->setPhoneState(
@@ -990,7 +1025,7 @@
 
 status_t
 AudioSystem::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     return statusTFromBinderStatus(
@@ -1003,7 +1038,7 @@
 }
 
 audio_policy_forced_cfg_t AudioSystem::getForceUse(audio_policy_force_use_t usage) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return AUDIO_POLICY_FORCE_NONE;
 
     auto result = [&]() -> ConversionResult<audio_policy_forced_cfg_t> {
@@ -1020,7 +1055,7 @@
 
 
 audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return AUDIO_IO_HANDLE_NONE;
 
     auto result = [&]() -> ConversionResult<audio_io_handle_t> {
@@ -1068,7 +1103,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return NO_INIT;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1117,7 +1152,7 @@
 }
 
 status_t AudioSystem::startOutput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1125,7 +1160,7 @@
 }
 
 status_t AudioSystem::stopOutput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1133,7 +1168,7 @@
 }
 
 void AudioSystem::releaseOutput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return;
 
     auto status = [&]() -> status_t {
@@ -1173,7 +1208,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return NO_INIT;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1207,7 +1242,7 @@
 }
 
 status_t AudioSystem::startInput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1215,7 +1250,7 @@
 }
 
 status_t AudioSystem::stopInput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1223,7 +1258,7 @@
 }
 
 void AudioSystem::releaseInput(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return;
 
     auto status = [&]() -> status_t {
@@ -1237,10 +1272,25 @@
     (void) status;
 }
 
+status_t AudioSystem::setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                     const char *address,
+                                                     bool enabled,
+                                                     audio_stream_type_t streamToDriveAbs) {
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
+    if (aps == nullptr) return PERMISSION_DENIED;
+
+    AudioDevice deviceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_device_AudioDevice(deviceType, address));
+    AudioStreamType streamToDriveAbsAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(streamToDriveAbs));
+    return statusTFromBinderStatus(
+            aps->setDeviceAbsoluteVolumeEnabled(deviceAidl, enabled, streamToDriveAbsAidl));
+}
+
 status_t AudioSystem::initStreamVolume(audio_stream_type_t stream,
                                        int indexMin,
                                        int indexMax) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1261,7 +1311,7 @@
 status_t AudioSystem::setStreamVolumeIndex(audio_stream_type_t stream,
                                            int index,
                                            audio_devices_t device) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1276,7 +1326,7 @@
 status_t AudioSystem::getStreamVolumeIndex(audio_stream_type_t stream,
                                            int* index,
                                            audio_devices_t device) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1295,7 +1345,7 @@
 status_t AudioSystem::setVolumeIndexForAttributes(const audio_attributes_t& attr,
                                                   int index,
                                                   audio_devices_t device) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1310,7 +1360,7 @@
 status_t AudioSystem::getVolumeIndexForAttributes(const audio_attributes_t& attr,
                                                   int& index,
                                                   audio_devices_t device) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1325,7 +1375,7 @@
 }
 
 status_t AudioSystem::getMaxVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1338,7 +1388,7 @@
 }
 
 status_t AudioSystem::getMinVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1351,7 +1401,7 @@
 }
 
 product_strategy_t AudioSystem::getStrategyForStream(audio_stream_type_t stream) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PRODUCT_STRATEGY_NONE;
 
     auto result = [&]() -> ConversionResult<product_strategy_t> {
@@ -1371,7 +1421,7 @@
     if (devices == nullptr) {
         return BAD_VALUE;
     }
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
@@ -1387,7 +1437,7 @@
 }
 
 audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t* desc) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     // FIXME change return type to status_t, and return PERMISSION_DENIED here
     if (aps == 0) return AUDIO_IO_HANDLE_NONE;
 
@@ -1408,7 +1458,7 @@
                                      product_strategy_t strategy,
                                      audio_session_t session,
                                      int id) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::EffectDescriptor descAidl = VALUE_OR_RETURN_STATUS(
@@ -1422,7 +1472,7 @@
 }
 
 status_t AudioSystem::unregisterEffect(int id) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
@@ -1431,7 +1481,7 @@
 }
 
 status_t AudioSystem::setEffectEnabled(int id, bool enabled) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
@@ -1440,7 +1490,7 @@
 }
 
 status_t AudioSystem::moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<int32_t> idsAidl = VALUE_OR_RETURN_STATUS(
@@ -1450,7 +1500,7 @@
 }
 
 status_t AudioSystem::isStreamActive(audio_stream_type_t stream, bool* state, uint32_t inPastMs) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
 
@@ -1464,7 +1514,7 @@
 
 status_t AudioSystem::isStreamActiveRemotely(audio_stream_type_t stream, bool* state,
                                              uint32_t inPastMs) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
 
@@ -1477,7 +1527,7 @@
 }
 
 status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
 
@@ -1489,19 +1539,19 @@
 }
 
 uint32_t AudioSystem::getPrimaryOutputSamplingRate() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return 0;
     return af->getPrimaryOutputSamplingRate();
 }
 
 size_t AudioSystem::getPrimaryOutputFrameCount() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return 0;
     return af->getPrimaryOutputFrameCount();
 }
 
 status_t AudioSystem::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setLowRamDevice(isLowRamDevice, totalMemory);
 }
@@ -1509,18 +1559,12 @@
 void AudioSystem::clearAudioConfigCache() {
     // called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
     ALOGV("clearAudioConfigCache()");
-    {
-        Mutex::Autolock _l(gLock);
-        if (gAudioFlingerClient != 0) {
-            gAudioFlingerClient->clearIoCache();
-        }
-        gAudioFlinger.clear();
-    }
+    gAudioFlingerServiceHandler.clearService();
     clearAudioPolicyService();
 }
 
 status_t AudioSystem::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) return PERMISSION_DENIED;
 
     std::vector<AudioUsage> systemUsagesAidl = VALUE_OR_RETURN_STATUS(
@@ -1530,7 +1574,7 @@
 }
 
 status_t AudioSystem::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) return PERMISSION_DENIED;
 
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -1541,7 +1585,7 @@
 
 audio_offload_mode_t AudioSystem::getOffloadSupport(const audio_offload_info_t& info) {
     ALOGV("%s", __func__);
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return AUDIO_OFFLOAD_NOT_SUPPORTED;
 
     auto result = [&]() -> ConversionResult<audio_offload_mode_t> {
@@ -1566,7 +1610,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::AudioPortRole roleAidl = VALUE_OR_RETURN_STATUS(
@@ -1590,7 +1634,7 @@
 status_t AudioSystem::listDeclaredDevicePorts(media::AudioPortRole role,
                                               std::vector<media::AudioPortFw>* result) {
     if (result == nullptr) return BAD_VALUE;
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aps->listDeclaredDevicePorts(role, result)));
     return OK;
@@ -1600,7 +1644,7 @@
     if (port == nullptr) {
         return BAD_VALUE;
     }
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::AudioPortFw portAidl;
@@ -1616,7 +1660,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
@@ -1629,7 +1673,7 @@
 }
 
 status_t AudioSystem::releaseAudioPatch(audio_patch_handle_t handle) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(handle));
@@ -1644,7 +1688,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
 
@@ -1667,7 +1711,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
@@ -1676,14 +1720,13 @@
 }
 
 status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callback) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
+    const auto apc = gAudioPolicyServiceHandler.getClient();
+    if (apc == nullptr) return NO_INIT;
 
-    Mutex::Autolock _l(gLockAPS);
-    if (gAudioPolicyServiceClient == 0) {
-        return NO_INIT;
-    }
-    int ret = gAudioPolicyServiceClient->addAudioPortCallback(callback);
+    std::lock_guard _l(gApsCallbackMutex);
+    const int ret = apc->addAudioPortCallback(callback);
     if (ret == 1) {
         aps->setAudioPortCallbacksEnabled(true);
     }
@@ -1692,14 +1735,13 @@
 
 /*static*/
 status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callback) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
+    const auto apc = gAudioPolicyServiceHandler.getClient();
+    if (apc == nullptr) return NO_INIT;
 
-    Mutex::Autolock _l(gLockAPS);
-    if (gAudioPolicyServiceClient == 0) {
-        return NO_INIT;
-    }
-    int ret = gAudioPolicyServiceClient->removeAudioPortCallback(callback);
+    std::lock_guard _l(gApsCallbackMutex);
+    const int ret = apc->removeAudioPortCallback(callback);
     if (ret == 0) {
         aps->setAudioPortCallbacksEnabled(false);
     }
@@ -1707,14 +1749,13 @@
 }
 
 status_t AudioSystem::addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
+    const auto apc = gAudioPolicyServiceHandler.getClient();
+    if (apc == nullptr) return NO_INIT;
 
-    Mutex::Autolock _l(gLockAPS);
-    if (gAudioPolicyServiceClient == 0) {
-        return NO_INIT;
-    }
-    int ret = gAudioPolicyServiceClient->addAudioVolumeGroupCallback(callback);
+    std::lock_guard _l(gApsCallbackMutex);
+    const int ret = apc->addAudioVolumeGroupCallback(callback);
     if (ret == 1) {
         aps->setAudioVolumeGroupCallbacksEnabled(true);
     }
@@ -1722,14 +1763,13 @@
 }
 
 status_t AudioSystem::removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
+    const auto apc = gAudioPolicyServiceHandler.getClient();
+    if (apc == nullptr) return NO_INIT;
 
-    Mutex::Autolock _l(gLockAPS);
-    if (gAudioPolicyServiceClient == 0) {
-        return NO_INIT;
-    }
-    int ret = gAudioPolicyServiceClient->removeAudioVolumeGroupCallback(callback);
+    std::lock_guard _l(gApsCallbackMutex);
+    const int ret = apc->removeAudioVolumeGroupCallback(callback);
     if (ret == 0) {
         aps->setAudioVolumeGroupCallbacksEnabled(false);
     }
@@ -1745,7 +1785,7 @@
     }
     status_t status = afc->addAudioDeviceCallback(callback, audioIo, portId);
     if (status == NO_ERROR) {
-        const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+        const sp<IAudioFlinger> af = get_audio_flinger();
         if (af != 0) {
             af->registerClient(afc);
         }
@@ -1782,7 +1822,7 @@
 }
 
 audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     const sp<AudioIoDescriptor> desc = getIoDescriptor(audioIo);
     if (desc == 0) {
@@ -1797,7 +1837,7 @@
     if (session == nullptr || ioHandle == nullptr || device == nullptr) {
         return BAD_VALUE;
     }
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::SoundTriggerSession retAidl;
@@ -1811,7 +1851,7 @@
 }
 
 status_t AudioSystem::releaseSoundTriggerSession(audio_session_t session) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
@@ -1819,7 +1859,7 @@
 }
 
 audio_mode_t AudioSystem::getPhoneState() {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return AUDIO_MODE_INVALID;
 
     auto result = [&]() -> ConversionResult<audio_mode_t> {
@@ -1832,7 +1872,7 @@
 }
 
 status_t AudioSystem::registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     size_t mixesSize = std::min(mixes.size(), size_t{MAX_MIXES_PER_POLICY});
@@ -1843,10 +1883,29 @@
     return statusTFromBinderStatus(aps->registerPolicyMixes(mixesAidl, registration));
 }
 
+status_t AudioSystem::getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) {
+    if (!audio_flags::audio_mix_test_api()) {
+        return INVALID_OPERATION;
+    }
+
+    const sp<IAudioPolicyService> aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) return PERMISSION_DENIED;
+
+    std::vector<::android::media::AudioMix> aidlMixes;
+    Status status = aps->getRegisteredPolicyMixes(&aidlMixes);
+
+    for (const auto& aidlMix : aidlMixes) {
+        AudioMix mix = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioMix(aidlMix));
+        mixes.push_back(mix);
+    }
+
+    return statusTFromBinderStatus(status);
+}
+
 status_t AudioSystem::updatePolicyMixes(
         const std::vector<std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>&
                 mixesWithUpdates) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<media::AudioMixUpdate> updatesAidl;
@@ -1865,7 +1924,7 @@
 }
 
 status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -1876,7 +1935,7 @@
 }
 
 status_t AudioSystem::removeUidDeviceAffinities(uid_t uid) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -1885,7 +1944,7 @@
 
 status_t AudioSystem::setUserIdDeviceAffinities(int userId,
                                                 const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
@@ -1897,7 +1956,7 @@
 }
 
 status_t AudioSystem::removeUserIdDeviceAffinities(int userId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
     return statusTFromBinderStatus(aps->removeUserIdDeviceAffinities(userIdAidl));
@@ -1909,7 +1968,7 @@
     if (source == nullptr || attributes == nullptr || portId == nullptr) {
         return BAD_VALUE;
     }
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::AudioPortConfigFw sourceAidl = VALUE_OR_RETURN_STATUS(
@@ -1924,7 +1983,7 @@
 }
 
 status_t AudioSystem::stopAudioSource(audio_port_handle_t portId) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1932,7 +1991,7 @@
 }
 
 status_t AudioSystem::setMasterMono(bool mono) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     return statusTFromBinderStatus(aps->setMasterMono(mono));
 }
@@ -1941,26 +2000,26 @@
     if (mono == nullptr) {
         return BAD_VALUE;
     }
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     return statusTFromBinderStatus(aps->getMasterMono(mono));
 }
 
 status_t AudioSystem::setMasterBalance(float balance) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setMasterBalance(balance);
 }
 
 status_t AudioSystem::getMasterBalance(float* balance) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->getMasterBalance(balance);
 }
 
 float
 AudioSystem::getStreamVolumeDB(audio_stream_type_t stream, int index, audio_devices_t device) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return NAN;
 
     auto result = [&]() -> ConversionResult<float> {
@@ -1978,13 +2037,13 @@
 }
 
 status_t AudioSystem::getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->getMicrophones(microphones);
 }
 
 status_t AudioSystem::setAudioHalPids(const std::vector<pid_t>& pids) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) return PERMISSION_DENIED;
     return af->setAudioHalPids(pids);
 }
@@ -1998,7 +2057,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     Int numSurroundFormatsAidl;
     numSurroundFormatsAidl.value =
@@ -2025,7 +2084,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     Int numSurroundFormatsAidl;
     numSurroundFormatsAidl.value =
@@ -2043,7 +2102,7 @@
 }
 
 status_t AudioSystem::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     AudioFormatDescription audioFormatAidl = VALUE_OR_RETURN_STATUS(
@@ -2053,7 +2112,7 @@
 }
 
 status_t AudioSystem::setAssistantServicesUids(const std::vector<uid_t>& uids) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
@@ -2062,7 +2121,7 @@
 }
 
 status_t AudioSystem::setActiveAssistantServicesUids(const std::vector<uid_t>& activeUids) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<int32_t> activeUidsAidl = VALUE_OR_RETURN_STATUS(
@@ -2071,7 +2130,7 @@
 }
 
 status_t AudioSystem::setA11yServicesUids(const std::vector<uid_t>& uids) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
@@ -2080,7 +2139,7 @@
 }
 
 status_t AudioSystem::setCurrentImeUid(uid_t uid) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -2088,7 +2147,7 @@
 }
 
 bool AudioSystem::isHapticPlaybackSupported() {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return false;
 
     auto result = [&]() -> ConversionResult<bool> {
@@ -2101,7 +2160,7 @@
 }
 
 bool AudioSystem::isUltrasoundSupported() {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return false;
 
     auto result = [&]() -> ConversionResult<bool> {
@@ -2119,7 +2178,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<AudioFormatDescription> formatsAidl;
@@ -2135,7 +2194,7 @@
 }
 
 status_t AudioSystem::listAudioProductStrategies(AudioProductStrategyVector& strategies) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<media::AudioProductStrategy> strategiesAidl;
@@ -2197,7 +2256,7 @@
 status_t AudioSystem::getProductStrategyFromAudioAttributes(const audio_attributes_t& aa,
                                                             product_strategy_t& productStrategy,
                                                             bool fallbackOnDefault) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
@@ -2213,7 +2272,7 @@
 }
 
 status_t AudioSystem::listAudioVolumeGroups(AudioVolumeGroupVector& groups) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<media::AudioVolumeGroup> groupsAidl;
@@ -2227,7 +2286,7 @@
 status_t AudioSystem::getVolumeGroupFromAudioAttributes(const audio_attributes_t &aa,
                                                         volume_group_t& volumeGroup,
                                                         bool fallbackOnDefault) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
@@ -2240,13 +2299,13 @@
 }
 
 status_t AudioSystem::setRttEnabled(bool enabled) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     return statusTFromBinderStatus(aps->setRttEnabled(enabled));
 }
 
 bool AudioSystem::isCallScreenModeSupported() {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return false;
 
     auto result = [&]() -> ConversionResult<bool> {
@@ -2261,7 +2320,7 @@
 status_t AudioSystem::setDevicesRoleForStrategy(product_strategy_t strategy,
                                                 device_role_t role,
                                                 const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2278,7 +2337,7 @@
 status_t AudioSystem::removeDevicesRoleForStrategy(product_strategy_t strategy,
                                                    device_role_t role,
                                                    const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2294,7 +2353,7 @@
 
 status_t
 AudioSystem::clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2307,7 +2366,7 @@
 status_t AudioSystem::getDevicesForRoleAndStrategy(product_strategy_t strategy,
                                                    device_role_t role,
                                                    AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2325,7 +2384,7 @@
 status_t AudioSystem::setDevicesRoleForCapturePreset(audio_source_t audioSource,
                                                      device_role_t role,
                                                      const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2343,7 +2402,7 @@
 status_t AudioSystem::addDevicesRoleForCapturePreset(audio_source_t audioSource,
                                                      device_role_t role,
                                                      const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2359,7 +2418,7 @@
 
 status_t AudioSystem::removeDevicesRoleForCapturePreset(
         audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2375,7 +2434,7 @@
 
 status_t AudioSystem::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
                                                        device_role_t role) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2389,7 +2448,7 @@
 status_t AudioSystem::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
                                                         device_role_t role,
                                                         AudioDeviceTypeAddrVector& devices) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2407,7 +2466,7 @@
 
 status_t AudioSystem::getSpatializer(const sp<media::INativeSpatializerCallback>& callback,
                                           sp<media::ISpatializer>* spatializer) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (spatializer == nullptr) {
         return BAD_VALUE;
     }
@@ -2426,7 +2485,7 @@
                                     const audio_config_t *config,
                                     const AudioDeviceTypeAddrVector &devices,
                                     bool *canBeSpatialized) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (canBeSpatialized == nullptr) {
         return BAD_VALUE;
     }
@@ -2450,7 +2509,7 @@
 
 status_t AudioSystem::getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
                                             sp<media::ISoundDose>* soundDose) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2469,7 +2528,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2493,7 +2552,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2512,7 +2571,7 @@
 
 status_t AudioSystem::setRequestedLatencyMode(
             audio_io_handle_t output, audio_latency_mode_t mode) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2521,7 +2580,7 @@
 
 status_t AudioSystem::getSupportedLatencyModes(audio_io_handle_t output,
         std::vector<audio_latency_mode_t>* modes) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2529,7 +2588,7 @@
 }
 
 status_t AudioSystem::setBluetoothVariableLatencyEnabled(bool enabled) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2538,7 +2597,7 @@
 
 status_t AudioSystem::isBluetoothVariableLatencyEnabled(
         bool *enabled) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2547,7 +2606,7 @@
 
 status_t AudioSystem::supportsBluetoothVariableLatency(
         bool *support) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2555,7 +2614,7 @@
 }
 
 status_t AudioSystem::getAudioPolicyConfig(media::AudioPolicyConfig *config) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2583,15 +2642,15 @@
     }
 
     binder::Status setCaptureState(bool active) override {
-        Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+        std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
         mListener->onStateChanged(active);
         return binder::Status::ok();
     }
 
     void binderDied(const wp<IBinder>&) override {
-        Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+        std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
         mListener->onServiceDied();
-        gSoundTriggerCaptureStateListener = nullptr;
+        AudioSystem::gSoundTriggerCaptureStateListener = nullptr;
     }
 
 private:
@@ -2604,13 +2663,12 @@
         const sp<CaptureStateListener>& listener) {
     LOG_ALWAYS_FATAL_IF(listener == nullptr);
 
-    const sp<IAudioPolicyService>& aps =
-            AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
 
-    Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+    std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
     gSoundTriggerCaptureStateListener = new CaptureStateListenerImpl(aps, listener);
     gSoundTriggerCaptureStateListener->init();
 
@@ -2619,7 +2677,7 @@
 
 status_t AudioSystem::setVibratorInfos(
         const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2628,7 +2686,7 @@
 
 status_t AudioSystem::getMmapPolicyInfo(
         AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2636,7 +2694,7 @@
 }
 
 int32_t AudioSystem::getAAudioMixerBurstCount() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2644,7 +2702,7 @@
 }
 
 int32_t AudioSystem::getAAudioHardwareBurstMinUsec() {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2653,7 +2711,7 @@
 
 status_t AudioSystem::getSupportedMixerAttributes(
         audio_port_handle_t portId, std::vector<audio_mixer_attributes_t> *mixerAttrs) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2673,7 +2731,7 @@
                                                   audio_port_handle_t portId,
                                                   uid_t uid,
                                                   const audio_mixer_attributes_t *mixerAttr) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2693,7 +2751,7 @@
         const audio_attributes_t *attr,
         audio_port_handle_t portId,
         std::optional<audio_mixer_attributes_t> *mixerAttr) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2716,7 +2774,7 @@
 status_t AudioSystem::clearPreferredMixerAttributes(const audio_attributes_t *attr,
                                                     audio_port_handle_t portId,
                                                     uid_t uid) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == nullptr) {
         return PERMISSION_DENIED;
     }
@@ -2733,45 +2791,28 @@
 
 int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
         const sp<AudioPortCallback>& callback) {
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
-        if (mAudioPortCallbacks[i] == callback) {
-            return -1;
-        }
-    }
-    mAudioPortCallbacks.add(callback);
-    return mAudioPortCallbacks.size();
+    std::lock_guard _l(mMutex);
+    return mAudioPortCallbacks.insert(callback).second ? mAudioPortCallbacks.size() : -1;
 }
 
 int AudioSystem::AudioPolicyServiceClient::removeAudioPortCallback(
         const sp<AudioPortCallback>& callback) {
-    Mutex::Autolock _l(mLock);
-    size_t i;
-    for (i = 0; i < mAudioPortCallbacks.size(); i++) {
-        if (mAudioPortCallbacks[i] == callback) {
-            break;
-        }
-    }
-    if (i == mAudioPortCallbacks.size()) {
-        return -1;
-    }
-    mAudioPortCallbacks.removeAt(i);
-    return mAudioPortCallbacks.size();
+    std::lock_guard _l(mMutex);
+    return mAudioPortCallbacks.erase(callback) > 0 ? mAudioPortCallbacks.size() : -1;
 }
 
-
 Status AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate() {
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
-        mAudioPortCallbacks[i]->onAudioPortListUpdate();
+    std::lock_guard _l(mMutex);
+    for (const auto& callback : mAudioPortCallbacks) {
+        callback->onAudioPortListUpdate();
     }
     return Status::ok();
 }
 
 Status AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate() {
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
-        mAudioPortCallbacks[i]->onAudioPatchListUpdate();
+    std::lock_guard _l(mMutex);
+    for (const auto& callback : mAudioPortCallbacks) {
+        callback->onAudioPatchListUpdate();
     }
     return Status::ok();
 }
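
The callback bookkeeping above moves from a Vector with manual linear scans to a std::set guarded by std::lock_guard: insert().second reports whether the callback was newly added and erase() reports whether it was present, which is enough to keep the old "new size or -1" return contract. A framework-free sketch of the same idiom, using std::shared_ptr in place of sp<> (all names illustrative):

    #include <memory>
    #include <mutex>
    #include <set>

    // Illustrative registry mirroring the add/remove contract above: returns the
    // new size on success, -1 if the callback was already registered (add) or
    // was not registered (remove).
    template <typename Callback>
    class CallbackRegistry {
    public:
        int add(const std::shared_ptr<Callback>& cb) {
            std::lock_guard _l(mMutex);
            // insert().second is true only when the element was not already present.
            return mCallbacks.insert(cb).second ? static_cast<int>(mCallbacks.size()) : -1;
        }
        int remove(const std::shared_ptr<Callback>& cb) {
            std::lock_guard _l(mMutex);
            // erase() returns the number of elements removed (0 or 1 for a set).
            return mCallbacks.erase(cb) > 0 ? static_cast<int>(mCallbacks.size()) : -1;
        }
    private:
        std::mutex mMutex;
        std::set<std::shared_ptr<Callback>> mCallbacks;  // ordered by pointer value
    };
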
@@ -2779,30 +2820,16 @@
 // ----------------------------------------------------------------------------
 int AudioSystem::AudioPolicyServiceClient::addAudioVolumeGroupCallback(
         const sp<AudioVolumeGroupCallback>& callback) {
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
-        if (mAudioVolumeGroupCallback[i] == callback) {
-            return -1;
-        }
-    }
-    mAudioVolumeGroupCallback.add(callback);
-    return mAudioVolumeGroupCallback.size();
+    std::lock_guard _l(mMutex);
+    return mAudioVolumeGroupCallbacks.insert(callback).second
+            ? mAudioVolumeGroupCallbacks.size() : -1;
 }
 
 int AudioSystem::AudioPolicyServiceClient::removeAudioVolumeGroupCallback(
         const sp<AudioVolumeGroupCallback>& callback) {
-    Mutex::Autolock _l(mLock);
-    size_t i;
-    for (i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
-        if (mAudioVolumeGroupCallback[i] == callback) {
-            break;
-        }
-    }
-    if (i == mAudioVolumeGroupCallback.size()) {
-        return -1;
-    }
-    mAudioVolumeGroupCallback.removeAt(i);
-    return mAudioVolumeGroupCallback.size();
+    std::lock_guard _l(mMutex);
+    return mAudioVolumeGroupCallbacks.erase(callback) > 0
+            ? mAudioVolumeGroupCallbacks.size() : -1;
 }
 
 Status AudioSystem::AudioPolicyServiceClient::onAudioVolumeGroupChanged(int32_t group,
@@ -2811,9 +2838,9 @@
             aidl2legacy_int32_t_volume_group_t(group));
     int flagsLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(flags));
 
-    Mutex::Autolock _l(mLock);
-    for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
-        mAudioVolumeGroupCallback[i]->onAudioVolumeGroupChanged(groupLegacy, flagsLegacy);
+    std::lock_guard _l(mMutex);
+    for (const auto& callback : mAudioVolumeGroupCallbacks) {
+        callback->onAudioVolumeGroupChanged(groupLegacy, flagsLegacy);
     }
     return Status::ok();
 }
@@ -2827,7 +2854,7 @@
     int stateLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(state));
     dynamic_policy_callback cb = NULL;
     {
-        Mutex::Autolock _l(AudioSystem::gLock);
+        std::lock_guard _l(AudioSystem::gMutex);
         cb = gDynPolicyCallback;
     }
 
@@ -2848,7 +2875,7 @@
         AudioSource source) {
     record_config_callback cb = NULL;
     {
-        Mutex::Autolock _l(AudioSystem::gLock);
+        std::lock_guard _l(AudioSystem::gMutex);
         cb = gRecordConfigCallback;
     }
 
@@ -2881,7 +2908,7 @@
 Status AudioSystem::AudioPolicyServiceClient::onRoutingUpdated() {
     routing_callback cb = NULL;
     {
-        Mutex::Autolock _l(AudioSystem::gLock);
+        std::lock_guard _l(AudioSystem::gMutex);
         cb = gRoutingCallback;
     }
 
@@ -2894,7 +2921,7 @@
 Status AudioSystem::AudioPolicyServiceClient::onVolumeRangeInitRequest() {
     vol_range_init_req_callback cb = NULL;
     {
-        Mutex::Autolock _l(AudioSystem::gLock);
+        std::lock_guard _l(AudioSystem::gMutex);
         cb = gVolRangeInitReqCallback;
     }
 
@@ -2906,12 +2933,12 @@
 
 void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) {
     {
-        Mutex::Autolock _l(mLock);
-        for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
-            mAudioPortCallbacks[i]->onServiceDied();
+        std::lock_guard _l(mMutex);
+        for (const auto& callback : mAudioPortCallbacks) {
+            callback->onServiceDied();
         }
-        for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
-            mAudioVolumeGroupCallback[i]->onServiceDied();
+        for (const auto& callback : mAudioVolumeGroupCallbacks) {
+            callback->onServiceDied();
         }
     }
     AudioSystem::clearAudioPolicyService();
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 565427b..161c4d5 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -233,25 +233,9 @@
     return NO_ERROR;
 }
 
-AudioTrack::AudioTrack() : AudioTrack(AttributionSourceState())
-{
-}
-
 AudioTrack::AudioTrack(const AttributionSourceState& attributionSource)
-    : mStatus(NO_INIT),
-      mState(STATE_STOPPED),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mPausedPosition(0),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mClientAttributionSource(attributionSource),
-      mAudioTrackCallback(new AudioTrackCallback())
+    : mClientAttributionSource(attributionSource)
 {
-    mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
-    mAttributes.usage = AUDIO_USAGE_UNKNOWN;
-    mAttributes.flags = AUDIO_FLAG_NONE;
-    strcpy(mAttributes.tags, "");
 }
 
 AudioTrack::AudioTrack(
@@ -271,21 +255,12 @@
         bool doNotReconnect,
         float maxRequiredSpeed,
         audio_port_handle_t selectedDeviceId)
-    : mStatus(NO_INIT),
-      mState(STATE_STOPPED),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mPausedPosition(0),
-      mAudioTrackCallback(new AudioTrackCallback())
 {
-    mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
-
-    // make_unique does not aggregate init until c++20
-    mSetParams = std::unique_ptr<SetParams>{
-            new SetParams{streamType, sampleRate, format, channelMask, frameCount, flags, callback,
-                          notificationFrames, 0 /*sharedBuffer*/, false /*threadCanCallJava*/,
-                          sessionId, transferType, offloadInfo, attributionSource, pAttributes,
-                          doNotReconnect, maxRequiredSpeed, selectedDeviceId}};
+    mSetParams = std::make_unique<SetParams>(
+        streamType, sampleRate, format, channelMask, frameCount, flags, callback,
+        notificationFrames, nullptr /*sharedBuffer*/, false /*threadCanCallJava*/,
+        sessionId, transferType, offloadInfo, attributionSource, pAttributes,
+        doNotReconnect, maxRequiredSpeed, selectedDeviceId);
 }
 
 namespace {
@@ -344,13 +319,6 @@
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
         float maxRequiredSpeed)
-    : mStatus(NO_INIT),
-      mState(STATE_STOPPED),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mPausedPosition(0),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mAudioTrackCallback(new AudioTrackCallback())
 {
     mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
 
@@ -400,9 +368,6 @@
 }
 
 void AudioTrack::stopAndJoinCallbacks() {
-    // Prevent nullptr crash if it did not open properly.
-    if (mStatus != NO_ERROR) return;
-
     // Make sure that callback function exits in the case where
     // it is looping on buffer full condition in obtainBuffer().
     // Otherwise the callback thread will never exit.
@@ -919,6 +884,7 @@
     const int64_t beginNs = systemTime();
 
     AutoMutex lock(mLock);
+    if (mProxy == nullptr) return;  // not successfully initialized.
     mediametrics::Defer defer([&]() {
         mediametrics::LogItem(mMetricsId)
             .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
@@ -1201,6 +1167,13 @@
     mSampleRate = rate;
     mProxy->setSampleRate(effectiveSampleRate);
 
+    mediametrics::LogItem(mMetricsId)
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETSAMPLERATE)
+            .set(AMEDIAMETRICS_PROP_PREFIX_EFFECTIVE AMEDIAMETRICS_PROP_SAMPLERATE,
+                    static_cast<int32_t>(effectiveSampleRate))
+            .set(AMEDIAMETRICS_PROP_SAMPLERATE, static_cast<int32_t>(rate))
+            .record();
+
     return NO_ERROR;
 }
 
@@ -1699,29 +1672,42 @@
 }
 
 status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) {
+    status_t result = NO_ERROR;
     AutoMutex lock(mLock);
-    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d mRoutedDeviceId %d",
-            __func__, mPortId, deviceId, mSelectedDeviceId, mRoutedDeviceId);
+    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
+            __func__, mPortId, deviceId, mSelectedDeviceId);
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
-            // allow track invalidation when track is not playing to propagate
-            // the updated mSelectedDeviceId
-            if (isPlaying_l()) {
-                if (mSelectedDeviceId != mRoutedDeviceId) {
-                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
-                    mProxy->interrupt();
+            if (isOffloadedOrDirect_l()) {
+                if (isPlaying_l()) {
+                    ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. "
+                          "State: %s.",
+                            __func__, mPortId, stateToString(mState));
+                    result = INVALID_OPERATION;
+                } else {
+                    ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
+                    result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
                 }
             } else {
-                // if the track is idle, try to restore now and
-                // defer to next start if not possible
-                if (restoreTrack_l("setOutputDevice") != OK) {
-                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                // allow track invalidation when track is not playing to propagate
+                // the updated mSelectedDeviceId
+                if (isPlaying_l()) {
+                    if (mSelectedDeviceId != mRoutedDeviceId) {
+                        android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                        mProxy->interrupt();
+                    }
+                } else {
+                    // if the track is idle, try to restore now and
+                    // defer to next start if not possible
+                    if (restoreTrack_l("setOutputDevice") != OK) {
+                        android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                    }
                 }
             }
         }
     }
-    return NO_ERROR;
+    return result;
 }
 
 audio_port_handle_t AudioTrack::getOutputDevice() {
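
With the change above, AudioTrack::setOutputDevice() no longer unconditionally returns NO_ERROR: for an offloaded or direct track that is still playing it refuses the route change with INVALID_OPERATION, and for a stopped or flushed one it force-restores the track on the new device. A hedged sketch of one way a caller could react (helper name illustrative; whether an immediate retry succeeds depends on the track having fully left the active state):

    #include <media/AudioTrack.h>

    using namespace android;

    // Illustrative caller-side handling: if the route change is refused because
    // the offloaded/direct track is still active, stop and flush it, then retry
    // so the forced-restore path can recreate the track on the new device.
    status_t routeOffloadedTrack(const sp<AudioTrack>& track, audio_port_handle_t deviceId) {
        status_t status = track->setOutputDevice(deviceId);
        if (status == INVALID_OPERATION) {
            track->stop();
            track->flush();
            status = track->setOutputDevice(deviceId);
        }
        return status;
    }
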
@@ -2415,12 +2401,14 @@
     int32_t flags = android_atomic_and(
         ~(CBLK_UNDERRUN | CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL | CBLK_BUFFER_END), &mCblk->mFlags);
 
+    const bool isOffloaded = isOffloaded_l();
+    const bool isOffloadedOrDirect = isOffloadedOrDirect_l();
     // Check for track invalidation
     if (flags & CBLK_INVALID) {
         // for offloaded tracks restoreTrack_l() will just update the sequence and clear
         // AudioSystem cache. We should not exit here but after calling the callback so
         // that the upper layers can recreate the track
-        if (!isOffloadedOrDirect_l() || (mSequence == mObservedSequence)) {
+        if (!isOffloadedOrDirect || (mSequence == mObservedSequence)) {
             status_t status __unused = restoreTrack_l("processAudioBuffer");
             // FIXME unused status
             // after restoration, continue below to make sure that the loop and buffer events
@@ -2590,7 +2578,7 @@
         mObservedSequence = sequence;
         callback->onNewIAudioTrack();
         // for offloaded tracks, just wait for the upper layers to recreate the track
-        if (isOffloadedOrDirect()) {
+        if (isOffloadedOrDirect) {
             return NS_INACTIVE;
         }
     }
@@ -2678,7 +2666,7 @@
                 __func__, mPortId, mRemainingFrames, avail, audioBuffer.frameCount, nonContig, err);
         if (err != NO_ERROR) {
             if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR ||
-                    (isOffloaded() && (err == DEAD_OBJECT))) {
+                    (isOffloaded && (err == DEAD_OBJECT))) {
                 // FIXME bug 25195759
                 return 1000000;
             }
@@ -2764,7 +2752,7 @@
             // buffer size and skip the loop entirely.
 
             nsecs_t myns;
-            if (audio_has_proportional_frames(mFormat)) {
+            if (!isOffloaded && audio_has_proportional_frames(mFormat)) {
                 // time to wait based on buffer occupancy
                 const nsecs_t datans = mRemainingFrames <= avail ? 0 :
                         framesToNanoseconds(mRemainingFrames - avail, sampleRate, speed);
@@ -2836,7 +2824,7 @@
     return 0;
 }
 
-status_t AudioTrack::restoreTrack_l(const char *from)
+status_t AudioTrack::restoreTrack_l(const char *from, bool forceRestore)
 {
     status_t result = NO_ERROR;  // logged: make sure to set this before returning.
     const int64_t beginNs = systemTime();
@@ -2857,7 +2845,8 @@
     // output parameters and new IAudioFlinger in createTrack_l()
     AudioSystem::clearAudioConfigCache();
 
-    if (isOffloadedOrDirect_l() || mDoNotReconnect) {
+    if (!forceRestore &&
+        (isOffloadedOrDirect_l() || mDoNotReconnect)) {
         // FIXME re-creation of offloaded and direct tracks is not yet implemented;
         // Disabled since (1) timestamp correction is not implemented for non-PCM and
         // (2) We pre-empt existing direct tracks on resource constraint, so these tracks
@@ -3048,6 +3037,7 @@
         const sp<VolumeShaper::Configuration>& configuration,
         const sp<VolumeShaper::Operation>& operation)
 {
+    const int64_t beginNs = systemTime();
     AutoMutex lock(mLock);
     mVolumeHandler->setIdIfNecessary(configuration);
     media::VolumeShaperConfiguration config;
@@ -3055,6 +3045,18 @@
     media::VolumeShaperOperation op;
     operation->writeToParcelable(&op);
     VolumeShaper::Status status;
+
+    mediametrics::Defer defer([&] {
+        mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_APPLYVOLUMESHAPER)
+                .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
+                .set(AMEDIAMETRICS_PROP_STATE, stateToString(mState))
+                .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)status)
+                .set(AMEDIAMETRICS_PROP_TOSTRING, configuration->toString()
+                                 .append(" ")
+                                 .append(operation->toString()))
+                .record(); });
+
     mAudioTrack->applyVolumeShaper(config, op, &status);
 
     if (status == DEAD_OBJECT) {
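
applyVolumeShaper() above builds its mediametrics item inside a Defer object created before the binder call, so the logged status, state, and execution time reflect whatever happens afterwards, on every return path. A framework-free sketch of the same scope-exit idea (ScopeExit and applyAndLog are illustrative; the real mediametrics::Defer also carries the metrics plumbing):

    #include <chrono>
    #include <cstdio>
    #include <functional>
    #include <utility>

    // Illustrative scope-exit guard: the callable runs when the guard is destroyed,
    // regardless of which return path the enclosing function takes.
    class ScopeExit {
    public:
        explicit ScopeExit(std::function<void()> f) : mF(std::move(f)) {}
        ~ScopeExit() { if (mF) mF(); }
        ScopeExit(const ScopeExit&) = delete;
        ScopeExit& operator=(const ScopeExit&) = delete;
    private:
        std::function<void()> mF;
    };

    int applyAndLog() {
        const auto begin = std::chrono::steady_clock::now();
        int status = -1;
        ScopeExit logOnExit([&] {
            const auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(
                    std::chrono::steady_clock::now() - begin).count();
            std::printf("status=%d executionTimeNs=%lld\n", status, static_cast<long long>(ns));
        });
        status = 0;  // ... perform the real call here; early returns are also logged ...
        return status;
    }
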
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 48f8992..e0dca2d 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -350,34 +350,6 @@
     return statusTFromBinderStatus(mDelegate->setStreamMute(streamAidl, muted));
 }
 
-float AudioFlingerClientAdapter::streamVolume(audio_stream_type_t stream,
-                                              audio_io_handle_t output) const {
-    auto result = [&]() -> ConversionResult<float> {
-        AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
-                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
-        int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
-        float aidlRet;
-        RETURN_IF_ERROR(statusTFromBinderStatus(
-                mDelegate->streamVolume(streamAidl, outputAidl, &aidlRet)));
-        return aidlRet;
-    }();
-    // Failure is ignored.
-    return result.value_or(0.f);
-}
-
-bool AudioFlingerClientAdapter::streamMute(audio_stream_type_t stream) const {
-    auto result = [&]() -> ConversionResult<bool> {
-        AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
-                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
-        bool aidlRet;
-        RETURN_IF_ERROR(statusTFromBinderStatus(
-                mDelegate->streamMute(streamAidl, &aidlRet)));
-        return aidlRet;
-    }();
-    // Failure is ignored.
-    return result.value_or(false);
-}
-
 status_t AudioFlingerClientAdapter::setMode(audio_mode_t mode) {
     AudioMode modeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(mode));
     return statusTFromBinderStatus(mDelegate->setMode(modeAidl));
@@ -918,11 +890,22 @@
     return OK;
 }
 
+status_t AudioFlingerClientAdapter::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMuted) {
+    return statusTFromBinderStatus(mDelegate->setTracksInternalMute(tracksInternalMuted));
+}
+
+status_t AudioFlingerClientAdapter::resetReferencesForTest() {
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mDelegate->resetReferencesForTest()));
+    return OK;
+}
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
         const sp<AudioFlingerServerAdapter::Delegate>& delegate) : mDelegate(delegate) {
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    setInheritRt(true);
 }
 
 status_t AudioFlingerServerAdapter::onTransact(uint32_t code,
@@ -1029,23 +1012,6 @@
     return Status::fromStatusT(mDelegate->setStreamMute(streamLegacy, muted));
 }
 
-Status AudioFlingerServerAdapter::streamVolume(AudioStreamType stream, int32_t output,
-                                               float* _aidl_return) {
-    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
-    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_int32_t_audio_io_handle_t(output));
-    *_aidl_return = mDelegate->streamVolume(streamLegacy, outputLegacy);
-    return Status::ok();
-}
-
-Status AudioFlingerServerAdapter::streamMute(AudioStreamType stream, bool* _aidl_return) {
-    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
-    *_aidl_return = mDelegate->streamMute(streamLegacy);
-    return Status::ok();
-}
-
 Status AudioFlingerServerAdapter::setMode(AudioMode mode) {
     audio_mode_t modeLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioMode_audio_mode_t(mode));
     return Status::fromStatusT(mDelegate->setMode(modeLegacy));
@@ -1476,4 +1442,14 @@
     return Status::ok();
 }
 
+Status AudioFlingerServerAdapter::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    return Status::fromStatusT(mDelegate->setTracksInternalMute(tracksInternalMute));
+}
+
+Status AudioFlingerServerAdapter::resetReferencesForTest() {
+    RETURN_BINDER_IF_ERROR(mDelegate->resetReferencesForTest());
+    return Status::ok();
+}
+
 } // namespace android
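
setTracksInternalMute() above is a straight pass-through on both adapter sides, taking the new TrackInternalMuteInfo parcelable (portId interpreted as audio_port_handle_t, plus a muted flag). A hedged sketch of building the request on the client side, assuming the caller already holds an IAudioFlinger (for example via AudioSystem::get_audio_flinger(), as used elsewhere in this library); the helper name and exact include paths are illustrative:

    #include <android/media/TrackInternalMuteInfo.h>
    #include <media/AidlConversion.h>
    #include <media/IAudioFlinger.h>
    #include <vector>

    using namespace android;

    // Illustrative helper: mark a set of tracks, identified by audio_port_handle_t,
    // as internally muted (or unmuted).
    status_t setTracksMuted(const sp<IAudioFlinger>& af,
                            const std::vector<audio_port_handle_t>& portIds, bool muted) {
        std::vector<media::TrackInternalMuteInfo> infos;
        infos.reserve(portIds.size());
        for (const audio_port_handle_t id : portIds) {
            media::TrackInternalMuteInfo info;
            info.portId = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(id));
            info.muted = muted;
            infos.push_back(info);
        }
        return af->setTracksInternalMute(infos);
    }
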
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 60b08fa..163a359 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -242,6 +242,8 @@
     legacy.mCbFlags = VALUE_OR_RETURN(aidl2legacy_AudioMixCallbackFlag_uint32_t_mask(aidl.cbFlags));
     legacy.mAllowPrivilegedMediaPlaybackCapture = aidl.allowPrivilegedMediaPlaybackCapture;
     legacy.mVoiceCommunicationCaptureAllowed = aidl.voiceCommunicationCaptureAllowed;
+    legacy.mToken = aidl.mToken;
+    legacy.mVirtualDeviceId = aidl.mVirtualDeviceId;
     return legacy;
 }
 
@@ -265,6 +267,8 @@
     aidl.cbFlags = VALUE_OR_RETURN(legacy2aidl_uint32_t_AudioMixCallbackFlag_mask(legacy.mCbFlags));
     aidl.allowPrivilegedMediaPlaybackCapture = legacy.mAllowPrivilegedMediaPlaybackCapture;
     aidl.voiceCommunicationCaptureAllowed = legacy.mVoiceCommunicationCaptureAllowed;
+    aidl.mToken = legacy.mToken;
+    aidl.mVirtualDeviceId = legacy.mVirtualDeviceId;
     return aidl;
 }
 
@@ -375,6 +379,8 @@
             return AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS;
         case media::AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL:
             return AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL;
+        case media::AudioPolicyForcedConfig::BT_BLE:
+            return AUDIO_POLICY_FORCE_BT_BLE;
     }
     return unexpected(BAD_VALUE);
 }
@@ -414,6 +420,8 @@
             return media::AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS;
         case AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL:
             return media::AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL;
+        case AUDIO_POLICY_FORCE_BT_BLE:
+            return media::AudioPolicyForcedConfig::BT_BLE;
         case AUDIO_POLICY_FORCE_CFG_CNT:
             break;
     }
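
The forced-config conversions above are exhaustive switches in both directions, so the new BT_BLE value has to appear in each of them or the conversion falls through to unexpected(BAD_VALUE). A small gtest-style round-trip check, hedged: the conversion function names follow the legacy2aidl_/aidl2legacy_ naming pattern used in this file, and the test fixture itself is illustrative:

    #include <gtest/gtest.h>
    #include <media/PolicyAidlConversion.h>

    using namespace android;

    TEST(PolicyAidlConversionTest, ForcedConfigBtBleRoundTrip) {
        const auto aidl = legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(
                AUDIO_POLICY_FORCE_BT_BLE);
        ASSERT_TRUE(aidl.has_value());
        const auto legacy = aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(
                aidl.value());
        ASSERT_TRUE(legacy.has_value());
        EXPECT_EQ(AUDIO_POLICY_FORCE_BT_BLE, legacy.value());
    }
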
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index 234e858..68dba34 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -43,10 +43,9 @@
     }
   ],
   "postsubmit": [
-  // TODO(b/302036943): Enable once we make it pass with AIDL HAL on CF.
-  //   {
-  //      "name": "audioeffect_analysis"
-  //   },
+    {
+      "name": "audioeffect_analysis"
+    },
     {
       "name": "CtsVirtualDevicesTestCases",
       "options" : [
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 9c4ccb8..79fcea8 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -872,6 +872,18 @@
                         { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
           .repeatCnt = 3,
           .repeatSegment = 0 },                              // TONE_NZ_CALL_WAITING
+        { .segments = { { .duration = 500, .waveFreq = { 425, 0 }, 0, 0 },
+                        { .duration = 250, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = ToneGenerator::TONEGEN_INF,
+          .repeatSegment = 0 },                             // TONE_MY_CONGESTION
+        { .segments = { { .duration = 400, .waveFreq = { 425, 0 }, 0, 0 },
+                        { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 400, .waveFreq = { 425, 0 }, 0, 0 },
+                        { .duration = 2000, .waveFreq = { 0 }, 0, 0},
+                        { .duration = 0, .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = ToneGenerator::TONEGEN_INF,
+          .repeatSegment = 0 }                              // TONE_MY_RINGTONE
 };
 
 // Used by ToneGenerator::getToneForRegion() to convert user specified supervisory tone type
@@ -976,6 +988,16 @@
             TONE_SUP_ERROR,               // TONE_SUP_ERROR
             TONE_NZ_CALL_WAITING,         // TONE_SUP_CALL_WAITING
             TONE_GB_RINGTONE              // TONE_SUP_RINGTONE
+        },
+        {   // MALAYSIA
+            TONE_SUP_DIAL,                // TONE_SUP_DIAL
+            TONE_SUP_BUSY,                // TONE_SUP_BUSY
+            TONE_MY_CONGESTION,           // TONE_SUP_CONGESTION
+            TONE_SUP_RADIO_ACK,           // TONE_SUP_RADIO_ACK
+            TONE_SUP_RADIO_NOTAVAIL,      // TONE_SUP_RADIO_NOTAVAIL
+            TONE_SUP_ERROR,               // TONE_SUP_ERROR
+            TONE_SUP_CALL_WAITING,        // TONE_SUP_CALL_WAITING
+            TONE_MY_RINGTONE              // TONE_SUP_RINGTONE
         }
 };
 
@@ -1055,6 +1077,8 @@
         mRegion = TAIWAN;
     } else if (strstr(value, "nz") != NULL) {
         mRegion = NZ;
+    } else if (strstr(value, "my") != NULL) {
+        mRegion = MY;
     } else {
         mRegion = CEPT;
     }
@@ -1240,13 +1264,10 @@
                     nsec += 1000000000;
                 }
 
-                if ((sec + 1) > ((time_t)(INT_MAX / mSamplingRate))) {
-                    mMaxSmp = sec * mSamplingRate;
-                } else {
-                    // mSamplingRate is always > 1000
-                    sec = sec * 1000 + nsec / 1000000; // duration in milliseconds
-                    mMaxSmp = (unsigned int)(((int64_t)sec * mSamplingRate) / 1000);
-                }
+                const uint64_t msec = static_cast<uint64_t>(sec) * 1000 + nsec / 1'000'000;
+                mMaxSmp = std::min(static_cast<uint64_t>(TONEGEN_INF - 1),
+                        msec * mSamplingRate / 1000);
+
                 ALOGV("stopTone() forcing mMaxSmp to %d, total for far %" PRIu64, mMaxSmp,
                       mTotalSmp);
             } else {
@@ -1614,14 +1635,11 @@
 
     mpToneDesc = mpNewToneDesc;
 
-    if (mDurationMs == -1) {
+    if (mDurationMs < 0) {  // mDurationMs is signed, treat all neg numbers as INF.
         mMaxSmp = TONEGEN_INF;
     } else {
-        if (mDurationMs > (int)(TONEGEN_INF / mSamplingRate)) {
-            mMaxSmp = (mDurationMs / 1000) * mSamplingRate;
-        } else {
-            mMaxSmp = (mDurationMs * mSamplingRate) / 1000;
-        }
+        mMaxSmp = std::min(static_cast<uint64_t>(TONEGEN_INF - 1),
+                static_cast<uint64_t>(mDurationMs) * mSamplingRate / 1000);
         ALOGV("prepareWave, duration limited to %d ms", mDurationMs);
     }
 
@@ -1652,7 +1670,8 @@
     if (mpToneDesc->segments[0].duration == TONEGEN_INF) {
         mNextSegSmp = TONEGEN_INF;
     } else{
-        mNextSegSmp = (mpToneDesc->segments[0].duration * mSamplingRate) / 1000;
+        mNextSegSmp = std::min(static_cast<uint64_t>(TONEGEN_INF - 1),
+                static_cast<uint64_t>(mpToneDesc->segments[0].duration) * mSamplingRate / 1000);
     }
 
     return true;
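
The ToneGenerator changes above replace duration-to-sample arithmetic that could overflow 32-bit math (duration in ms times sampling rate) with 64-bit math clamped just below TONEGEN_INF, which is reserved as the "play forever" sentinel. For example, 60'000 ms at 48'000 Hz is 2'880'000'000 samples, past INT_MAX. A self-contained sketch of the clamped conversion (the TONEGEN_INF value used here is an assumption, for illustration only):

    #include <algorithm>
    #include <cstdint>

    // Assumed sentinel for "infinite" tone length, standing in for ToneGenerator::TONEGEN_INF.
    constexpr uint32_t kToneGenInf = 0xFFFFFFFFu;

    // Convert a finite duration to a sample count without 32-bit overflow,
    // capping just below the infinite sentinel.
    uint32_t durationMsToSamples(uint32_t durationMs, uint32_t samplingRate) {
        const uint64_t samples = static_cast<uint64_t>(durationMs) * samplingRate / 1000;
        return static_cast<uint32_t>(std::min<uint64_t>(kToneGenInf - 1, samples));
    }
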
diff --git a/media/libaudioclient/aidl/android/media/AudioMix.aidl b/media/libaudioclient/aidl/android/media/AudioMix.aidl
index 88b0450..bb8537d 100644
--- a/media/libaudioclient/aidl/android/media/AudioMix.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMix.aidl
@@ -39,4 +39,8 @@
     boolean allowPrivilegedMediaPlaybackCapture;
     /** Indicates if the caller can capture voice communication output */
     boolean voiceCommunicationCaptureAllowed;
+    /** Identifies the owner of the AudioPolicy that this AudioMix belongs to */
+    IBinder mToken;
+    /** Indicates the Id of the VirtualDevice this AudioMix was registered for */
+    int mVirtualDeviceId;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl
index 2255d4c..111bb2f 100644
--- a/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl
@@ -36,4 +36,5 @@
     ENCODED_SURROUND_NEVER = 13,
     ENCODED_SURROUND_ALWAYS = 14,
     ENCODED_SURROUND_MANUAL = 15,
+    BT_BLE = 16,
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 31d3af5..29de9c2 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -41,6 +41,7 @@
 import android.media.ISoundDoseCallback;
 import android.media.MicrophoneInfoFw;
 import android.media.RenderPosition;
+import android.media.TrackInternalMuteInfo;
 import android.media.TrackSecondaryOutputInfo;
 import android.media.audio.common.AudioChannelLayout;
 import android.media.audio.common.AudioFormatDescription;
@@ -93,13 +94,11 @@
     float getMasterBalance();
 
     /*
-     * Set/gets stream type state. This will probably be used by
+     * Set stream type state. This will probably be used by
      * the preference panel, mostly.
      */
     void setStreamVolume(AudioStreamType stream, float value, int /* audio_io_handle_t */ output);
     void setStreamMute(AudioStreamType stream, boolean muted);
-    float streamVolume(AudioStreamType stream, int /* audio_io_handle_t */ output);
-    boolean streamMute(AudioStreamType stream);
 
     // set audio mode.
     void setMode(AudioMode mode);
@@ -293,6 +292,17 @@
      */
     AudioPortFw getAudioMixPort(in AudioPortFw devicePort, in AudioPortFw mixPort);
 
+    /**
+     * Set internal mute for a list of tracks.
+     */
+    void setTracksInternalMute(in TrackInternalMuteInfo[] tracksInternalMute);
+
+    /*
+     * Reset circular references in the AudioFlinger service.
+     * Test API.
+     */
+    void resetReferencesForTest();
+
     // When adding a new method, please review and update
     // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
     // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 52c8da0..ac42ea9 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -58,6 +58,8 @@
 import android.media.audio.common.AudioUuid;
 import android.media.audio.common.Int;
 
+import com.android.media.permission.INativePermissionController;
+
 /**
  * IAudioPolicyService interface (see AudioPolicyInterface for method descriptions).
  *
@@ -114,6 +116,10 @@
 
     void releaseInput(int /* audio_port_handle_t */ portId);
 
+    oneway void setDeviceAbsoluteVolumeEnabled(in AudioDevice device,
+                                               boolean enabled,
+                                               AudioStreamType streamToDriveAbs);
+
     void initStreamVolume(AudioStreamType stream,
                           int indexMin,
                           int indexMax);
@@ -263,6 +269,8 @@
 
     void registerPolicyMixes(in AudioMix[] mixes, boolean registration);
 
+    List<AudioMix> getRegisteredPolicyMixes();
+
     void updatePolicyMixes(in AudioMixUpdate[] updates);
 
     void setUidDeviceAffinities(int /* uid_t */ uid, in AudioDevice[] devices);
@@ -469,6 +477,11 @@
                                        int /* uid_t */ uid);
 
 
+    /**
+     * Get the native permission controller for audioserver, to push package and permission info
+     * required to control audio access.
+     */
+    INativePermissionController getPermissionController();
     // When adding a new method, please review and update
     // AudioPolicyService.cpp AudioPolicyService::onTransact()
     // AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
index ddda8bb..73610a8 100644
--- a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -19,6 +19,7 @@
 import android.media.AudioPortFw;
 import android.media.audio.common.AudioConfig;
 import android.media.audio.common.AudioConfigBase;
+import android.media.audio.common.AudioAttributes;
 
 /**
  * {@hide}
@@ -32,4 +33,5 @@
     AudioPortFw device;
     /** Bitmask, indexed by AudioOutputFlag. */
     int flags;
+    AudioAttributes attributes;
 }
diff --git a/media/libmedia/include/media/CodecServiceRegistrant.h b/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
similarity index 72%
copy from media/libmedia/include/media/CodecServiceRegistrant.h
copy to media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
index e0af781..05b1fa4 100644
--- a/media/libmedia/include/media/CodecServiceRegistrant.h
+++ b/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright (C) 2024 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,10 +14,10 @@
  * limitations under the License.
  */
 
-#ifndef CODEC_SERVICE_REGISTRANT_H_
+package android.media;
 
-#define CODEC_SERVICE_REGISTRANT_H_
-
-typedef void (*RegisterCodecServicesFunc)();
-
-#endif  // CODEC_SERVICE_REGISTRANT_H_
+parcelable TrackInternalMuteInfo {
+    /* Interpreted as audio_port_handle_t. */
+    int portId;
+    boolean muted;
+}
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 6093933..61d5ccd 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -1,4 +1,8 @@
+package {
+    default_team: "trendy_team_media_framework_audio",
+}
+
 /*
  * Copyright (C) 2022 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,62 +22,31 @@
     name: "libaudioclient_aidl_fuzzer_defaults",
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
-        "effect-aidl-cpp",
+        "audiopermissioncontroller",
+        "libaudiomockhal",
         "libcgrouprc",
         "libcgrouprc_format",
         "libfakeservicemanager",
         "libjsoncpp",
-        "liblog",
         "libmediametricsservice",
-        "libmedia_helper",
         "libprocessgroup",
         "shared-file-region-aidl-cpp",
     ],
     shared_libs: [
         "android.hardware.audio.common-util",
-        "audioclient-types-aidl-cpp",
-        "audiopolicy-aidl-cpp",
-        "audiopolicy-types-aidl-cpp",
-        "av-types-aidl-cpp",
-        "capture_state_listener-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libaudioclient",
-        "audioflinger-aidl-cpp",
         "libaudioflinger",
-        "libaudioclient_aidl_conversion",
-        "libaudiofoundation",
-        "libaudiomanager",
-        "libaudiopolicy",
-        "libaudioutils",
         "libaudiopolicyservice",
-        "libaudiopolicymanagerdefault",
-        "libaudiohal",
-        "libaudioprocessing",
-        "libactivitymanager_aidl",
         "libdl",
-        "libheadtracking",
-        "libmediautils",
-        "libmediametrics",
-        "libnblog",
-        "libnbaio",
-        "libpowermanager",
-        "libvibrator",
         "libvndksupport",
-        "libxml2",
         "mediametricsservice-aidl-cpp",
-        "packagemanager_aidl-cpp",
     ],
     header_libs: [
         "libaudiopolicymanager_interface_headers",
-        "libaudiofoundation_headers",
-        "libaudiohal_headers",
-        "libaudioflinger_headers",
-        "libbinder_headers",
         "libmedia_headers",
     ],
-     fuzz_config: {
+    fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-audio-fuzzing-reports@google.com",
         ],
         componentid: 155276,
         hotlists: ["4593311"],
@@ -89,7 +62,12 @@
     name: "audioflinger_aidl_fuzzer",
     srcs: ["audioflinger_aidl_fuzzer.cpp"],
     defaults: [
+        "latest_android_hardware_audio_core_ndk_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "latest_android_hardware_audio_effect_ndk_shared",
         "libaudioclient_aidl_fuzzer_defaults",
-        "service_fuzzer_defaults"
+        "libaudioflinger_dependencies",
+        "libaudiopolicyservice_dependencies",
+        "service_fuzzer_defaults",
     ],
 }
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
index f99cc3b..c7a04da 100644
--- a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -17,8 +17,12 @@
 #include <AudioFlinger.h>
 #include <android-base/logging.h>
 #include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
 #include <android/binder_process.h>
 #include <android/media/IAudioPolicyService.h>
+#include <core-mock/ConfigMock.h>
+#include <core-mock/ModuleMock.h>
+#include <effect-mock/FactoryMock.h>
 #include <fakeservicemanager/FakeServiceManager.h>
 #include <fuzzbinder/libbinder_driver.h>
 #include <fuzzbinder/random_binder.h>
@@ -32,6 +36,7 @@
 
 [[clang::no_destroy]] static std::once_flag gSmOnce;
 sp<FakeServiceManager> gFakeServiceManager;
+sp<AudioFlingerServerAdapter> gAudioFlingerServerAdapter;
 
 bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
                 FuzzedDataProvider& fdp) {
@@ -43,46 +48,58 @@
     return true;
 }
 
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+    /* Create a FakeServiceManager instance and add required services */
+    gFakeServiceManager = sp<FakeServiceManager>::make();
+    setDefaultServiceManager(gFakeServiceManager);
+
+    auto configService = ndk::SharedRefBase::make<ConfigMock>();
+    CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
+                                                  "android.hardware.audio.core.IConfig/default"));
+
+    auto factoryService = ndk::SharedRefBase::make<FactoryMock>();
+    CHECK_EQ(NO_ERROR,
+             AServiceManager_addService(factoryService.get()->asBinder().get(),
+                                        "android.hardware.audio.effect.IFactory/default"));
+
+    auto moduleService = ndk::SharedRefBase::make<ModuleMock>();
+    CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
+                                                  "android.hardware.audio.core.IModule/default"));
+
+    // Disable creating thread pool for fuzzer instance of audio flinger and audio policy services
+    AudioSystem::disableThreadPool();
+
+    return 0;
+}
+
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
 
-    std::call_once(gSmOnce, [&] {
-        /* Create a FakeServiceManager instance and add required services */
-        gFakeServiceManager = sp<FakeServiceManager>::make();
-        setDefaultServiceManager(gFakeServiceManager);
-    });
-    gFakeServiceManager->clear();
-
-    for (const char* service :
-         {"activity", "sensor_privacy", "permission", "scheduling_policy",
-          "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+    for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+                                "batterystats", "media.metrics"}) {
         if (!addService(String16(service), gFakeServiceManager, fdp)) {
             return 0;
         }
     }
 
-    const auto audioFlinger = sp<AudioFlinger>::make();
-    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+    // TODO(330882064): Initialize AudioFlinger and AudioPolicy services on every run
+    std::call_once(gSmOnce, [&] {
+        const auto audioFlinger = sp<AudioFlinger>::make();
+        gAudioFlingerServerAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+        CHECK_EQ(NO_ERROR,
+                 gFakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                                 IInterface::asBinder(gAudioFlingerServerAdapter),
+                                                 false /* allowIsolated */,
+                                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
 
-    CHECK_EQ(NO_ERROR,
-             gFakeServiceManager->addService(
-                     String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
-                     false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+        const auto audioPolicyService = sp<AudioPolicyService>::make();
+        CHECK_EQ(NO_ERROR,
+                 gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+                                                 false /* allowIsolated */,
+                                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+    });
 
-    AudioSystem::get_audio_flinger_for_fuzzer();
-    const auto audioPolicyService = sp<AudioPolicyService>::make();
-
-    CHECK_EQ(NO_ERROR,
-             gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
-                                             false /* allowIsolated */,
-                                             IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
-
-    sp<IBinder> audioFlingerServiceBinder =
-            gFakeServiceManager->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
-    sp<media::IAudioFlingerService> audioFlingerService =
-            interface_cast<media::IAudioFlingerService>(audioFlingerServiceBinder);
-
-    fuzzService(media::IAudioFlingerService::asBinder(audioFlingerService), std::move(fdp));
+    fuzzService(media::IAudioFlingerService::asBinder(gAudioFlingerServerAdapter), std::move(fdp));
 
     return 0;
 }
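The rewritten fuzzer registers mock audio HAL services under the default instance names before any fuzz input runs, so AudioFlinger's HAL factory binds to the mocks instead of real hardware. A minimal sketch of how such an NDK service is resolved by a client follows; it uses the standard AServiceManager_checkService/fromBinder pattern and is illustrative only, not part of the change.

```cpp
// Sketch only: resolve the mock IModule the way an NDK client would,
// using the instance name registered in LLVMFuzzerInitialize above.
#include <aidl/android/hardware/audio/core/IModule.h>
#include <android/binder_manager.h>

using aidl::android::hardware::audio::core::IModule;

std::shared_ptr<IModule> getDefaultAudioModule() {
    // Returns null if nothing is registered under this instance name.
    ndk::SpAIBinder binder(
            AServiceManager_checkService("android.hardware.audio.core.IModule/default"));
    return IModule::fromBinder(binder);
}
```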
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp
new file mode 100644
index 0000000..c4afffb
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_library {
+    name: "libaudiomockhal",
+
+    defaults: [
+        "latest_android_hardware_audio_core_ndk_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "latest_android_hardware_audio_effect_ndk_shared",
+    ],
+    header_libs: [
+        "libbinder_headers",
+    ],
+    static_libs: [
+        "libbinder_random_parcel",
+    ],
+    shared_libs: [
+        "libbinder_ndk",
+    ],
+
+    host_supported: true,
+    srcs: [
+        "FactoryMock.cpp",
+        "ModuleMock.cpp",
+        "StreamInMock.cpp",
+        "StreamOutMock.cpp",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+    export_include_dirs: ["include"],
+}
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp
new file mode 100644
index 0000000..ea07afc
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "effect-mock/FactoryMock.h"
+#include "effect-mock/EffectMock.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+ndk::ScopedAStatus FactoryMock::createEffect(const AudioUuid&,
+                                             std::shared_ptr<IEffect>* _aidl_return) {
+    *_aidl_return = ndk::SharedRefBase::make<EffectMock>();
+    return ndk::ScopedAStatus::ok();
+}
+
+}  // namespace aidl::android::hardware::audio::effect
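FactoryMock ignores the requested UUID and always hands back a fresh EffectMock. A small usage sketch, assuming only the headers added in this change:

```cpp
// Sketch: any UUID yields an EffectMock from the factory mock.
#include "effect-mock/FactoryMock.h"

using aidl::android::hardware::audio::effect::FactoryMock;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::IFactory;
using aidl::android::media::audio::common::AudioUuid;

std::shared_ptr<IEffect> makeAnyEffect() {
    std::shared_ptr<IFactory> factory = ndk::SharedRefBase::make<FactoryMock>();
    std::shared_ptr<IEffect> effect;
    factory->createEffect(AudioUuid{}, &effect);  // UUID is not inspected by the mock
    return effect;
}
```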
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp
new file mode 100644
index 0000000..711924f
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/ModuleMock.h"
+#include "core-mock/BluetoothA2dpMock.h"
+#include "core-mock/BluetoothLeMock.h"
+#include "core-mock/BluetoothMock.h"
+#include "core-mock/StreamInMock.h"
+#include "core-mock/StreamOutMock.h"
+#include "core-mock/TelephonyMock.h"
+#include "sounddose-mock/SoundDoseMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ModuleMock::ModuleMock() {
+    // Device ports
+    auto outDevice = createPort(/* PortId */ 0, /* Name */ "Default",
+                                /* Flags */ 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE,
+                                /* isInput */ false,
+                                createDeviceExt(
+                                        /* DeviceType */ AudioDeviceType::OUT_DEFAULT,
+                                        /* Flags */ AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
+    mPorts.push_back(outDevice);
+    auto inDevice = createPort(/* PortId */ 1, /* Name */ "Default",
+                               /* Flags */ 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE,
+                               /* isInput */ true,
+                               createDeviceExt(
+                                       /* DeviceType */ AudioDeviceType::IN_DEFAULT,
+                                       /* Flags */ 0));
+    mPorts.push_back(inDevice);
+}
+
+ndk::ScopedAStatus ModuleMock::getTelephony(std::shared_ptr<ITelephony>* _aidl_return) {
+    *_aidl_return = ndk::SharedRefBase::make<TelephonyMock>();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetooth(std::shared_ptr<IBluetooth>* _aidl_return) {
+    *_aidl_return = ndk::SharedRefBase::make<BluetoothMock>();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetoothA2dp(std::shared_ptr<IBluetoothA2dp>* _aidl_return) {
+    *_aidl_return = ndk::SharedRefBase::make<BluetoothA2dpMock>();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetoothLe(std::shared_ptr<IBluetoothLe>* _aidl_return) {
+    *_aidl_return = ndk::SharedRefBase::make<BluetoothLeMock>();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::openInputStream(const OpenInputStreamArguments&,
+                                               OpenInputStreamReturn* _aidl_return) {
+    _aidl_return->stream = ndk::SharedRefBase::make<StreamInMock>();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::openOutputStream(const OpenOutputStreamArguments&,
+                                                OpenOutputStreamReturn* _aidl_return) {
+    _aidl_return->stream = ndk::SharedRefBase::make<StreamOutMock>();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMasterMute(bool* _aidl_return) {
+    *_aidl_return = mMasterMute;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMasterMute(bool masterMute) {
+    mMasterMute = masterMute;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMasterVolume(float* _aidl_return) {
+    *_aidl_return = mMasterVolume;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMasterVolume(float masterVolume) {
+    mMasterVolume = masterVolume;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMicMute(bool* _aidl_return) {
+    *_aidl_return = mMicMute;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMicMute(bool micMute) {
+    mMicMute = micMute;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getSoundDose(std::shared_ptr<ISoundDose>* _aidl_return) {
+    *_aidl_return = ndk::SharedRefBase::make<SoundDoseMock>();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMmapPolicyInfos(AudioMMapPolicyType,
+                                                  std::vector<AudioMMapPolicyInfo>* _aidl_return) {
+    AudioMMapPolicyInfo never;
+    never.mmapPolicy = AudioMMapPolicy::NEVER;
+    _aidl_return->push_back(never);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::supportsVariableLatency(bool* _aidl_return) {
+    *_aidl_return = false;
+    return ndk::ScopedAStatus::ok();
+}
+
+AudioPortExt ModuleMock::createDeviceExt(AudioDeviceType devType, int32_t flags) {
+    AudioPortDeviceExt deviceExt;
+    deviceExt.device.type.type = devType;
+    deviceExt.flags = flags;
+    return AudioPortExt::make<AudioPortExt::Tag::device>(deviceExt);
+}
+
+AudioPort ModuleMock::createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+                                 const AudioPortExt& ext) {
+    AudioPort port;
+    port.id = id;
+    port.name = name;
+    port.flags = isInput ? AudioIoFlags::make<AudioIoFlags::Tag::input>(flags)
+                         : AudioIoFlags::make<AudioIoFlags::Tag::output>(flags);
+    port.ext = ext;
+    return port;
+}
+
+}  // namespace aidl::android::hardware::audio::core
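ModuleMock keeps just enough state (master volume/mute, mic mute, two default ports) for AudioFlinger to come up against it. A quick smoke-test sketch, written against the public IModule interface and assuming nothing beyond the files added here:

```cpp
// Sketch: the trivial getters/setters round-trip and an output stream opens.
#include "core-mock/ModuleMock.h"

using aidl::android::hardware::audio::core::IModule;
using aidl::android::hardware::audio::core::ModuleMock;

bool moduleMockSmokeTest() {
    std::shared_ptr<IModule> module = ndk::SharedRefBase::make<ModuleMock>();

    if (!module->setMasterVolume(0.5f).isOk()) return false;
    float volume = 0.0f;
    if (!module->getMasterVolume(&volume).isOk() || volume != 0.5f) return false;

    IModule::OpenOutputStreamArguments args;
    IModule::OpenOutputStreamReturn ret;
    return module->openOutputStream(args, &ret).isOk() && ret.stream != nullptr;
}
```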
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp
new file mode 100644
index 0000000..093a979
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/StreamInMock.h"
+#include "core-mock/StreamCommonMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ndk::ScopedAStatus StreamInMock::getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) {
+    if (!mStreamCommon) {
+        mStreamCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+    }
+    *_aidl_return = mStreamCommon;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getMicrophoneDirection(
+        IStreamIn::MicrophoneDirection* _aidl_return) {
+    *_aidl_return = mMicrophoneDirection;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setMicrophoneDirection(
+        IStreamIn::MicrophoneDirection in_direction) {
+    mMicrophoneDirection = in_direction;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getMicrophoneFieldDimension(float* _aidl_return) {
+    *_aidl_return = mMicrophoneFieldDimension;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setMicrophoneFieldDimension(float in_zoom) {
+    mMicrophoneFieldDimension = in_zoom;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getHwGain(std::vector<float>* _aidl_return) {
+    *_aidl_return = mHwGains;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setHwGain(const std::vector<float>& in_channelGains) {
+    mHwGains = in_channelGains;
+    return ndk::ScopedAStatus::ok();
+}
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp
new file mode 100644
index 0000000..a71f954
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/StreamOutMock.h"
+#include "core-mock/StreamCommonMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ndk::ScopedAStatus StreamOutMock::getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) {
+    if (!mStreamCommon) {
+        mStreamCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+    }
+    *_aidl_return = mStreamCommon;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamOutMock::getHwVolume(std::vector<float>* _aidl_return) {
+    *_aidl_return = mHwVolume;
+    return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setHwVolume(const std::vector<float>& in_channelVolumes) {
+    mHwVolume = in_channelVolumes;
+    return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getAudioDescriptionMixLevel(float* _aidl_return) {
+    *_aidl_return = mAudioDescriptionMixLeveldB;
+    return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setAudioDescriptionMixLevel(float in_leveldB) {
+    mAudioDescriptionMixLeveldB = in_leveldB;
+    return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getDualMonoMode(AudioDualMonoMode* _aidl_return) {
+    *_aidl_return = mDualMonoMode;
+    return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setDualMonoMode(AudioDualMonoMode in_mode) {
+    mDualMonoMode = in_mode;
+    return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getPlaybackRateParameters(AudioPlaybackRate* _aidl_return) {
+    *_aidl_return = mPlaybackRateParameters;
+    return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setPlaybackRateParameters(
+        const AudioPlaybackRate& in_playbackRate) {
+    mPlaybackRateParameters = in_playbackRate;
+    return ndk::ScopedAStatus::ok();
+}
+
+}  // namespace aidl::android::hardware::audio::core
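Both stream mocks hand out a single, lazily created StreamCommonMock. A short sketch of that caching behaviour, again purely as an illustration:

```cpp
// Sketch: repeated getStreamCommon() calls return the same cached instance.
#include "core-mock/StreamOutMock.h"

using aidl::android::hardware::audio::core::IStreamCommon;
using aidl::android::hardware::audio::core::IStreamOut;
using aidl::android::hardware::audio::core::StreamOutMock;

bool streamCommonIsCached() {
    std::shared_ptr<IStreamOut> stream = ndk::SharedRefBase::make<StreamOutMock>();
    std::shared_ptr<IStreamCommon> first;
    std::shared_ptr<IStreamCommon> second;
    stream->getStreamCommon(&first);
    stream->getStreamCommon(&second);
    return first != nullptr && first == second;
}
```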
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h
new file mode 100644
index 0000000..c4dd0d9
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetoothA2dp.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothA2dpMock : public BnBluetoothA2dp {
+  public:
+    ndk::ScopedAStatus isEnabled(bool* _aidl_return) override {
+        *_aidl_return = mEnabled;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setEnabled(bool enabled) override {
+        mEnabled = enabled;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override {
+        *_aidl_return = kSupportsOffloadReconfiguration;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus reconfigureOffload(const std::vector<VendorParameter>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+
+  private:
+    static constexpr bool kSupportsOffloadReconfiguration = true;
+    bool mEnabled = false;
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h
new file mode 100644
index 0000000..d58695a
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetoothLe.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothLeMock : public BnBluetoothLe {
+  public:
+    ndk::ScopedAStatus isEnabled(bool* _aidl_return) override {
+        *_aidl_return = mEnabled;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setEnabled(bool enabled) override {
+        mEnabled = enabled;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override {
+        *_aidl_return = kSupportsOffloadReconfiguration;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus reconfigureOffload(const std::vector<VendorParameter>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+
+  private:
+    static constexpr bool kSupportsOffloadReconfiguration = true;
+    bool mEnabled = false;
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h
new file mode 100644
index 0000000..e805840
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetooth.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothMock : public BnBluetooth {
+  public:
+    ndk::ScopedAStatus setScoConfig(const IBluetooth::ScoConfig&, IBluetooth::ScoConfig*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setHfpConfig(const IBluetooth::HfpConfig&, IBluetooth::HfpConfig*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h
new file mode 100644
index 0000000..f4031b5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnConfig.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class ConfigMock : public BnConfig {
+  private:
+    ndk::ScopedAStatus getSurroundSoundConfig(SurroundSoundConfig*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getEngineConfig(AudioHalEngineConfig*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h
new file mode 100644
index 0000000..d49203d
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnModule.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::hardware::audio::core::sounddose;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::core {
+
+class ModuleMock : public BnModule {
+  public:
+    ModuleMock();
+
+  private:
+    ndk::ScopedAStatus getTelephony(std::shared_ptr<ITelephony>*) override;
+    ndk::ScopedAStatus getBluetooth(std::shared_ptr<IBluetooth>*) override;
+    ndk::ScopedAStatus getBluetoothA2dp(std::shared_ptr<IBluetoothA2dp>*) override;
+    ndk::ScopedAStatus getBluetoothLe(std::shared_ptr<IBluetoothLe>*) override;
+    ndk::ScopedAStatus openInputStream(const OpenInputStreamArguments&,
+                                       OpenInputStreamReturn*) override;
+    ndk::ScopedAStatus openOutputStream(const OpenOutputStreamArguments&,
+                                        OpenOutputStreamReturn*) override;
+    ndk::ScopedAStatus getMasterMute(bool*) override;
+    ndk::ScopedAStatus setMasterMute(bool) override;
+    ndk::ScopedAStatus getMasterVolume(float*) override;
+    ndk::ScopedAStatus setMasterVolume(float) override;
+    ndk::ScopedAStatus getMicMute(bool*) override;
+    ndk::ScopedAStatus setMicMute(bool) override;
+    ndk::ScopedAStatus getSoundDose(std::shared_ptr<ISoundDose>*) override;
+    ndk::ScopedAStatus getMmapPolicyInfos(AudioMMapPolicyType,
+                                          std::vector<AudioMMapPolicyInfo>*) override;
+    ndk::ScopedAStatus supportsVariableLatency(bool*) override;
+
+    ndk::ScopedAStatus setModuleDebug(const ModuleDebug&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus connectExternalDevice(const AudioPort&, AudioPort*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus disconnectExternalDevice(int32_t) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioPatches(std::vector<AudioPatch>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioPort(int32_t, AudioPort*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioPortConfigs(std::vector<AudioPortConfig>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioPorts(std::vector<AudioPort>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioRoutes(std::vector<AudioRoute>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioRoutesForAudioPort(int32_t, std::vector<AudioRoute>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setAudioPatch(const AudioPatch&, AudioPatch*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setAudioPortConfig(const AudioPortConfig&, AudioPortConfig*,
+                                          bool*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus resetAudioPatch(int32_t) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus resetAudioPortConfig(int32_t) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getMicrophones(std::vector<MicrophoneInfo>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateAudioMode(AudioMode) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus updateScreenRotation(ScreenRotation) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateScreenState(bool) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus generateHwAvSyncId(int32_t*) override {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+    }
+    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>&,
+                                           std::vector<VendorParameter>*) override {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+    }
+    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>&, bool) override {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+    }
+    ndk::ScopedAStatus addDeviceEffect(int32_t, const std::shared_ptr<IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus removeDeviceEffect(int32_t, const std::shared_ptr<IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAAudioMixerBurstCount(int32_t*) override {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+    }
+    ndk::ScopedAStatus getAAudioHardwareBurstMinUsec(int32_t*) override {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+    }
+    ndk::ScopedAStatus prepareToDisconnectExternalDevice(int32_t) override {
+        return ndk::ScopedAStatus::ok();
+    }
+
+    AudioPortExt createDeviceExt(AudioDeviceType devType, int32_t flags);
+    AudioPort createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+                         const AudioPortExt& ext);
+
+    bool mMasterMute;
+    float mMasterVolume;
+    bool mMicMute;
+    std::vector<AudioPort> mPorts;
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h
new file mode 100644
index 0000000..25d53f8
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamCommon.h>
+
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamCommonMock : public BnStreamCommon {
+    ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>&,
+                                           std::vector<VendorParameter>*) override {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+    }
+    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>&, bool) override {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+    }
+    ndk::ScopedAStatus addEffect(const std::shared_ptr<IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus removeEffect(const std::shared_ptr<IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h
new file mode 100644
index 0000000..5deab5b
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamIn.h>
+
+using namespace aidl::android::hardware::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamInMock : public BnStreamIn {
+    ndk::ScopedAStatus getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) override;
+    ndk::ScopedAStatus getMicrophoneDirection(
+            IStreamIn::MicrophoneDirection* _aidl_return) override;
+    ndk::ScopedAStatus setMicrophoneDirection(IStreamIn::MicrophoneDirection in_direction) override;
+    ndk::ScopedAStatus getMicrophoneFieldDimension(float* _aidl_return) override;
+    ndk::ScopedAStatus setMicrophoneFieldDimension(float in_zoom) override;
+    ndk::ScopedAStatus getHwGain(std::vector<float>* _aidl_return) override;
+    ndk::ScopedAStatus setHwGain(const std::vector<float>& in_channelGains) override;
+
+    ndk::ScopedAStatus getActiveMicrophones(std::vector<MicrophoneDynamicInfo>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateMetadata(const SinkMetadata&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+
+  private:
+    IStreamIn::MicrophoneDirection mMicrophoneDirection;
+    float mMicrophoneFieldDimension;
+    std::vector<float> mHwGains;
+    std::shared_ptr<IStreamCommon> mStreamCommon;
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h
new file mode 100644
index 0000000..4d12815
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamOut.h>
+
+using namespace aidl::android::hardware::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamOutMock : public BnStreamOut {
+    ndk::ScopedAStatus getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) override;
+    ndk::ScopedAStatus getHwVolume(std::vector<float>* _aidl_return) override;
+    ndk::ScopedAStatus setHwVolume(const std::vector<float>& in_channelVolumes) override;
+    ndk::ScopedAStatus getAudioDescriptionMixLevel(float* _aidl_return) override;
+    ndk::ScopedAStatus setAudioDescriptionMixLevel(float in_leveldB) override;
+    ndk::ScopedAStatus getDualMonoMode(AudioDualMonoMode* _aidl_return) override;
+    ndk::ScopedAStatus setDualMonoMode(AudioDualMonoMode in_mode) override;
+    ndk::ScopedAStatus getPlaybackRateParameters(AudioPlaybackRate* _aidl_return) override;
+    ndk::ScopedAStatus setPlaybackRateParameters(const AudioPlaybackRate& in_playbackRate) override;
+
+    ndk::ScopedAStatus updateMetadata(const SourceMetadata&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateOffloadMetadata(const AudioOffloadMetadata&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getRecommendedLatencyModes(std::vector<AudioLatencyMode>*) override {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+    }
+    ndk::ScopedAStatus setLatencyMode(AudioLatencyMode) override {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+    }
+    ndk::ScopedAStatus selectPresentation(int32_t, int32_t) override {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+    }
+
+  private:
+    AudioPlaybackRate mPlaybackRateParameters;
+    AudioDualMonoMode mDualMonoMode;
+    float mAudioDescriptionMixLeveldB;
+    std::vector<float> mHwVolume;
+    std::shared_ptr<IStreamCommon> mStreamCommon;
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h
new file mode 100644
index 0000000..d56dee6
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnTelephony.h>
+
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class TelephonyMock : public BnTelephony {
+  public:
+    ndk::ScopedAStatus getSupportedAudioModes(std::vector<AudioMode>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus switchAudioMode(AudioMode) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus setTelecomConfig(const ITelephony::TelecomConfig&,
+                                        ITelephony::TelecomConfig*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+}  // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h
new file mode 100644
index 0000000..db20cd8
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::effect {
+
+class EffectMock : public BnEffect {
+  public:
+    ndk::ScopedAStatus open(const Parameter::Common&, const std::optional<Parameter::Specific>&,
+                            IEffect::OpenEffectReturn*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus command(CommandId) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getState(State*) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getDescriptor(Descriptor*) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus reopen(IEffect::OpenEffectReturn*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setParameter(const Parameter&) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getParameter(const Parameter::Id&, Parameter*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h
new file mode 100644
index 0000000..57d58d5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/effect/BnFactory.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::effect {
+
+class FactoryMock : public BnFactory {
+    ndk::ScopedAStatus queryEffects(const std::optional<AudioUuid>&,
+                                    const std::optional<AudioUuid>&,
+                                    const std::optional<AudioUuid>&,
+                                    std::vector<Descriptor>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus queryProcessing(const std::optional<Processing::Type>&,
+                                       std::vector<Processing>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus destroyEffect(const std::shared_ptr<IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+
+    ndk::ScopedAStatus createEffect(const AudioUuid&, std::shared_ptr<IEffect>*) override;
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h
new file mode 100644
index 0000000..5557b10
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/sounddose/BnSoundDose.h>
+
+using namespace aidl::android::hardware::audio::core::sounddose;
+
+namespace aidl::android::hardware::audio::core::sounddose {
+
+class SoundDoseMock : public BnSoundDose {
+    ndk::ScopedAStatus setOutputRs2UpperBound(float in_rs2ValueDbA) override {
+        mOutputRs2UpperBound = in_rs2ValueDbA;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getOutputRs2UpperBound(float* _aidl_return) override {
+        *_aidl_return = mOutputRs2UpperBound;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus registerSoundDoseCallback(
+            const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+
+  private:
+    float mOutputRs2UpperBound;
+};
+
+}  // namespace aidl::android::hardware::audio::core::sounddose
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index fd3b0a8..a95c700 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -41,9 +42,9 @@
         "libcutils",
         "libjsoncpp",
         "liblog",
+        "libmedia_helper",
         "libmediametrics",
         "libmediametricsservice",
-        "libmedia_helper",
         "libprocessgroup",
         "shared-file-region-aidl-cpp",
     ],
@@ -55,8 +56,9 @@
         "audiopolicy-types-aidl-cpp",
         "av-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
-        "libaudioclient_aidl_conversion",
+        "framework-permission-aidl-cpp",
         "libaudio_aidl_conversion_common_cpp",
+        "libaudioclient_aidl_conversion",
         "libaudioflinger",
         "libaudiofoundation",
         "libaudiomanager",
@@ -69,7 +71,6 @@
         "libutils",
         "libxml2",
         "mediametricsservice-aidl-cpp",
-        "framework-permission-aidl-cpp",
     ],
     header_libs: [
         "libaudiofoundation_headers",
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index dfdb4cf..4c94974 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -519,12 +519,6 @@
     stream = getValue(&mFdp, kStreamtypes);
     AudioSystem::getOutputLatency(&latency, stream);
 
-    stream = getValue(&mFdp, kStreamtypes);
-    AudioSystem::getStreamVolume(stream, &volume, mFdp.ConsumeIntegral<int32_t>());
-
-    stream = getValue(&mFdp, kStreamtypes);
-    AudioSystem::getStreamMute(stream, &state);
-
     uint32_t samplingRate;
     AudioSystem::getSamplingRate(mFdp.ConsumeIntegral<int32_t>(), &samplingRate);
 
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index ec35e93..b190fba 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -18,6 +18,7 @@
 #ifndef ANDROID_AUDIO_POLICY_H
 #define ANDROID_AUDIO_POLICY_H
 
+#include <binder/IBinder.h>
 #include <binder/Parcel.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <system/audio.h>
@@ -127,6 +128,8 @@
     audio_devices_t mDeviceType;
     String8         mDeviceAddress;
     uint32_t        mCbFlags; // flags indicating which callbacks to use, see kCbFlag*
+    sp<IBinder>     mToken;
+    uint32_t        mVirtualDeviceId;
     /** Ignore the AUDIO_FLAG_NO_MEDIA_PROJECTION */
     bool            mAllowPrivilegedMediaPlaybackCapture = false;
     /** Indicates if the caller can capture voice communication output */
diff --git a/media/libaudioclient/include/media/AudioProductStrategy.h b/media/libaudioclient/include/media/AudioProductStrategy.h
index fcbb019..2505b11 100644
--- a/media/libaudioclient/include/media/AudioProductStrategy.h
+++ b/media/libaudioclient/include/media/AudioProductStrategy.h
@@ -58,11 +58,11 @@
      * @return {@code INVALID_SCORE} if not matching, {@code MATCH_ON_DEFAULT_SCORE} if matching
      * to default strategy, non zero positive score if matching a strategy.
      */
-    static int attributesMatchesScore(const audio_attributes_t refAttributes,
-                                      const audio_attributes_t clientAttritubes);
+    static int attributesMatchesScore(audio_attributes_t refAttributes,
+                                      audio_attributes_t clientAttritubes);
 
-    static bool attributesMatches(const audio_attributes_t refAttributes,
-                                      const audio_attributes_t clientAttritubes) {
+    static bool attributesMatches(audio_attributes_t refAttributes,
+                                  audio_attributes_t clientAttritubes) {
         return attributesMatchesScore(refAttributes, clientAttritubes) > 0;
     }
 
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 00f2c7a..d4479ef 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -681,7 +681,7 @@
 
     // Current client state:  false = stopped, true = active.  Protected by mLock.  If more states
     // are added, consider changing this to enum State { ... } mState as in AudioTrack.
-    bool                    mActive;
+    bool mActive = false;
 
     // for client callback handler
 
@@ -708,7 +708,7 @@
     Modulo<uint32_t>        mNewPosition;           // in frames
     uint32_t                mUpdatePeriod;          // in frames, zero means no EVENT_NEW_POS
 
-    status_t                mStatus;
+    status_t mStatus = NO_INIT;
 
     android::content::AttributionSourceState mClientAttributionSource; // Owner's attribution source
 
@@ -736,8 +736,8 @@
                                                     // held to read or write those bits reliably.
     audio_input_flags_t     mOrigFlags;             // as specified in constructor or set(), const
 
-    audio_session_t         mSessionId;
-    audio_port_handle_t     mPortId;                    // Id from Audio Policy Manager
+    audio_session_t mSessionId = AUDIO_SESSION_ALLOCATE;
+    audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
 
     /**
      * mLogSessionId is a string identifying this AudioRecord for the metrics service.
@@ -756,9 +756,9 @@
     sp<IMemory>             mBufferMemory;
     audio_io_handle_t       mInput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getInputforAttr()
 
-    int                     mPreviousPriority;  // before start()
-    SchedPolicy             mPreviousSchedulingGroup;
-    bool                    mAwaitBoost;    // thread should wait for priority boost before running
+    int mPreviousPriority = ANDROID_PRIORITY_NORMAL;  // before start()
+    SchedPolicy mPreviousSchedulingGroup = SP_DEFAULT;
+    bool mAwaitBoost = false;  // thread should wait for priority boost before running
 
     // The proxy should only be referenced while a lock is held because the proxy isn't
     // multi-thread safe.
@@ -799,14 +799,17 @@
 
     // For Device Selection API
     //  a value of AUDIO_PORT_HANDLE_NONE indicated default (AudioPolicyManager) routing.
-    audio_port_handle_t     mSelectedDeviceId; // Device requested by the application.
-    audio_port_handle_t     mRoutedDeviceId;   // Device actually selected by audio policy manager:
-                                              // May not match the app selection depending on other
-                                              // activity and connected devices
+
+    // Device requested by the application.
+    audio_port_handle_t     mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    // Device actually selected by AudioPolicyManager: This may not match the app
+    // selection depending on other activity and connected devices
+    audio_port_handle_t     mRoutedDeviceId = AUDIO_PORT_HANDLE_NONE;
+
     wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
 
-    audio_microphone_direction_t mSelectedMicDirection;
-    float mSelectedMicFieldDimension;
+    audio_microphone_direction_t mSelectedMicDirection = MIC_DIRECTION_UNSPECIFIED;
+    float mSelectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT;
 
     int32_t                    mMaxSharedAudioHistoryMs = 0;
     std::string                mSharedAudioPackageName = {};
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index a1f7941..67b3dcd 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -19,6 +19,7 @@
 
 #include <sys/types.h>
 
+#include <mutex>
 #include <set>
 #include <vector>
 
@@ -86,6 +87,7 @@
 typedef void (*routing_callback)();
 typedef void (*vol_range_init_req_callback)();
 
+class CaptureStateListenerImpl;
 class IAudioFlinger;
 class String8;
 
@@ -95,6 +97,13 @@
 
 class AudioSystem
 {
+    friend class AudioFlingerClient;
+    friend class AudioPolicyServiceClient;
+    friend class CaptureStateListenerImpl;
+    template <typename ServiceInterface, typename Client, typename AidlInterface,
+            typename ServiceTraits>
+    friend class ServiceHandler;
+
 public:
 
     // FIXME Declare in binder opcode order, similarly to IAudioFlinger.h and IAudioFlinger.cpp
@@ -115,15 +124,12 @@
     static status_t setMasterMute(bool mute);
     static status_t getMasterMute(bool* mute);
 
-    // set/get stream volume on specified output
+    // set stream volume on specified output
     static status_t setStreamVolume(audio_stream_type_t stream, float value,
                                     audio_io_handle_t output);
-    static status_t getStreamVolume(audio_stream_type_t stream, float* volume,
-                                    audio_io_handle_t output);
 
     // mute/unmute stream
     static status_t setStreamMute(audio_stream_type_t stream, bool mute);
-    static status_t getStreamMute(audio_stream_type_t stream, bool* mute);
 
     // set audio mode in audio hardware
     static status_t setMode(audio_mode_t mode);
@@ -177,8 +183,11 @@
     static status_t setLocalAudioFlinger(const sp<IAudioFlinger>& af);
 
     // helper function to obtain AudioFlinger service handle
-    static const sp<IAudioFlinger> get_audio_flinger();
-    static const sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
+    static sp<IAudioFlinger> get_audio_flinger();
+
+    // Function to disable creation of the thread pool (used for testing).
+    // This should be called before get_audio_flinger() or get_audio_policy_service().
+    static void disableThreadPool();
 
     static float linearToLog(int volume);
     static int logToLinear(float volume);
@@ -362,9 +371,13 @@
     static status_t startInput(audio_port_handle_t portId);
     static status_t stopInput(audio_port_handle_t portId);
     static void releaseInput(audio_port_handle_t portId);
+    static status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                   const char *address,
+                                                   bool enabled,
+                                                   audio_stream_type_t streamToDriveAbs);
     static status_t initStreamVolume(audio_stream_type_t stream,
-                                      int indexMin,
-                                      int indexMax);
+                                     int indexMin,
+                                     int indexMax);
     static status_t setStreamVolumeIndex(audio_stream_type_t stream,
                                          int index,
                                          audio_devices_t device);
@@ -402,7 +415,12 @@
     // and output configuration cache (gOutputs)
     static void clearAudioConfigCache();
 
-    static const sp<media::IAudioPolicyService> get_audio_policy_service();
+    // Sets a local AudioPolicyService interface to be used by AudioSystem.
+    // This is used by audioserver main() to allow client object initialization
+    // before exposing any interfaces to ServiceManager.
+    static status_t setLocalAudioPolicyService(const sp<media::IAudioPolicyService>& aps);
+
+    static sp<media::IAudioPolicyService> get_audio_policy_service();
     static void clearAudioPolicyService();
 
     // helpers for android.media.AudioManager.getProperty(), see description there for meaning
@@ -462,6 +480,8 @@
 
     static status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
 
+    static status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes);
+
     static status_t updatePolicyMixes(
         const std::vector<
                 std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>& mixesWithUpdates);
@@ -774,23 +794,18 @@
 
     static int32_t getAAudioHardwareBurstMinUsec();
 
-private:
-
     class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
     {
     public:
-        AudioFlingerClient() :
-            mInBuffSize(0), mInSamplingRate(0),
-            mInFormat(AUDIO_FORMAT_DEFAULT), mInChannelMask(AUDIO_CHANNEL_NONE) {
-        }
+        AudioFlingerClient() = default;
 
-        void clearIoCache();
+        void clearIoCache() EXCLUDES(mMutex);
         status_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
-                                    audio_channel_mask_t channelMask, size_t* buffSize);
-        sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+                audio_channel_mask_t channelMask, size_t* buffSize) EXCLUDES(mMutex);
+        sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle) EXCLUDES(mMutex);
 
         // DeathRecipient
-        virtual void binderDied(const wp<IBinder>& who);
+        void binderDied(const wp<IBinder>& who) final;
 
         // IAudioFlingerClient
 
@@ -798,61 +813,71 @@
         // values for output/input parameters up-to-date in client process
         binder::Status ioConfigChanged(
                 media::AudioIoConfigEvent event,
-                const media::AudioIoDescriptor& ioDesc) override;
+                const media::AudioIoDescriptor& ioDesc) final EXCLUDES(mMutex);
 
         binder::Status onSupportedLatencyModesChanged(
                 int output,
-                const std::vector<media::audio::common::AudioLatencyMode>& latencyModes) override;
+                const std::vector<media::audio::common::AudioLatencyMode>& latencyModes)
+                final EXCLUDES(mMutex);
 
         status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
-                                               audio_io_handle_t audioIo,
-                                               audio_port_handle_t portId);
+                audio_io_handle_t audioIo, audio_port_handle_t portId) EXCLUDES(mMutex);
         status_t removeAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
-                                           audio_io_handle_t audioIo,
-                                           audio_port_handle_t portId);
+                audio_io_handle_t audioIo, audio_port_handle_t portId) EXCLUDES(mMutex);
 
         status_t addSupportedLatencyModesCallback(
-                        const sp<SupportedLatencyModesCallback>& callback);
+                const sp<SupportedLatencyModesCallback>& callback) EXCLUDES(mMutex);
         status_t removeSupportedLatencyModesCallback(
-                        const sp<SupportedLatencyModesCallback>& callback);
+                const sp<SupportedLatencyModesCallback>& callback) EXCLUDES(mMutex);
 
-        audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
+        audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo) EXCLUDES(mMutex);
 
     private:
-        Mutex                               mLock;
-        DefaultKeyedVector<audio_io_handle_t, sp<AudioIoDescriptor> >   mIoDescriptors;
+        mutable std::mutex mMutex;
+        std::map<audio_io_handle_t, sp<AudioIoDescriptor>> mIoDescriptors GUARDED_BY(mMutex);
 
         std::map<audio_io_handle_t, std::map<audio_port_handle_t, wp<AudioDeviceCallback>>>
-                mAudioDeviceCallbacks;
+                mAudioDeviceCallbacks GUARDED_BY(mMutex);
 
         std::vector<wp<SupportedLatencyModesCallback>>
-                mSupportedLatencyModesCallbacks GUARDED_BY(mLock);
+                mSupportedLatencyModesCallbacks GUARDED_BY(mMutex);
 
         // cached values for recording getInputBufferSize() queries
-        size_t                              mInBuffSize;    // zero indicates cache is invalid
-        uint32_t                            mInSamplingRate;
-        audio_format_t                      mInFormat;
-        audio_channel_mask_t                mInChannelMask;
-        sp<AudioIoDescriptor> getIoDescriptor_l(audio_io_handle_t ioHandle);
+        size_t mInBuffSize GUARDED_BY(mMutex) = 0; // zero indicates cache is invalid
+        uint32_t mInSamplingRate GUARDED_BY(mMutex) = 0;
+        audio_format_t mInFormat GUARDED_BY(mMutex) = AUDIO_FORMAT_DEFAULT;
+        audio_channel_mask_t mInChannelMask GUARDED_BY(mMutex) = AUDIO_CHANNEL_NONE;
+
+        sp<AudioIoDescriptor> getIoDescriptor_l(audio_io_handle_t ioHandle) REQUIRES(mMutex);
     };
 
     class AudioPolicyServiceClient: public IBinder::DeathRecipient,
-                                    public media::BnAudioPolicyServiceClient
-    {
+                                    public media::BnAudioPolicyServiceClient {
     public:
-        AudioPolicyServiceClient() {
+        AudioPolicyServiceClient() = default;
+
+        int addAudioPortCallback(const sp<AudioPortCallback>& callback) EXCLUDES(mMutex);
+
+        int removeAudioPortCallback(const sp<AudioPortCallback>& callback) EXCLUDES(mMutex);
+
+        bool isAudioPortCbEnabled() const EXCLUDES(mMutex) {
+            std::lock_guard _l(mMutex);
+            return !mAudioPortCallbacks.empty();
         }
 
-        int addAudioPortCallback(const sp<AudioPortCallback>& callback);
-        int removeAudioPortCallback(const sp<AudioPortCallback>& callback);
-        bool isAudioPortCbEnabled() const { return (mAudioPortCallbacks.size() != 0); }
+        int addAudioVolumeGroupCallback(
+                const sp<AudioVolumeGroupCallback>& callback) EXCLUDES(mMutex);
 
-        int addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback);
-        int removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback);
-        bool isAudioVolumeGroupCbEnabled() const { return (mAudioVolumeGroupCallback.size() != 0); }
+        int removeAudioVolumeGroupCallback(
+                const sp<AudioVolumeGroupCallback>& callback) EXCLUDES(mMutex);
+
+        bool isAudioVolumeGroupCbEnabled() const EXCLUDES(mMutex) {
+            std::lock_guard _l(mMutex);
+            return !mAudioVolumeGroupCallbacks.empty();
+        }
 
         // DeathRecipient
-        virtual void binderDied(const wp<IBinder>& who);
+        void binderDied(const wp<IBinder>& who) final;
 
         // IAudioPolicyServiceClient
         binder::Status onAudioVolumeGroupChanged(int32_t group, int32_t flags) override;
@@ -873,43 +898,36 @@
         binder::Status onVolumeRangeInitRequest();
 
     private:
-        Mutex                               mLock;
-        Vector <sp <AudioPortCallback> >    mAudioPortCallbacks;
-        Vector <sp <AudioVolumeGroupCallback> > mAudioVolumeGroupCallback;
+        mutable std::mutex mMutex;
+        std::set<sp<AudioPortCallback>> mAudioPortCallbacks GUARDED_BY(mMutex);
+        std::set<sp<AudioVolumeGroupCallback>> mAudioVolumeGroupCallbacks GUARDED_BY(mMutex);
     };
 
+    private:
+
     static audio_io_handle_t getOutput(audio_stream_type_t stream);
-    static const sp<AudioFlingerClient> getAudioFlingerClient();
+    static sp<AudioFlingerClient> getAudioFlingerClient();
     static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
-    static const sp<IAudioFlinger> getAudioFlingerImpl(bool canStartThreadPool);
 
     // Invokes all registered error callbacks with the given error code.
     static void reportError(status_t err);
 
-    static sp<AudioFlingerClient> gAudioFlingerClient;
-    static sp<AudioPolicyServiceClient> gAudioPolicyServiceClient;
-    friend class AudioFlingerClient;
-    friend class AudioPolicyServiceClient;
+    [[clang::no_destroy]] static std::mutex gMutex;
+    static dynamic_policy_callback gDynPolicyCallback GUARDED_BY(gMutex);
+    static record_config_callback gRecordConfigCallback GUARDED_BY(gMutex);
+    static routing_callback gRoutingCallback GUARDED_BY(gMutex);
+    static vol_range_init_req_callback gVolRangeInitReqCallback GUARDED_BY(gMutex);
 
-    static Mutex gLock;      // protects gAudioFlinger
-    static Mutex gLockErrorCallbacks;      // protects gAudioErrorCallbacks
-    static Mutex gLockAPS;   // protects gAudioPolicyService and gAudioPolicyServiceClient
-    static sp<IAudioFlinger> gAudioFlinger;
-    static std::set<audio_error_callback> gAudioErrorCallbacks;
-    static dynamic_policy_callback gDynPolicyCallback;
-    static record_config_callback gRecordConfigCallback;
-    static routing_callback gRoutingCallback;
-    static vol_range_init_req_callback gVolRangeInitReqCallback;
+    [[clang::no_destroy]] static std::mutex gApsCallbackMutex;
+    [[clang::no_destroy]] static std::mutex gErrorCallbacksMutex;
+    [[clang::no_destroy]] static std::set<audio_error_callback> gAudioErrorCallbacks
+            GUARDED_BY(gErrorCallbacksMutex);
 
-    static size_t gInBuffSize;
-    // previous parameters for recording buffer size queries
-    static uint32_t gPrevInSamplingRate;
-    static audio_format_t gPrevInFormat;
-    static audio_channel_mask_t gPrevInChannelMask;
-
-    static sp<media::IAudioPolicyService> gAudioPolicyService;
+    [[clang::no_destroy]] static std::mutex gSoundTriggerMutex;
+    [[clang::no_destroy]] static sp<CaptureStateListenerImpl> gSoundTriggerCaptureStateListener
+            GUARDED_BY(gSoundTriggerMutex);
 };
 
-};  // namespace android
+}  // namespace android
 
 #endif  /*ANDROID_AUDIOSYSTEM_H_*/
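
Note on the AudioSystem.h hunks above: the former raw Mutex members (gLock, gLockAPS, mLock) are replaced by std::mutex together with clang thread-safety annotations (GUARDED_BY, EXCLUDES, REQUIRES), which the -Wthread-safety flag added to the test build further below checks at compile time. A minimal sketch of the annotation pattern, assuming only the macros from <android-base/thread_annotations.h>; the class and field names are illustrative and not part of this change:

#include <map>
#include <mutex>

#include <android-base/thread_annotations.h>

// Illustrative cache class, not from the patch.
class IoCache {
  public:
    // EXCLUDES: the caller must not already hold mMutex.
    void put(int handle, int value) EXCLUDES(mMutex) {
        std::lock_guard _l(mMutex);
        mValues[handle] = value;
    }
    int get(int handle) const EXCLUDES(mMutex) {
        std::lock_guard _l(mMutex);
        return getLocked(handle);
    }
  private:
    // REQUIRES: the caller already holds mMutex.
    int getLocked(int handle) const REQUIRES(mMutex) {
        const auto it = mValues.find(handle);
        return it == mValues.end() ? -1 : it->second;
    }
    mutable std::mutex mMutex;
    std::map<int, int> mValues GUARDED_BY(mMutex);  // flagged if accessed without the lock
};
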
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 523383f..3a001a4 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -257,9 +257,7 @@
     /* Constructs an uninitialized AudioTrack. No connection with
      * AudioFlinger takes place.  Use set() after this.
      */
-                        AudioTrack();
-
-                        AudioTrack(const AttributionSourceState& attributionSourceState);
+    explicit AudioTrack(const AttributionSourceState& attributionSourceState = {});
 
     /* Creates an AudioTrack object and registers it with AudioFlinger.
      * Once created, the track needs to be started before it can be used.
@@ -1220,7 +1218,7 @@
             void setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount);
 
             // FIXME enum is faster than strcmp() for parameter 'from'
-            status_t restoreTrack_l(const char *from);
+            status_t restoreTrack_l(const char *from, bool forceRestore = false);
 
             uint32_t    getUnderrunCount_l() const;
 
@@ -1312,11 +1310,11 @@
     sp<IMemory>             mSharedBuffer;
     transfer_type           mTransfer;
     audio_offload_info_t    mOffloadInfoCopy;
-    audio_attributes_t      mAttributes;
+    audio_attributes_t mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
 
     size_t                  mFrameSize;             // frame size in bytes
 
-    status_t                mStatus;
+    status_t mStatus = NO_INIT;
 
     // can change dynamically when IAudioTrack invalidated
     uint32_t                mLatency;               // in ms
@@ -1329,7 +1327,7 @@
         STATE_PAUSED_STOPPING,
         STATE_FLUSHED,
         STATE_STOPPING,
-    }                       mState;
+    } mState = STATE_STOPPED;
 
     static constexpr const char *stateToString(State state)
     {
@@ -1459,8 +1457,8 @@
 
     mutable Mutex           mLock;
 
-    int                     mPreviousPriority;          // before start()
-    SchedPolicy             mPreviousSchedulingGroup;
+    int mPreviousPriority = ANDROID_PRIORITY_NORMAL;  // before start()
+    SchedPolicy mPreviousSchedulingGroup = SP_DEFAULT;
     bool                    mAwaitBoost;    // thread should wait for priority boost before running
 
     // The proxy should only be referenced while a lock is held because the proxy isn't
@@ -1472,14 +1470,17 @@
     sp<AudioTrackClientProxy>       mProxy;         // primary owner of the memory
 
     bool                    mInUnderrun;            // whether track is currently in underrun state
-    uint32_t                mPausedPosition;
+    uint32_t mPausedPosition = 0;
 
     // For Device Selection API
     //  a value of AUDIO_PORT_HANDLE_NONE indicated default (AudioPolicyManager) routing.
-    audio_port_handle_t    mSelectedDeviceId; // Device requested by the application.
-    audio_port_handle_t    mRoutedDeviceId;   // Device actually selected by audio policy manager:
-                                              // May not match the app selection depending on other
-                                              // activity and connected devices.
+
+    // Device requested by the application.
+    audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+
+    // Device actually selected by AudioPolicyManager: This may not match the app
+    // selection depending on other activity and connected devices.
+    audio_port_handle_t mRoutedDeviceId = AUDIO_PORT_HANDLE_NONE;
 
     sp<media::VolumeHandler>       mVolumeHandler;
 
@@ -1537,7 +1538,7 @@
         Mutex mAudioTrackCbLock;
         wp<media::IAudioTrackCallback> mCallback;
     };
-    sp<AudioTrackCallback> mAudioTrackCallback;
+    sp<AudioTrackCallback> mAudioTrackCallback = sp<AudioTrackCallback>::make();
 };
 
 }; // namespace android
diff --git a/media/libaudioclient/include/media/EffectClientAsyncProxy.h b/media/libaudioclient/include/media/EffectClientAsyncProxy.h
new file mode 100644
index 0000000..e7d6d80
--- /dev/null
+++ b/media/libaudioclient/include/media/EffectClientAsyncProxy.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/media/BnEffectClient.h>
+#include <audio_utils/CommandThread.h>
+
+namespace android::media {
+
+class EffectClientAsyncProxy : public IEffectClient {
+public:
+
+    /**
+     * Call this factory method to interpose a worker thread when a binder
+     * callback interface is invoked in-proc.
+     */
+    static sp<IEffectClient> makeIfNeeded(const sp<IEffectClient>& effectClient) {
+        if (isLocalBinder(effectClient)) {
+            return sp<EffectClientAsyncProxy>::make(effectClient);
+        }
+        return effectClient;
+    }
+
+    explicit EffectClientAsyncProxy(const sp<IEffectClient>& effectClient)
+        : mEffectClient(effectClient) {}
+
+    ::android::IBinder* onAsBinder() override {
+        return nullptr;
+    }
+
+    ::android::binder::Status controlStatusChanged(bool controlGranted) override {
+        getThread().add(__func__, [=, effectClient = mEffectClient]() {
+            effectClient->controlStatusChanged(controlGranted);
+        });
+        return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+    }
+
+    ::android::binder::Status enableStatusChanged(bool enabled) override {
+        getThread().add(__func__, [=, effectClient = mEffectClient]() {
+            effectClient->enableStatusChanged(enabled);
+        });
+        return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+    }
+
+    ::android::binder::Status commandExecuted(
+            int32_t cmdCode, const ::std::vector<uint8_t>& cmdData,
+            const ::std::vector<uint8_t>& replyData) override {
+        getThread().add(__func__, [=, effectClient = mEffectClient]() {
+            effectClient->commandExecuted(cmdCode, cmdData, replyData);
+        });
+        return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+    }
+
+    ::android::binder::Status framesProcessed(int32_t frames) override {
+        getThread().add(__func__, [=, effectClient = mEffectClient]() {
+            effectClient->framesProcessed(frames);
+        });
+        return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+    }
+
+    /**
+     * Returns true if the binder interface is local (in-proc).
+     *
+     * Move to a binder helper class?
+     */
+    static bool isLocalBinder(const sp<IInterface>& interface) {
+        const auto b = IInterface::asBinder(interface);
+        return b && b->localBinder();
+    }
+
+private:
+    const sp<IEffectClient> mEffectClient;
+
+    /**
+     * Returns the per-interface-descriptor CommandThread for in-proc binder transactions.
+     *
+     * Note: Remote RPC transactions to a given binder (kernel) node enter that node's
+     * async_todo list, which serializes all async operations to that binder node.
+     * Each transaction on the async_todo list must complete before the next one
+     * starts, even though there may be available threads in the process threadpool.
+     *
+     * For local transactions, we order all async requests entering
+     * the CommandThread.  We do not maintain a threadpool, though a future implementation
+     * could use a shared ThreadPool.
+     *
+     * By using a static here, all in-proc binder interfaces made async with
+     * EffectClientAsyncProxy will get the same CommandThread.
+     *
+     * @return CommandThread to use.
+     */
+    static audio_utils::CommandThread& getThread() {
+        [[clang::no_destroy]] static audio_utils::CommandThread commandThread;
+        return commandThread;
+    }
+};  // class EffectClientAsyncProxy
+
+}  // namespace android::media
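
A brief usage sketch for the new EffectClientAsyncProxy header above; MyLocalEffectClient is a hypothetical in-proc BnEffectClient implementation, and only makeIfNeeded() comes from the header. The factory wraps local binder callbacks so they are queued on the shared CommandThread rather than executing on the caller's thread, while remote binders pass through unchanged:

// Hypothetical in-proc callback implementation; only the wrapping call below
// reflects the new API.
sp<media::IEffectClient> client = sp<MyLocalEffectClient>::make();

// Returns an EffectClientAsyncProxy for local (in-proc) binders, or the
// original interface unchanged for remote binders.
sp<media::IEffectClient> asyncClient =
        media::EffectClientAsyncProxy::makeIfNeeded(client);
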
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 5a1e037..667e9ae 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -60,6 +60,7 @@
 #include "android/media/OpenInputResponse.h"
 #include "android/media/OpenOutputRequest.h"
 #include "android/media/OpenOutputResponse.h"
+#include "android/media/TrackInternalMuteInfo.h"
 #include "android/media/TrackSecondaryOutputInfo.h"
 
 namespace android {
@@ -228,10 +229,6 @@
                                     audio_io_handle_t output) = 0;
     virtual     status_t    setStreamMute(audio_stream_type_t stream, bool muted) = 0;
 
-    virtual     float       streamVolume(audio_stream_type_t stream,
-                                    audio_io_handle_t output) const = 0;
-    virtual     bool        streamMute(audio_stream_type_t stream) const = 0;
-
     // set audio mode
     virtual     status_t    setMode(audio_mode_t mode) = 0;
 
@@ -388,6 +385,11 @@
 
     virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                      struct audio_port_v7 *mixPort) const = 0;
+
+    virtual status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
+
+    virtual status_t resetReferencesForTest() = 0;
 };
 
 /**
@@ -418,9 +420,6 @@
     status_t setStreamVolume(audio_stream_type_t stream, float value,
                              audio_io_handle_t output) override;
     status_t setStreamMute(audio_stream_type_t stream, bool muted) override;
-    float streamVolume(audio_stream_type_t stream,
-                       audio_io_handle_t output) const override;
-    bool streamMute(audio_stream_type_t stream) const override;
     status_t setMode(audio_mode_t mode) override;
     status_t setMicMute(bool state) override;
     bool getMicMute() const override;
@@ -504,6 +503,9 @@
     status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) override;
     status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                              struct audio_port_v7 *mixPort) const override;
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+    status_t resetReferencesForTest() override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -540,8 +542,6 @@
             MASTER_MUTE = media::BnAudioFlingerService::TRANSACTION_masterMute,
             SET_STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_setStreamVolume,
             SET_STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_setStreamMute,
-            STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_streamVolume,
-            STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_streamMute,
             SET_MODE = media::BnAudioFlingerService::TRANSACTION_setMode,
             SET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_setMicMute,
             GET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_getMicMute,
@@ -606,6 +606,9 @@
             GET_AUDIO_POLICY_CONFIG =
                     media::BnAudioFlingerService::TRANSACTION_getAudioPolicyConfig,
             GET_AUDIO_MIX_PORT = media::BnAudioFlingerService::TRANSACTION_getAudioMixPort,
+            SET_TRACKS_INTERNAL_MUTE = media::BnAudioFlingerService::TRANSACTION_setTracksInternalMute,
+            RESET_REFERENCES_FOR_TEST =
+                    media::BnAudioFlingerService::TRANSACTION_resetReferencesForTest,
         };
 
     protected:
@@ -661,9 +664,6 @@
     Status setStreamVolume(media::audio::common::AudioStreamType stream,
                            float value, int32_t output) override;
     Status setStreamMute(media::audio::common::AudioStreamType stream, bool muted) override;
-    Status streamVolume(media::audio::common::AudioStreamType stream,
-                        int32_t output, float* _aidl_return) override;
-    Status streamMute(media::audio::common::AudioStreamType stream, bool* _aidl_return) override;
     Status setMode(media::audio::common::AudioMode mode) override;
     Status setMicMute(bool state) override;
     Status getMicMute(bool* _aidl_return) override;
@@ -742,6 +742,9 @@
     Status getAudioMixPort(const media::AudioPortFw& devicePort,
                            const media::AudioPortFw& mixPort,
                            media::AudioPortFw* _aidl_return) override;
+    Status setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+    Status resetReferencesForTest() override;
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
 };
diff --git a/media/libaudioclient/include/media/ToneGenerator.h b/media/libaudioclient/include/media/ToneGenerator.h
index 46e9501..3e515fc 100644
--- a/media/libaudioclient/include/media/ToneGenerator.h
+++ b/media/libaudioclient/include/media/ToneGenerator.h
@@ -225,11 +225,14 @@
         TONE_INDIA_CONGESTION,      // Congestion tone: 400 Hz, 250ms ON, 250ms OFF...
         TONE_INDIA_CALL_WAITING,    // Call waiting tone: 400 Hz, tone repeated in a 0.2s on, 0.1s off, 0.2s on, 7.5s off pattern.
         TONE_INDIA_RINGTONE,        // Ring tone: 400 Hz tone modulated with 25Hz, 0.4 on 0.2 off 0.4 on 2..0 off
-         // TAIWAN supervisory tones
+        // TAIWAN supervisory tones
         TONE_TW_RINGTONE,           // Ring Tone: 440 Hz + 480 Hz repeated with pattern 1s on, 3s off.
-         // NEW ZEALAND supervisory tones
+        // NEW ZEALAND supervisory tones
         TONE_NZ_CALL_WAITING,       // Call waiting tone: 400 Hz,  0.2s ON, 3s OFF,
                                     //        0.2s ON, 3s OFF, 0.2s ON, 3s OFF, 0.2s ON
+        // MALAYSIA supervisory tones
+        TONE_MY_CONGESTION,         // Congestion tone: 425 Hz, 500ms ON, 250ms OFF...
+        TONE_MY_RINGTONE,           // Ring tone: 425 Hz, 400ms ON 200ms OFF 400ms ON 2s OFF..
         NUM_ALTERNATE_TONES
     };
 
@@ -244,6 +247,7 @@
         INDIA,
         TAIWAN,
         NZ,
+        MY,
         CEPT,
         NUM_REGIONS
     };
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index f72ac89..ddf14a3 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -22,8 +23,8 @@
     ],
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
     },
 }
@@ -31,8 +32,8 @@
 cc_defaults {
     name: "audio_aidl_conversion_test_defaults",
     defaults: [
-        "libaudioclient_tests_defaults",
         "latest_android_media_audio_common_types_cpp_static",
+        "libaudioclient_tests_defaults",
     ],
     static_libs: [
         "audioclient-types-aidl-cpp",
@@ -109,9 +110,9 @@
         "libcgrouprc",
         "libdl",
         "libmedia",
+        "libmedia_helper",
         "libmediametrics",
         "libmediautils",
-        "libmedia_helper",
         "libnblog",
         "libprocessgroup",
         "libshmemcompat",
@@ -121,6 +122,7 @@
     ],
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -132,7 +134,14 @@
         "libaudiomanager",
         "libaudiopolicy",
     ],
+    cflags: [
+        "-Wthread-safety",
+    ],
     data: ["bbb*.raw"],
+    srcs: [
+        "audio_test_utils.cpp",
+        "test_execution_tracer.cpp",
+    ],
     test_config_template: "audio_test_template.xml",
 }
 
@@ -141,7 +150,6 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audiorecord_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
 
@@ -150,7 +158,6 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audiotrack_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
 
@@ -159,7 +166,6 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audioeffect_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
 
@@ -172,7 +178,6 @@
     ],
     srcs: [
         "audioeffect_analyser.cpp",
-        "audio_test_utils.cpp",
     ],
     static_libs: [
         "libpffft",
@@ -184,7 +189,6 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audiorouting_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
 
@@ -193,14 +197,15 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audioclient_serialization_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
 
 cc_test {
     name: "trackplayerbase_tests",
     defaults: ["libaudioclient_gtests_defaults"],
-    srcs: ["trackplayerbase_tests.cpp"],
+    srcs: [
+        "trackplayerbase_tests.cpp",
+    ],
 }
 
 cc_test {
@@ -208,6 +213,5 @@
     defaults: ["libaudioclient_gtests_defaults"],
     srcs: [
         "audiosystem_tests.cpp",
-        "audio_test_utils.cpp",
     ],
 }
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 0be1d7e..7f55e48 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -483,28 +483,8 @@
                                  AudioDeviceAddress::make<AudioDeviceAddress::Tag::alsa>(
                                          std::vector<int32_t>{1, 2}))));
 
-TEST(AnonymizedBluetoothAddressRoundTripTest, Legacy2Aidl2Legacy) {
-    const std::vector<uint8_t> sAnonymizedAidlAddress =
-            std::vector<uint8_t>{0xFD, 0xFF, 0xFF, 0xFF, 0xAB, 0xCD};
-    const std::string sAnonymizedLegacyAddress = std::string("XX:XX:XX:XX:AB:CD");
-    auto device = legacy2aidl_audio_device_AudioDevice(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
-                                                       sAnonymizedLegacyAddress);
-    ASSERT_TRUE(device.ok());
-    ASSERT_EQ(AudioDeviceAddress::Tag::mac, device.value().address.getTag());
-    ASSERT_EQ(sAnonymizedAidlAddress, device.value().address.get<AudioDeviceAddress::mac>());
-
-    audio_devices_t legacyType;
-    std::string legacyAddress;
-    status_t status =
-            aidl2legacy_AudioDevice_audio_device(device.value(), &legacyType, &legacyAddress);
-    ASSERT_EQ(OK, status);
-    EXPECT_EQ(legacyType, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP);
-    EXPECT_EQ(sAnonymizedLegacyAddress, legacyAddress);
-}
-
 class AudioFormatDescriptionRoundTripTest : public testing::TestWithParam<AudioFormatDescription> {
 };
-
 TEST_P(AudioFormatDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
     const auto initial = GetParam();
     auto conv = aidl2legacy_AudioFormatDescription_audio_format_t(initial);
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index ee5489b..1599839 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -28,25 +28,35 @@
 
 void OnAudioDeviceUpdateNotifier::onAudioDeviceUpdate(audio_io_handle_t audioIo,
                                                       audio_port_handle_t deviceId) {
-    std::unique_lock<std::mutex> lock{mMutex};
     ALOGI("%s: audioIo=%d deviceId=%d", __func__, audioIo, deviceId);
-    mAudioIo = audioIo;
-    mDeviceId = deviceId;
+    {
+        std::lock_guard lock(mMutex);
+        mAudioIo = audioIo;
+        mDeviceId = deviceId;
+    }
     mCondition.notify_all();
 }
 
 status_t OnAudioDeviceUpdateNotifier::waitForAudioDeviceCb(audio_port_handle_t expDeviceId) {
-    std::unique_lock<std::mutex> lock{mMutex};
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
     if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
         (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
         mCondition.wait_for(lock, std::chrono::milliseconds(500));
         if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
-            (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId))
+            (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
             return TIMED_OUT;
+        }
     }
     return OK;
 }
 
+std::pair<audio_io_handle_t, audio_port_handle_t>
+OnAudioDeviceUpdateNotifier::getLastPortAndDevice() const {
+    std::lock_guard lock(mMutex);
+    return {mAudioIo, mDeviceId};
+}
+
 AudioPlayback::AudioPlayback(uint32_t sampleRate, audio_format_t format,
                              audio_channel_mask_t channelMask, audio_output_flags_t flags,
                              audio_session_t sessionId, AudioTrack::transfer_type transferType,
@@ -147,9 +157,8 @@
 }
 
 void AudioPlayback::onBufferEnd() {
-    std::unique_lock<std::mutex> lock{mMutex};
+    std::lock_guard lock(mMutex);
     mStopPlaying = true;
-    mCondition.notify_all();
 }
 
 status_t AudioPlayback::fillBuffer() {
@@ -187,7 +196,12 @@
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
     int counter = 0;
     size_t totalFrameCount = mMemCapacity / mTrack->frameSize();
-    while (!mStopPlaying && counter < maxTries) {
+    bool stopPlaying;
+    {
+        std::lock_guard lock(mMutex);
+        stopPlaying = mStopPlaying;
+    }
+    while (!stopPlaying && counter < maxTries) {
         uint32_t currPosition;
         mTrack->getPosition(&currPosition);
         if (currPosition >= totalFrameCount) counter++;
@@ -213,7 +227,10 @@
             mTrack->start();
         }
         std::this_thread::sleep_for(std::chrono::milliseconds(WAIT_PERIOD_MS));
+        std::lock_guard lock(mMutex);
+        stopPlaying = mStopPlaying;
     }
+    std::lock_guard lock(mMutex);
     if (!mStopPlaying && counter == maxTries) return TIMED_OUT;
     return OK;
 }
@@ -228,8 +245,10 @@
 }
 
 void AudioPlayback::stop() {
-    std::unique_lock<std::mutex> lock{mMutex};
-    mStopPlaying = true;
+    {
+        std::lock_guard lock(mMutex);
+        mStopPlaying = true;
+    }
     if (mState != PLAY_STOPPED && mState != PLAY_NO_INIT) {
         int32_t msec = 0;
         (void)mTrack->pendingDuration(&msec);
@@ -257,10 +276,13 @@
         return 0;
     }
 
-    // no more frames to read
-    if (mNumFramesReceived >= mNumFramesToRecord || mStopRecording) {
-        mStopRecording = true;
-        return 0;
+    {
+        std::lock_guard l(mMutex);
+        // no more frames to read
+        if (mNumFramesReceived >= mNumFramesToRecord || mStopRecording) {
+            mStopRecording = true;
+            return 0;
+        }
     }
 
     int64_t timeUs = 0, position = 0, timeNs = 0;
@@ -272,6 +294,7 @@
         ts.getBestTimestamp(&position, &timeNs, ExtendedTimestamp::TIMEBASE_MONOTONIC, &location) ==
                 OK) {
         // Use audio timestamp.
+        std::lock_guard l(mMutex);
         timeUs = timeNs / 1000 -
                  (position - mNumFramesReceived + mNumFramesLost) * usPerSec / mSampleRate;
     } else {
@@ -300,6 +323,7 @@
         } else {
             numLostBytes = 0;
         }
+        std::lock_guard l(mMutex);
         const int64_t timestampUs =
                 ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
                 mRecord->getSampleRate();
@@ -313,6 +337,7 @@
     if (buffer.size() == 0) {
         ALOGW("Nothing is available from AudioRecord callback buffer");
     } else {
+        std::lock_guard l(mMutex);
         const size_t bufferSize = buffer.size();
         const int64_t timestampUs =
                 ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
@@ -324,9 +349,12 @@
     }
 
     if (tmpQueue.size() > 0) {
-        std::unique_lock<std::mutex> lock{mMutex};
-        for (auto it = tmpQueue.begin(); it != tmpQueue.end(); it++)
-            mBuffersReceived.push_back(std::move(*it));
+        {
+            std::lock_guard lock(mMutex);
+            mBuffersReceived.insert(mBuffersReceived.end(),
+                                    std::make_move_iterator(tmpQueue.begin()),
+                                    std::make_move_iterator(tmpQueue.end()));
+        }
         mCondition.notify_all();
     }
     return buffer.size();
@@ -334,17 +362,24 @@
 
 void AudioCapture::onOverrun() {
     ALOGV("received event overrun");
-    mBufferOverrun = true;
 }
 
 void AudioCapture::onMarker(uint32_t markerPosition) {
     ALOGV("received Callback at position %d", markerPosition);
-    mReceivedCbMarkerAtPosition = markerPosition;
+    {
+        std::lock_guard l(mMutex);
+        mReceivedCbMarkerAtPosition = markerPosition;
+    }
+    mMarkerCondition.notify_all();
 }
 
 void AudioCapture::onNewPos(uint32_t markerPosition) {
     ALOGV("received Callback at position %d", markerPosition);
-    mReceivedCbMarkerCount++;
+    {
+        std::lock_guard l(mMutex);
+        mReceivedCbMarkerCount = mReceivedCbMarkerCount.value_or(0) + 1;
+    }
+    mMarkerCondition.notify_all();
 }
 
 void AudioCapture::onNewIAudioRecord() {
@@ -362,20 +397,7 @@
       mFlags(flags),
       mSessionId(sessionId),
       mTransferType(transferType),
-      mAttributes(attributes) {
-    mFrameCount = 0;
-    mNotificationFrames = 0;
-    mNumFramesToRecord = 0;
-    mNumFramesReceived = 0;
-    mNumFramesLost = 0;
-    mBufferOverrun = false;
-    mMarkerPosition = 0;
-    mMarkerPeriod = 0;
-    mReceivedCbMarkerAtPosition = -1;
-    mReceivedCbMarkerCount = 0;
-    mState = REC_NO_INIT;
-    mStopRecording = false;
-}
+      mAttributes(attributes) {}
 
 AudioCapture::~AudioCapture() {
     if (mOutFileFd > 0) close(mOutFileFd);
@@ -484,7 +506,10 @@
 
 status_t AudioCapture::stop() {
     status_t status = OK;
-    mStopRecording = true;
+    {
+        std::lock_guard l(mMutex);
+        mStopRecording = true;
+    }
     if (mState != REC_STOPPED && mState != REC_NO_INIT) {
         if (mInputSource != AUDIO_SOURCE_DEFAULT) {
             bool state = false;
@@ -503,25 +528,32 @@
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
     int counter = 0;
     size_t nonContig = 0;
-    while (mNumFramesReceived < mNumFramesToRecord) {
+    int64_t numFramesReceived;
+    {
+        std::lock_guard l(mMutex);
+        numFramesReceived = mNumFramesReceived;
+    }
+    while (numFramesReceived < mNumFramesToRecord) {
         AudioRecord::Buffer recordBuffer;
         recordBuffer.frameCount = mNotificationFrames;
         status_t status = mRecord->obtainBuffer(&recordBuffer, 1, &nonContig);
         if (OK == status) {
             const int64_t timestampUs =
-                    ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
+                    ((1000000LL * numFramesReceived) + (mRecord->getSampleRate() >> 1)) /
                     mRecord->getSampleRate();
             RawBuffer buff{-1, timestampUs, static_cast<int32_t>(recordBuffer.size())};
             memcpy(buff.mData.get(), recordBuffer.data(), recordBuffer.size());
             buffer = std::move(buff);
-            mNumFramesReceived += recordBuffer.size() / mRecord->frameSize();
+            numFramesReceived += recordBuffer.size() / mRecord->frameSize();
             mRecord->releaseBuffer(&recordBuffer);
             counter = 0;
         } else if (WOULD_BLOCK == status) {
             // if not received a buffer for MAX_WAIT_TIME_MS, something has gone wrong
-            if (counter == maxTries) return TIMED_OUT;
-            counter++;
+            if (counter++ == maxTries) status = TIMED_OUT;
         }
+        std::lock_guard l(mMutex);
+        mNumFramesReceived = numFramesReceived;
+        if (TIMED_OUT == status) return status;
     }
     return OK;
 }
@@ -530,7 +562,8 @@
     if (REC_STARTED != mState) return INVALID_OPERATION;
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
     int counter = 0;
-    std::unique_lock<std::mutex> lock{mMutex};
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
     while (mBuffersReceived.empty() && !mStopRecording && counter < maxTries) {
         mCondition.wait_for(lock, std::chrono::milliseconds(WAIT_PERIOD_MS));
         counter++;
@@ -548,7 +581,12 @@
 status_t AudioCapture::audioProcess() {
     RawBuffer buffer;
     status_t status = OK;
-    while (mNumFramesReceived < mNumFramesToRecord && status == OK) {
+    int64_t numFramesReceived;
+    {
+        std::lock_guard l(mMutex);
+        numFramesReceived = mNumFramesReceived;
+    }
+    while (numFramesReceived < mNumFramesToRecord && status == OK) {
         if (mTransferType == AudioRecord::TRANSFER_CALLBACK)
             status = obtainBufferCb(buffer);
         else
@@ -557,20 +595,64 @@
             const char* ptr = static_cast<const char*>(static_cast<void*>(buffer.mData.get()));
             write(mOutFileFd, ptr, buffer.mCapacity);
         }
+        std::lock_guard l(mMutex);
+        numFramesReceived = mNumFramesReceived;
     }
     return OK;
 }
 
+uint32_t AudioCapture::getMarkerPeriod() const {
+    std::lock_guard l(mMutex);
+    return mMarkerPeriod;
+}
+
+uint32_t AudioCapture::getMarkerPosition() const {
+    std::lock_guard l(mMutex);
+    return mMarkerPosition;
+}
+
+void AudioCapture::setMarkerPeriod(uint32_t markerPeriod) {
+    std::lock_guard l(mMutex);
+    mMarkerPeriod = markerPeriod;
+}
+
+void AudioCapture::setMarkerPosition(uint32_t markerPosition) {
+    std::lock_guard l(mMutex);
+    mMarkerPosition = markerPosition;
+}
+
+uint32_t AudioCapture::waitAndGetReceivedCbMarkerAtPosition() const {
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
+    mMarkerCondition.wait_for(lock, std::chrono::seconds(3), [this]() {
+        android::base::ScopedLockAssertion lock_assertion(mMutex);
+        return mReceivedCbMarkerAtPosition.has_value();
+    });
+    return mReceivedCbMarkerAtPosition.value_or(~0);
+}
+
+uint32_t AudioCapture::waitAndGetReceivedCbMarkerCount() const {
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
+    mMarkerCondition.wait_for(lock, std::chrono::seconds(3), [this]() {
+        android::base::ScopedLockAssertion lock_assertion(mMutex);
+        return mReceivedCbMarkerCount.has_value();
+    });
+    return mReceivedCbMarkerCount.value_or(0);
+}
+
 status_t listAudioPorts(std::vector<audio_port_v7>& portsVec) {
     int attempts = 5;
     status_t status;
     unsigned int generation1, generation;
-    unsigned int numPorts = 0;
+    unsigned int numPorts;
     do {
         if (attempts-- < 0) {
             status = TIMED_OUT;
             break;
         }
+        // query for number of ports.
+        numPorts = 0;
         status = AudioSystem::listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_NONE, &numPorts,
                                              nullptr, &generation1);
         if (status != NO_ERROR) {
@@ -622,12 +704,14 @@
     int attempts = 5;
     status_t status;
     unsigned int generation1, generation;
-    unsigned int numPatches = 0;
+    unsigned int numPatches;
     do {
         if (attempts-- < 0) {
             status = TIMED_OUT;
             break;
         }
+        // query for number of patches.
+        numPatches = 0;
         status = AudioSystem::listAudioPatches(&numPatches, nullptr, &generation1);
         if (status != NO_ERROR) {
             ALOGE("AudioSystem::listAudioPatches returned error %d", status);
diff --git a/media/libaudioclient/tests/audio_test_utils.h b/media/libaudioclient/tests/audio_test_utils.h
index 76e4642..022ecf3 100644
--- a/media/libaudioclient/tests/audio_test_utils.h
+++ b/media/libaudioclient/tests/audio_test_utils.h
@@ -19,14 +19,13 @@
 
 #include <sys/stat.h>
 #include <unistd.h>
-#include <atomic>
-#include <chrono>
-#include <cinttypes>
 #include <deque>
 #include <memory>
 #include <mutex>
 #include <thread>
+#include <utility>
 
+#include <android-base/thread_annotations.h>
 #include <binder/MemoryDealer.h>
 #include <media/AidlConversion.h>
 #include <media/AudioRecord.h>
@@ -63,13 +62,15 @@
 
 class OnAudioDeviceUpdateNotifier : public AudioSystem::AudioDeviceCallback {
   public:
-    audio_io_handle_t mAudioIo = AUDIO_IO_HANDLE_NONE;
-    audio_port_handle_t mDeviceId = AUDIO_PORT_HANDLE_NONE;
-    std::mutex mMutex;
-    std::condition_variable mCondition;
-
-    void onAudioDeviceUpdate(audio_io_handle_t audioIo, audio_port_handle_t deviceId);
+    void onAudioDeviceUpdate(audio_io_handle_t audioIo, audio_port_handle_t deviceId) override;
     status_t waitForAudioDeviceCb(audio_port_handle_t expDeviceId = AUDIO_PORT_HANDLE_NONE);
+    std::pair<audio_io_handle_t, audio_port_handle_t> getLastPortAndDevice() const;
+
+  private:
+    audio_io_handle_t mAudioIo GUARDED_BY(mMutex) = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t mDeviceId GUARDED_BY(mMutex) = AUDIO_PORT_HANDLE_NONE;
+    mutable std::mutex mMutex;
+    std::condition_variable mCondition;
 };
 
 // Simple AudioPlayback class.
@@ -86,15 +87,14 @@
     status_t create();
     sp<AudioTrack> getAudioTrackHandle();
     status_t start();
-    status_t waitForConsumption(bool testSeek = false);
+    status_t waitForConsumption(bool testSeek = false) EXCLUDES(mMutex);
     status_t fillBuffer();
     status_t onProcess(bool testSeek = false);
-    virtual void onBufferEnd() override;
-    void stop();
+    void onBufferEnd() override EXCLUDES(mMutex);
+    void stop() EXCLUDES(mMutex);
 
-    bool mStopPlaying;
-    std::mutex mMutex;
-    std::condition_variable mCondition;
+    bool mStopPlaying GUARDED_BY(mMutex);
+    mutable std::mutex mMutex;
 
     enum State {
         PLAY_NO_INIT,
@@ -144,10 +144,10 @@
                  AudioRecord::transfer_type transferType = AudioRecord::TRANSFER_CALLBACK,
                  const audio_attributes_t* attributes = nullptr);
     ~AudioCapture();
-    size_t onMoreData(const AudioRecord::Buffer& buffer) override;
+    size_t onMoreData(const AudioRecord::Buffer& buffer) override EXCLUDES(mMutex);
     void onOverrun() override;
-    void onMarker(uint32_t markerPosition) override;
-    void onNewPos(uint32_t newPos) override;
+    void onMarker(uint32_t markerPosition) override EXCLUDES(mMutex);
+    void onNewPos(uint32_t newPos) override EXCLUDES(mMutex);
     void onNewIAudioRecord() override;
     status_t create();
     status_t setRecordDuration(float durationInSec);
@@ -156,21 +156,20 @@
     sp<AudioRecord> getAudioRecordHandle();
     status_t start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE,
                    audio_session_t triggerSession = AUDIO_SESSION_NONE);
-    status_t obtainBufferCb(RawBuffer& buffer);
-    status_t obtainBuffer(RawBuffer& buffer);
-    status_t audioProcess();
-    status_t stop();
+    status_t obtainBufferCb(RawBuffer& buffer) EXCLUDES(mMutex);
+    status_t obtainBuffer(RawBuffer& buffer) EXCLUDES(mMutex);
+    status_t audioProcess() EXCLUDES(mMutex);
+    status_t stop() EXCLUDES(mMutex);
+    uint32_t getMarkerPeriod() const EXCLUDES(mMutex);
+    uint32_t getMarkerPosition() const EXCLUDES(mMutex);
+    void setMarkerPeriod(uint32_t markerPeriod) EXCLUDES(mMutex);
+    void setMarkerPosition(uint32_t markerPosition) EXCLUDES(mMutex);
+    uint32_t waitAndGetReceivedCbMarkerAtPosition() const EXCLUDES(mMutex);
+    uint32_t waitAndGetReceivedCbMarkerCount() const EXCLUDES(mMutex);
 
-    uint32_t mFrameCount;
-    uint32_t mNotificationFrames;
-    int64_t mNumFramesToRecord;
-    int64_t mNumFramesReceived;
-    int64_t mNumFramesLost;
-    uint32_t mMarkerPosition;
-    uint32_t mMarkerPeriod;
-    uint32_t mReceivedCbMarkerAtPosition;
-    uint32_t mReceivedCbMarkerCount;
-    bool mBufferOverrun;
+    uint32_t mFrameCount = 0;
+    uint32_t mNotificationFrames = 0;
+    int64_t mNumFramesToRecord = 0;
 
     enum State {
         REC_NO_INIT,
@@ -191,14 +190,23 @@
 
     size_t mMaxBytesPerCallback = 2048;
     sp<AudioRecord> mRecord;
-    State mState;
-    bool mStopRecording;
+    State mState = REC_NO_INIT;
+    bool mStopRecording GUARDED_BY(mMutex) = false;
     std::string mFileName;
     int mOutFileFd = -1;
 
-    std::mutex mMutex;
+    mutable std::mutex mMutex;
     std::condition_variable mCondition;
-    std::deque<RawBuffer> mBuffersReceived;
+    std::deque<RawBuffer> mBuffersReceived GUARDED_BY(mMutex);
+
+    mutable std::condition_variable mMarkerCondition;
+    uint32_t mMarkerPeriod GUARDED_BY(mMutex) = 0;
+    uint32_t mMarkerPosition GUARDED_BY(mMutex) = 0;
+    std::optional<uint32_t> mReceivedCbMarkerCount GUARDED_BY(mMutex);
+    std::optional<uint32_t> mReceivedCbMarkerAtPosition GUARDED_BY(mMutex);
+
+    int64_t mNumFramesReceived GUARDED_BY(mMutex) = 0;
+    int64_t mNumFramesLost GUARDED_BY(mMutex) = 0;
 };
 
 #endif  // AUDIO_TEST_UTILS_H_
diff --git a/media/libaudioclient/tests/audioclient_serialization_tests.cpp b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
index 707b9b3..5debabc 100644
--- a/media/libaudioclient/tests/audioclient_serialization_tests.cpp
+++ b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
@@ -15,18 +15,23 @@
  */
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "AudioClientSerializationUnitTests"
+#define LOG_TAG "AudioClientSerializationTests"
 
 #include <cstdint>
 #include <cstdlib>
 #include <ctime>
-
-#include <gtest/gtest.h>
+#include <vector>
 
 #include <android_audio_policy_configuration_V7_0-enums.h>
+#include <gtest/gtest.h>
+#include <media/AudioPolicy.h>
+#include <media/AudioProductStrategy.h>
+#include <media/AudioVolumeGroup.h>
+#include <media/VolumeGroupAttributes.h>
+#include <system/audio.h>
 #include <xsdc/XsdcSupport.h>
 
-#include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 namespace xsd {
@@ -310,3 +315,9 @@
 // audioStream
 INSTANTIATE_TEST_SUITE_P(SerializationParameterizedTests, AudioAttributesParameterizedTest,
                          ::testing::Combine(testing::ValuesIn(kStreamtypes)));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/audioeffect_analyser.cpp b/media/libaudioclient/tests/audioeffect_analyser.cpp
index 94accae..199fb8b 100644
--- a/media/libaudioclient/tests/audioeffect_analyser.cpp
+++ b/media/libaudioclient/tests/audioeffect_analyser.cpp
@@ -14,23 +14,26 @@
  * limitations under the License.
  */
 
-// #define LOG_NDEBUG 0
-#define LOG_TAG "AudioEffectAnalyser"
-
-#include <android-base/file.h>
-#include <android-base/stringprintf.h>
-#include <gtest/gtest.h>
-#include <media/AudioEffect.h>
-#include <system/audio_effects/effect_bassboost.h>
-#include <system/audio_effects/effect_equalizer.h>
 #include <fstream>
 #include <iostream>
 #include <string>
 #include <tuple>
 #include <vector>
 
+// #define LOG_NDEBUG 0
+#define LOG_TAG "AudioEffectAnalyser"
+
+#include <android-base/file.h>
+#include <android-base/stringprintf.h>
+#include <binder/ProcessState.h>
+#include <gtest/gtest.h>
+#include <media/AudioEffect.h>
+#include <system/audio_effects/effect_bassboost.h>
+#include <system/audio_effects/effect_equalizer.h>
+
 #include "audio_test_utils.h"
 #include "pffft.hpp"
+#include "test_execution_tracer.h"
 
 #define CHECK_OK(expr, msg) \
     mStatus = (expr);       \
@@ -59,6 +62,15 @@
 constexpr int kNPointFFT = 16384;
 constexpr float kBinWidth = (float)kSamplingFrequency / kNPointFFT;
 
+// frequency used to generate testing tone
+constexpr uint32_t kTestFrequency = 1400;
+
+// Tolerance of audio gain difference in dB; 0.1 dB corresponds to an amplitude ratio of
+// 10^(0.1/20), roughly 1.0116, i.e. about a 1.2% difference in amplitude.
+constexpr float kAudioGainDiffTolerancedB = .1f;
+
+const std::string kDataTempPath = "/data/local/tmp";
+
 const char* gPackageName = "AudioEffectAnalyser";
 
 static_assert(kPrimeDurationInSec + 2 * kNPointFFT / kSamplingFrequency < kCaptureDurationSec,
@@ -174,21 +186,30 @@
     return effect;
 }
 
-void computeFilterGainsAtTones(float captureDuration, int nPointFft, std::vector<int>& binOffsets,
-                               float* inputMag, float* gaindB, const char* res,
-                               audio_session_t sessionId) {
+void computeFilterGainsAtTones(float captureDuration, int nPointFft, std::vector<int> binOffsets,
+                               float* inputMag, float* gaindB, const std::string res,
+                               audio_session_t sessionId, const std::string res2 = "",
+                               audio_session_t sessionId2 = AUDIO_SESSION_NONE) {
     int totalFrameCount = captureDuration * kSamplingFrequency;
     auto output = pffft::AlignedVector<float>(totalFrameCount);
     auto fftOutput = pffft::AlignedVector<float>(nPointFft);
-    PlaybackEnv argsP;
-    argsP.mRes = std::string{res};
+    PlaybackEnv argsP, argsP2;
+    argsP.mRes = res;
     argsP.mSessionId = sessionId;
     CaptureEnv argsR;
     argsR.mCaptureDuration = captureDuration;
     std::thread playbackThread(&PlaybackEnv::play, &argsP);
+    std::optional<std::thread> playbackThread2;
+    if (res2 != "") {
+        argsP2 = {.mSessionId = sessionId2, .mRes = res2};
+        playbackThread2 = std::thread(&PlaybackEnv::play, &argsP2);
+    }
     std::thread captureThread(&CaptureEnv::capture, &argsR);
     captureThread.join();
     playbackThread.join();
+    if (playbackThread2 != std::nullopt) {
+        playbackThread2->join();
+    }
     ASSERT_EQ(OK, argsR.mStatus) << argsR.mMsg;
     ASSERT_EQ(OK, argsP.mStatus) << argsP.mMsg;
     ASSERT_FALSE(argsR.mDumpFileName.empty()) << "recorded not written to file";
@@ -207,7 +228,11 @@
         auto k = binOffsets[i];
         auto outputMag = sqrt((fftOutput[k * 2] * fftOutput[k * 2]) +
                               (fftOutput[k * 2 + 1] * fftOutput[k * 2 + 1]));
-        gaindB[i] = 20 * log10(outputMag / inputMag[i]);
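+        // With a null inputMag, report the absolute output magnitude in dB instead of the gain
+        // relative to the input tone.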
+        if (inputMag == nullptr) {
+            gaindB[i] = 20 * log10(outputMag);
+        } else {
+            gaindB[i] = 20 * log10(outputMag / inputMag[i]);
+        }
     }
 }
 
@@ -279,7 +304,7 @@
         inputMag[i] = sqrt((fftInput[k * 2] * fftInput[k * 2]) +
                            (fftInput[k * 2 + 1] * fftInput[k * 2 + 1]));
     }
-    TemporaryFile tf("/data/local/tmp");
+    TemporaryFile tf(kDataTempPath);
     close(tf.release());
     std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
     fout.write((char*)input.data(), input.size() * sizeof(input[0]));
@@ -383,7 +408,7 @@
         inputMag[i] = sqrt((fftInput[k * 2] * fftInput[k * 2]) +
                            (fftInput[k * 2 + 1] * fftInput[k * 2 + 1]));
     }
-    TemporaryFile tf("/data/local/tmp");
+    TemporaryFile tf(kDataTempPath);
     close(tf.release());
     std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
     fout.write((char*)input.data(), input.size() * sizeof(input[0]));
@@ -393,7 +418,7 @@
     memset(gainWithOutFilter, 0, sizeof(gainWithOutFilter));
     ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(kCaptureDurationSec, kNPointFFT, binOffsets,
                                                       inputMag, gainWithOutFilter, tf.path,
-                                                      AUDIO_SESSION_OUTPUT_MIX));
+                                                      AUDIO_SESSION_NONE));
     float diffA = gainWithOutFilter[0] - gainWithOutFilter[1];
     float prevGain = -100.f;
     for (auto strength = 150; strength < 1000; strength += strengthSupported ? 150 : 1000) {
@@ -417,3 +442,60 @@
         prevGain = diffB;
     }
 }
+
+// Assert that a silent audio session with an effect attached does not override the output audio.
+TEST(AudioEffectTest, SilentAudioEffectSessionNotOverrideOutput) {
+    audio_session_t sessionId =
+            (audio_session_t)AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    sp<AudioEffect> bassboost = createEffect(SL_IID_BASSBOOST, sessionId);
+    if ((bassboost->descriptor().flags & EFFECT_FLAG_HW_ACC_MASK) != 0) {
+        GTEST_SKIP() << "effect processed output inaccessible, skipping test";
+    }
+    ASSERT_EQ(OK, bassboost->initCheck());
+    ASSERT_EQ(NO_ERROR, bassboost->setEnabled(true));
+
+    const auto bin = roundToFreqCenteredToFftBin(kBinWidth, kTestFrequency);
+    const int binIndex = std::get<0 /* index */>(bin);
+    const int binFrequency = std::get<1 /* freq */>(bin);
+
+    const int totalFrameCount = kSamplingFrequency * kPlayBackDurationSec;
+    // input for effect module
+    auto silentAudio = pffft::AlignedVector<float>(totalFrameCount);
+    auto input = pffft::AlignedVector<float>(totalFrameCount);
+    generateMultiTone({binFrequency}, kSamplingFrequency, kPlayBackDurationSec, kDefAmplitude,
+                      input.data(), totalFrameCount);
+    TemporaryFile tf(kDataTempPath);
+    close(tf.release());
+    std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
+    fout.write((char*)input.data(), input.size() * sizeof(input[0]));
+    fout.close();
+
+    // Play the non-silent audio file on AUDIO_SESSION_NONE.
+    float audioGain, audioPlusSilentEffectGain;
+    ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(kCaptureDurationSec, kNPointFFT, {binIndex},
+                                                      nullptr, &audioGain, tf.path,
+                                                      AUDIO_SESSION_NONE));
+    EXPECT_FALSE(std::isinf(audioGain)) << "output gain should not be -inf";
+
+    TemporaryFile silentFile(kDataTempPath);
+    close(silentFile.release());
+    std::ofstream fSilent(silentFile.path, std::ios::out | std::ios::binary);
+    fSilent.write((char*)silentAudio.data(), silentAudio.size() * sizeof(silentAudio[0]));
+    fSilent.close();
+    // Play the non-silent audio file on AUDIO_SESSION_NONE and the silent audio on sessionId;
+    // expect the new output gain to be almost the same as in the previous playback.
+    ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(
+            kCaptureDurationSec, kNPointFFT, {binIndex}, nullptr, &audioPlusSilentEffectGain,
+            tf.path, AUDIO_SESSION_NONE, silentFile.path, sessionId));
+    EXPECT_FALSE(std::isinf(audioPlusSilentEffectGain))
+            << "output might have been overwritten in effect accumulate mode";
+    EXPECT_NEAR(audioGain, audioPlusSilentEffectGain, kAudioGainDiffTolerancedB)
+            << " output gain should almost same with one more silent audio stream";
+}
+
+int main(int argc, char** argv) {
+    android::ProcessState::self()->startThreadPool();
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/audioeffect_tests.cpp b/media/libaudioclient/tests/audioeffect_tests.cpp
index e12ae23..bedeff9 100644
--- a/media/libaudioclient/tests/audioeffect_tests.cpp
+++ b/media/libaudioclient/tests/audioeffect_tests.cpp
@@ -15,8 +15,9 @@
  */
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "AudioEffectUnitTests"
+#define LOG_TAG "AudioEffectTests"
 
+#include <binder/ProcessState.h>
 #include <gtest/gtest.h>
 #include <media/AudioEffect.h>
 #include <system/audio_effects/effect_hapticgenerator.h>
@@ -24,6 +25,7 @@
 #include <system/audio_effects/effect_visualizer.h>
 
 #include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 
@@ -68,8 +70,8 @@
     return effect;
 }
 
-status_t isEffectExistsOnAudioSession(const effect_uuid_t* type, const effect_uuid_t* uuid,
-                                      int priority, audio_session_t sessionId) {
+status_t createAndInitCheckEffect(const effect_uuid_t* type, const effect_uuid_t* uuid,
+                                  int priority, audio_session_t sessionId) {
     sp<AudioEffect> effect = createEffect(type, uuid, priority, sessionId);
     return effect->initCheck();
 }
@@ -270,10 +272,9 @@
     EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                          capture->getAudioRecordHandle()))
             << "Effect should not have been default on record. " << type;
-    EXPECT_EQ(NO_ERROR,
-              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                           kDefaultInputEffectPriority - 1,
-                                           capture->getAudioRecordHandle()->getSessionId()))
+    EXPECT_EQ(NO_ERROR, createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                                 kDefaultInputEffectPriority - 1,
+                                                 capture->getAudioRecordHandle()->getSessionId()))
             << "Effect should not have been added. " << type;
     EXPECT_EQ(OK, capture->audioProcess());
     EXPECT_EQ(OK, capture->stop());
@@ -294,9 +295,9 @@
                                         capture->getAudioRecordHandle()))
             << "Effect should have been default on record. " << type;
     EXPECT_EQ(ALREADY_EXISTS,
-              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                           kDefaultInputEffectPriority - 1,
-                                           capture->getAudioRecordHandle()->getSessionId()))
+              createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                       kDefaultInputEffectPriority - 1,
+                                       capture->getAudioRecordHandle()->getSessionId()))
             << "Effect should have been added. " << type;
     EXPECT_EQ(OK, capture->audioProcess());
     EXPECT_EQ(OK, capture->stop());
@@ -311,10 +312,9 @@
     EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                          capture->getAudioRecordHandle()))
             << "Effect should not have been default on record. " << type;
-    EXPECT_EQ(NO_ERROR,
-              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                           kDefaultInputEffectPriority - 1,
-                                           capture->getAudioRecordHandle()->getSessionId()))
+    EXPECT_EQ(NO_ERROR, createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                                 kDefaultInputEffectPriority - 1,
+                                                 capture->getAudioRecordHandle()->getSessionId()))
             << "Effect should not have been added. " << type;
     EXPECT_EQ(OK, capture->audioProcess());
     EXPECT_EQ(OK, capture->stop());
@@ -419,8 +419,8 @@
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
     EXPECT_EQ(compatCheck ? NO_ERROR : NO_INIT,
-              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
-                                           playback->getAudioTrackHandle()->getSessionId()))
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                       playback->getAudioTrackHandle()->getSessionId()))
             << "Effect should not have been added. " << mTypeStr;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     playback->stop();
@@ -443,8 +443,8 @@
     EXPECT_EQ(NO_ERROR, playback->start());
     // If effect chosen is not compatible with the session, then effect won't be applied
     EXPECT_EQ(compatCheck ? ALREADY_EXISTS : NO_INIT,
-              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
-                                           playback->getAudioTrackHandle()->getSessionId()))
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                       playback->getAudioTrackHandle()->getSessionId()))
             << "Effect should have been added. " << mTypeStr;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     if (mSelectFastMode) {
@@ -465,8 +465,8 @@
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
     EXPECT_EQ(compatCheck ? NO_ERROR : NO_INIT,
-              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
-                                           playback->getAudioTrackHandle()->getSessionId()))
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                       playback->getAudioTrackHandle()->getSessionId()))
             << "Effect should not have been added. " << mTypeStr;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     playback->stop();
@@ -500,8 +500,8 @@
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
 
-    EXPECT_EQ(ALREADY_EXISTS, isEffectExistsOnAudioSession(
-                                      &mType, &mUuid, kDefaultOutputEffectPriority - 1, sessionId))
+    EXPECT_EQ(ALREADY_EXISTS,
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1, sessionId))
             << "Effect should have been added. " << mTypeStr;
     if (mSelectFastMode) {
         EXPECT_EQ(mIsFastCompatibleEffect ? AUDIO_OUTPUT_FLAG_FAST : 0,
@@ -554,8 +554,8 @@
     ASSERT_EQ(NO_ERROR, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"));
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
-    EXPECT_TRUE(isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                             kDefaultOutputEffectPriority - 1, sessionId))
+    ASSERT_EQ(ALREADY_EXISTS, createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                                       kDefaultOutputEffectPriority - 1, sessionId))
             << "Effect should have been added. " << type;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     playback->stop();
@@ -563,3 +563,10 @@
     EXPECT_TRUE(cb->receivedFramesProcessed)
             << "AudioEffect frames processed callback not received";
 }
+
+int main(int argc, char** argv) {
+    android::ProcessState::self()->startThreadPool();
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index 61edd4d..f2fee8b 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -24,9 +24,29 @@
 #include <gtest/gtest.h>
 
 #include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 
+// Test that the basic constructor returns an object that doesn't crash
+// on stop() or destruction.
+
+TEST(AudioRecordTestBasic, EmptyAudioRecord) {
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = "AudioRecordTest";
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.token = sp<BBinder>::make();
+    const auto ar = sp<AudioRecord>::make(attributionSource);
+
+    // test key commands on an unset AudioRecord.
+    EXPECT_EQ(NO_INIT, ar->initCheck());
+    EXPECT_EQ(true, ar->stopped());
+
+    // just don't crash.
+    ar->stop();
+}
+
 class AudioRecordTest : public ::testing::Test {
   public:
     void SetUp() override {
@@ -82,7 +102,10 @@
     }
 
     void TearDown() override {
-        if (mAC) ASSERT_EQ(OK, mAC->stop());
+        if (mAC) {
+            ASSERT_EQ(OK, mAC->stop());
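+            // Drop the reference so the capture object is destroyed between tests.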
+            mAC.clear();
+        }
     }
 };
 
@@ -100,10 +123,12 @@
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->addAudioDeviceCallback(cb));
     EXPECT_EQ(OK, mAC->start()) << "record creation failed";
     EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, cbOld->mAudioIo);
-    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, cbOld->mDeviceId);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, cb->mAudioIo);
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, cb->mDeviceId);
+    const auto [oldAudioIo, oldDeviceId] = cbOld->getLastPortAndDevice();
+    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, oldAudioIo);
+    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, oldDeviceId);
+    const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, audioIo);
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, deviceId);
     EXPECT_EQ(BAD_VALUE, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(nullptr));
     EXPECT_EQ(INVALID_OPERATION, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cbOld));
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cb));
@@ -146,31 +171,33 @@
 }
 
 TEST_F(AudioRecordTest, TestGetSetMarker) {
-    mAC->mMarkerPosition = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
-    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setMarkerPosition(mAC->mMarkerPosition))
+    mAC->setMarkerPosition((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setMarkerPosition(mAC->getMarkerPosition()))
             << "setMarkerPosition() failed";
     uint32_t marker;
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getMarkerPosition(&marker))
             << "getMarkerPosition() failed";
     EXPECT_EQ(OK, mAC->start()) << "start recording failed";
     EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
-    EXPECT_EQ(marker, mAC->mMarkerPosition)
+    EXPECT_EQ(marker, mAC->getMarkerPosition())
             << "configured marker and received marker are different";
-    EXPECT_EQ(mAC->mReceivedCbMarkerAtPosition, mAC->mMarkerPosition)
+    EXPECT_EQ(mAC->waitAndGetReceivedCbMarkerAtPosition(), mAC->getMarkerPosition())
             << "configured marker and received cb marker are different";
 }
 
 TEST_F(AudioRecordTest, TestGetSetMarkerPeriodical) {
-    mAC->mMarkerPeriod = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
-    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->mMarkerPeriod))
+    mAC->setMarkerPeriod((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->getMarkerPeriod()))
             << "setPositionUpdatePeriod() failed";
     uint32_t marker;
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPositionUpdatePeriod(&marker))
             << "getPositionUpdatePeriod() failed";
     EXPECT_EQ(OK, mAC->start()) << "start recording failed";
     EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
-    EXPECT_EQ(marker, mAC->mMarkerPeriod) << "configured marker and received marker are different";
-    EXPECT_EQ(mAC->mReceivedCbMarkerCount, mAC->mNumFramesToRecord / mAC->mMarkerPeriod)
+    EXPECT_EQ(marker, mAC->getMarkerPeriod())
+            << "configured marker and received marker are different";
+    EXPECT_EQ(mAC->waitAndGetReceivedCbMarkerCount(),
+              mAC->mNumFramesToRecord / mAC->getMarkerPeriod())
             << "configured marker and received cb marker are different";
 }
 
@@ -197,12 +224,12 @@
         EXPECT_EQ(mSessionId, mAC->getAudioRecordHandle()->getSessionId());
     if (mTransferType != AudioRecord::TRANSFER_CALLBACK) {
         uint32_t marker;
-        mAC->mMarkerPosition = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
+        mAC->setMarkerPosition((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
         EXPECT_EQ(INVALID_OPERATION,
-                  mAC->getAudioRecordHandle()->setMarkerPosition(mAC->mMarkerPosition));
+                  mAC->getAudioRecordHandle()->setMarkerPosition(mAC->getMarkerPosition()));
         EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getMarkerPosition(&marker));
         EXPECT_EQ(INVALID_OPERATION,
-                  mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->mMarkerPosition));
+                  mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->getMarkerPosition()));
         EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPositionUpdatePeriod(&marker));
     }
     EXPECT_EQ(OK, mAC->start()) << "start recording failed";
@@ -261,26 +288,6 @@
                                                               AUDIO_SOURCE_UNPROCESSED)),
                          GetRecordTestName);
 
-namespace {
-
-class TestExecutionTracer : public ::testing::EmptyTestEventListener {
-  public:
-    void OnTestStart(const ::testing::TestInfo& test_info) override {
-        TraceTestState("Started", test_info);
-    }
-    void OnTestEnd(const ::testing::TestInfo& test_info) override {
-        TraceTestState("Finished", test_info);
-    }
-    void OnTestPartResult(const ::testing::TestPartResult& result) override { LOG(INFO) << result; }
-
-  private:
-    static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info) {
-        LOG(INFO) << state << " " << test_info.test_suite_name() << "::" << test_info.name();
-    }
-};
-
-}  // namespace
-
 int main(int argc, char** argv) {
     ::testing::InitGoogleTest(&argc, argv);
     ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index c101f00..8151d39 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -19,11 +19,13 @@
 
 #include <string.h>
 
+#include <binder/Binder.h>
 #include <binder/ProcessState.h>
 #include <cutils/properties.h>
 #include <gtest/gtest.h>
 
 #include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 
@@ -62,16 +64,17 @@
         EXPECT_EQ(OK, ap->start()) << "audio track start failed";
         EXPECT_EQ(OK, ap->onProcess());
         EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-        EXPECT_TRUE(checkPatchPlayback(cb->mAudioIo, cb->mDeviceId));
+        const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+        EXPECT_TRUE(checkPatchPlayback(audioIo, deviceId));
         EXPECT_NE(0, ap->getAudioTrackHandle()->getFlags() & output_flags[i]);
         audio_patch patch;
-        EXPECT_EQ(OK, getPatchForOutputMix(cb->mAudioIo, patch));
+        EXPECT_EQ(OK, getPatchForOutputMix(audioIo, patch));
         if (output_flags[i] != AUDIO_OUTPUT_FLAG_FAST) {
             // A "normal" output can still have a FastMixer, depending on the buffer size.
             // Thus, a fast track can be created on a mix port which does not have the FAST flag.
             for (auto j = 0; j < patch.num_sources; j++) {
                 if (patch.sources[j].type == AUDIO_PORT_TYPE_MIX &&
-                    patch.sources[j].ext.mix.handle == cb->mAudioIo) {
+                    patch.sources[j].ext.mix.handle == audioIo) {
                     SCOPED_TRACE(dumpPortConfig(patch.sources[j]));
                     EXPECT_NE(0, patch.sources[j].flags.output & output_flags[i])
                             << "expected output flag "
@@ -150,6 +153,7 @@
         config.sample_rate = 48000;
         AudioMix mix(criteria, mixType, config, mixFlag, String8{mAddress.c_str()}, 0);
         mix.mDeviceType = deviceType;
+        mix.mToken = sp<BBinder>::make();
         mMixes.push(mix);
         if (OK == AudioSystem::registerPolicyMixes(mMixes, true)) {
             mPolicyMixRegistered = true;
@@ -267,21 +271,6 @@
     playback->stop();
 }
 
-class TestExecutionTracer : public ::testing::EmptyTestEventListener {
-  public:
-    void OnTestStart(const ::testing::TestInfo& test_info) override {
-        TraceTestState("Started", test_info);
-    }
-    void OnTestEnd(const ::testing::TestInfo& test_info) override {
-        TraceTestState("Completed", test_info);
-    }
-
-  private:
-    static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info) {
-        ALOGI("%s %s::%s", state.c_str(), test_info.test_suite_name(), test_info.name());
-    }
-};
-
 int main(int argc, char** argv) {
     android::ProcessState::self()->startThreadPool();
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index d9789f1..742ca48 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -14,18 +14,19 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AudioSystemTest"
-
 #include <string.h>
 
 #include <set>
 
+#define LOG_TAG "AudioSystemTest"
+
 #include <gtest/gtest.h>
 #include <log/log.h>
 #include <media/AidlConversionCppNdk.h>
 #include <media/IAudioFlinger.h>
 
 #include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using android::media::audio::common::AudioDeviceAddress;
 using android::media::audio::common::AudioDeviceDescription;
@@ -107,30 +108,32 @@
 // UNIT TESTS
 TEST_F(AudioSystemTest, CheckServerSideValues) {
     ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
-    EXPECT_GT(mAF->sampleRate(mCbPlayback->mAudioIo), 0);
-    EXPECT_NE(mAF->format(mCbPlayback->mAudioIo), AUDIO_FORMAT_INVALID);
-    EXPECT_GT(mAF->frameCount(mCbPlayback->mAudioIo), 0);
+    const auto [pbAudioIo, _] = mCbPlayback->getLastPortAndDevice();
+    EXPECT_GT(mAF->sampleRate(pbAudioIo), 0);
+    EXPECT_NE(mAF->format(pbAudioIo), AUDIO_FORMAT_INVALID);
+    EXPECT_GT(mAF->frameCount(pbAudioIo), 0);
     size_t frameCountHal, frameCountHalCache;
-    frameCountHal = mAF->frameCountHAL(mCbPlayback->mAudioIo);
+    frameCountHal = mAF->frameCountHAL(pbAudioIo);
     EXPECT_GT(frameCountHal, 0);
-    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(mCbPlayback->mAudioIo, &frameCountHalCache));
+    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(pbAudioIo, &frameCountHalCache));
     EXPECT_EQ(frameCountHal, frameCountHalCache);
-    EXPECT_GT(mAF->latency(mCbPlayback->mAudioIo), 0);
+    EXPECT_GT(mAF->latency(pbAudioIo), 0);
     // client side latency is at least server side latency
-    EXPECT_LE(mAF->latency(mCbPlayback->mAudioIo), mPlayback->getAudioTrackHandle()->latency());
+    EXPECT_LE(mAF->latency(pbAudioIo), mPlayback->getAudioTrackHandle()->latency());
 
     ASSERT_NO_FATAL_FAILURE(createRecordSession());
-    EXPECT_GT(mAF->sampleRate(mCbRecord->mAudioIo), 0);
-    // EXPECT_NE(mAF->format(mCbRecord->mAudioIo), AUDIO_FORMAT_INVALID);
-    EXPECT_GT(mAF->frameCount(mCbRecord->mAudioIo), 0);
-    EXPECT_GT(mAF->frameCountHAL(mCbRecord->mAudioIo), 0);
-    frameCountHal = mAF->frameCountHAL(mCbRecord->mAudioIo);
+    const auto [recAudioIo, __] = mCbRecord->getLastPortAndDevice();
+    EXPECT_GT(mAF->sampleRate(recAudioIo), 0);
+    // EXPECT_NE(mAF->format(recAudioIo), AUDIO_FORMAT_INVALID);
+    EXPECT_GT(mAF->frameCount(recAudioIo), 0);
+    EXPECT_GT(mAF->frameCountHAL(recAudioIo), 0);
+    frameCountHal = mAF->frameCountHAL(recAudioIo);
     EXPECT_GT(frameCountHal, 0);
-    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(mCbRecord->mAudioIo, &frameCountHalCache));
+    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(recAudioIo, &frameCountHalCache));
     EXPECT_EQ(frameCountHal, frameCountHalCache);
-    // EXPECT_GT(mAF->latency(mCbRecord->mAudioIo), 0);
+    // EXPECT_GT(mAF->latency(recAudioIo), 0);
     // client side latency is at least server side latency
-    // EXPECT_LE(mAF->latency(mCbRecord->mAudioIo), mCapture->getAudioRecordHandle()->latency());
+    // EXPECT_LE(mAF->latency(recAudioIo), mCapture->getAudioRecordHandle()->latency());
 
     EXPECT_GT(AudioSystem::getPrimaryOutputSamplingRate(), 0);  // first fast mixer sample rate
     EXPECT_GT(AudioSystem::getPrimaryOutputFrameCount(), 0);    // fast mixer frame count
@@ -196,19 +199,6 @@
     EXPECT_EQ(origBalance, tstBalance);
 }
 
-TEST_F(AudioSystemTest, GetStreamVolume) {
-    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
-    float origStreamVol;
-    EXPECT_EQ(NO_ERROR, AudioSystem::getStreamVolume(AUDIO_STREAM_MUSIC, &origStreamVol,
-                                                     mCbPlayback->mAudioIo));
-}
-
-TEST_F(AudioSystemTest, GetStreamMute) {
-    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
-    bool origMuteState;
-    EXPECT_EQ(NO_ERROR, AudioSystem::getStreamMute(AUDIO_STREAM_MUSIC, &origMuteState));
-}
-
 TEST_F(AudioSystemTest, StartAndStopAudioSource) {
     std::vector<struct audio_port_v7> ports;
     audio_port_config sourcePortConfig;
@@ -706,21 +696,6 @@
     }
 }
 
-class TestExecutionTracer : public ::testing::EmptyTestEventListener {
-  public:
-    void OnTestStart(const ::testing::TestInfo& test_info) override {
-        TraceTestState("Started", test_info);
-    }
-    void OnTestEnd(const ::testing::TestInfo& test_info) override {
-        TraceTestState("Completed", test_info);
-    }
-
-  private:
-    static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info) {
-        ALOGI("%s %s::%s", state.c_str(), test_info.test_suite_name(), test_info.name());
-    }
-};
-
 int main(int argc, char** argv) {
     ::testing::InitGoogleTest(&argc, argv);
     ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index 2b68225..cf7d926 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -15,13 +15,34 @@
  */
 
 //#define LOG_NDEBUG 0
+#define LOG_TAG "AudioTrackTests"
 
+#include <binder/ProcessState.h>
 #include <gtest/gtest.h>
 
 #include "audio_test_utils.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 
+// Test that the basic constructor returns an object that doesn't crash
+// on stop() or destruction.
+
+TEST(AudioTrackTestBasic, EmptyAudioTrack) {
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = "AudioTrackTest";
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.token = sp<BBinder>::make();
+    const auto at = sp<AudioTrack>::make(attributionSource);
+
+    EXPECT_EQ(NO_INIT, at->initCheck());
+    EXPECT_EQ(true, at->stopped());
+
+    // ensure we do not crash.
+    at->stop();
+}
+
 TEST(AudioTrackTest, TestPlayTrack) {
     const auto ap = sp<AudioPlayback>::make(44100 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT,
                                             AUDIO_CHANNEL_OUT_STEREO, AUDIO_OUTPUT_FLAG_NONE,
@@ -136,18 +157,20 @@
     EXPECT_EQ(OK, ap->start()) << "audio track start failed";
     EXPECT_EQ(OK, ap->onProcess());
     EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, cbOld->mAudioIo);
-    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, cbOld->mDeviceId);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, cb->mAudioIo);
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, cb->mDeviceId);
-    EXPECT_EQ(cb->mAudioIo, ap->getAudioTrackHandle()->getOutput());
-    EXPECT_EQ(cb->mDeviceId, ap->getAudioTrackHandle()->getRoutedDeviceId());
+    const auto [oldAudioIo, oldDeviceId] = cbOld->getLastPortAndDevice();
+    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, oldAudioIo);
+    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, oldDeviceId);
+    const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, audioIo);
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, deviceId);
+    EXPECT_EQ(audioIo, ap->getAudioTrackHandle()->getOutput());
+    EXPECT_EQ(deviceId, ap->getAudioTrackHandle()->getRoutedDeviceId());
     String8 keys;
     keys = ap->getAudioTrackHandle()->getParameters(keys);
     if (!keys.empty()) {
         std::cerr << "track parameters :: " << keys << std::endl;
     }
-    EXPECT_TRUE(checkPatchPlayback(cb->mAudioIo, cb->mDeviceId));
+    EXPECT_TRUE(checkPatchPlayback(audioIo, deviceId));
     EXPECT_EQ(BAD_VALUE, ap->getAudioTrackHandle()->removeAudioDeviceCallback(nullptr));
     EXPECT_EQ(INVALID_OPERATION, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cbOld));
     EXPECT_EQ(OK, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cb));
@@ -209,3 +232,10 @@
                                              AUDIO_OUTPUT_FLAG_RAW | AUDIO_OUTPUT_FLAG_FAST,
                                              AUDIO_OUTPUT_FLAG_DEEP_BUFFER),
                            ::testing::Values(AUDIO_SESSION_NONE)));
+
+int main(int argc, char** argv) {
+    android::ProcessState::self()->startThreadPool();
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/test_execution_tracer.cpp b/media/libaudioclient/tests/test_execution_tracer.cpp
new file mode 100644
index 0000000..797bb4b
--- /dev/null
+++ b/media/libaudioclient/tests/test_execution_tracer.cpp
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TestExecutionTracer"
+
+#include "test_execution_tracer.h"
+
+#include <android-base/logging.h>
+
+void TestExecutionTracer::OnTestStart(const ::testing::TestInfo& test_info) {
+    TraceTestState("Started", test_info);
+}
+
+void TestExecutionTracer::OnTestEnd(const ::testing::TestInfo& test_info) {
+    TraceTestState("Finished", test_info);
+}
+
+void TestExecutionTracer::OnTestPartResult(const ::testing::TestPartResult& result) {
+    if (result.failed()) {
+        LOG(ERROR) << result;
+    } else {
+        LOG(INFO) << result;
+    }
+}
+
+// static
+void TestExecutionTracer::TraceTestState(const std::string& state,
+                                         const ::testing::TestInfo& test_info) {
+    LOG(INFO) << state << " " << test_info.test_suite_name() << "::" << test_info.name();
+}
diff --git a/media/libaudioclient/tests/test_execution_tracer.h b/media/libaudioclient/tests/test_execution_tracer.h
new file mode 100644
index 0000000..9031aaf
--- /dev/null
+++ b/media/libaudioclient/tests/test_execution_tracer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gtest/gtest.h>
+
+class TestExecutionTracer : public ::testing::EmptyTestEventListener {
+  public:
+    void OnTestStart(const ::testing::TestInfo& test_info) override;
+    void OnTestEnd(const ::testing::TestInfo& test_info) override;
+    void OnTestPartResult(const ::testing::TestPartResult& result) override;
+
+  private:
+    static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info);
+};
diff --git a/media/libaudioclient/tests/trackplayerbase_tests.cpp b/media/libaudioclient/tests/trackplayerbase_tests.cpp
index c9b704d..7317bf0 100644
--- a/media/libaudioclient/tests/trackplayerbase_tests.cpp
+++ b/media/libaudioclient/tests/trackplayerbase_tests.cpp
@@ -16,10 +16,12 @@
 
 #define LOG_TAG "TrackPlayerBaseTest"
 
+#include <binder/ProcessState.h>
 #include <gtest/gtest.h>
-
 #include <media/TrackPlayerBase.h>
 
+#include "test_execution_tracer.h"
+
 using namespace android;
 using namespace android::media;
 
@@ -159,3 +161,10 @@
 
 INSTANTIATE_TEST_SUITE_P(TrackPlayerTest, PauseTestParam,
                          ::testing::Values(std::make_tuple(1.0, 75.0, 2, 24000)));
+
+int main(int argc, char** argv) {
+    android::ProcessState::self()->startThreadPool();
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudiofoundation/Android.bp b/media/libaudiofoundation/Android.bp
index c758fcd..576406d 100644
--- a/media/libaudiofoundation/Android.bp
+++ b/media/libaudiofoundation/Android.bp
@@ -87,7 +87,7 @@
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 }
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index ae0457f..6dbf284 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -192,7 +192,8 @@
                 dst->append(
                         base::StringPrintf("%*s extra audio descriptor %zu:\n", eadSpaces, "", i));
                 dst->append(base::StringPrintf(
-                    "%*s- standard: %u\n", descSpaces, "", mExtraAudioDescriptors[i].standard));
+                        "%*s- standard: %u\n", descSpaces, "",
+                        static_cast<unsigned>(mExtraAudioDescriptors[i].standard)));
                 dst->append(base::StringPrintf("%*s- descriptor:", descSpaces, ""));
                 for (auto v : mExtraAudioDescriptors[i].audioDescriptor) {
                     dst->append(base::StringPrintf(" %02x", v));
diff --git a/media/libaudiofoundation/tests/Android.bp b/media/libaudiofoundation/tests/Android.bp
index 82c7db7..0ca50ab 100644
--- a/media/libaudiofoundation/tests/Android.bp
+++ b/media/libaudiofoundation/tests/Android.bp
@@ -22,8 +22,8 @@
 
     static_libs: [
         "audioclient-types-aidl-cpp",
-        "libaudioclient_aidl_conversion",
         "libaudio_aidl_conversion_common_cpp",
+        "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libstagefright_foundation",
     ],
@@ -37,8 +37,8 @@
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     test_suites: ["device-tests"],
@@ -64,8 +64,8 @@
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     test_suites: ["device-tests"],
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 3c05b0b..75e2c11 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -18,12 +18,11 @@
 
     cflags: [
         "-Wall",
-        "-Wextra",
         "-Werror",
+        "-Wextra",
     ],
 
     required: [
-        "libaudiohal@4.0",
         "libaudiohal@5.0",
         "libaudiohal@6.0",
         "libaudiohal@7.0",
@@ -45,7 +44,9 @@
         "libbase_headers",
         "liberror_headers",
         "libmediautils_headers",
-    ]
+    ],
+
+    export_include_dirs: ["include"],
 }
 
 cc_library_shared {
@@ -62,12 +63,12 @@
 
     shared_libs: [
         "libhidlbase",
-        "libutils",
         "liblog",
+        "libutils",
     ],
 
     header_libs: [
-        "libaudiohal_headers"
+        "libaudiohal_headers",
     ],
 }
 
diff --git a/media/libaudiohal/FactoryHal.cpp b/media/libaudiohal/FactoryHal.cpp
index c414e19..15cb297 100644
--- a/media/libaudiohal/FactoryHal.cpp
+++ b/media/libaudiohal/FactoryHal.cpp
@@ -50,13 +50,12 @@
  * This list need to keep sync with AudioHalVersionInfo.VERSIONS in
  * media/java/android/media/AudioHalVersionInfo.java.
  */
-static const std::array<AudioHalVersionInfo, 6> sAudioHALVersions = {
+static const std::array<AudioHalVersionInfo, 5> sAudioHALVersions = {
     AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 5, 0),
-    AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 4, 0),
 };
 
 static const std::map<AudioHalVersionInfo::Type, InterfaceName> sDevicesHALInterfaces = {
diff --git a/media/libaudiohal/impl/AidlUtils.cpp b/media/libaudiohal/impl/AidlUtils.cpp
new file mode 100644
index 0000000..a916802
--- /dev/null
+++ b/media/libaudiohal/impl/AidlUtils.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AidlUtils.h"
+
+#define LOG_TAG "AIDLUtils"
+#include <utils/Log.h>
+
+namespace android {
+
+//static
+HalDeathHandler& HalDeathHandler::getInstance() {
+    // never-delete singleton
+    static HalDeathHandler* instance = new HalDeathHandler;
+    return *instance;
+}
+
+//static
+void HalDeathHandler::OnBinderDied(void*) {
+    ALOGE("HAL instance died, audio server is restarting");
+    _exit(1);  // Avoid calling atexit handlers, as this code runs on a thread from the RPC threadpool.
+}
+
+HalDeathHandler::HalDeathHandler()
+        : mDeathRecipient(AIBinder_DeathRecipient_new(OnBinderDied)) {}
+
+bool HalDeathHandler::registerHandler(AIBinder* binder) {
+    binder_status_t status = AIBinder_linkToDeath(binder, mDeathRecipient.get(), nullptr);
+    if (status == STATUS_OK) return true;
+    ALOGE("%s: linkToDeath failed: %d", __func__, status);
+    return false;
+}
+
+}  // namespace android
diff --git a/media/libaudiohal/impl/AidlUtils.h b/media/libaudiohal/impl/AidlUtils.h
new file mode 100644
index 0000000..97a5bba
--- /dev/null
+++ b/media/libaudiohal/impl/AidlUtils.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <string>
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_ibinder.h>
+#include <android/binder_manager.h>
+
+namespace android {
+
+class HalDeathHandler {
+  public:
+    static HalDeathHandler& getInstance();
+
+    bool registerHandler(AIBinder* binder);
+  private:
+    static void OnBinderDied(void*);
+
+    HalDeathHandler();
+
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+};
+
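+// Waits for the requested AIDL service instance to become available, registers a death
+// handler on it, and returns the interface pointer.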
+template<class Intf>
+std::shared_ptr<Intf> getServiceInstance(const std::string& instanceName) {
+    const std::string serviceName =
+            std::string(Intf::descriptor).append("/").append(instanceName);
+    std::shared_ptr<Intf> service;
+    while (!service) {
+        AIBinder* serviceBinder = nullptr;
+        while (!serviceBinder) {
+            // 'waitForService' may return a nullptr; hopefully it is a transient error, so retry.
+            serviceBinder = AServiceManager_waitForService(serviceName.c_str());
+        }
+        // `fromBinder` may fail and return a nullptr if the service has died in the meantime.
+        service = Intf::fromBinder(ndk::SpAIBinder(serviceBinder));
+        if (service != nullptr) {
+            HalDeathHandler::getInstance().registerHandler(serviceBinder);
+        }
+    }
+    return service;
+}
+
+}  // namespace android
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index fb1cc34..1a6b949 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -41,7 +41,7 @@
     ],
     header_libs: [
         "android.hardware.audio.common.util@all-versions",
-    ]
+    ],
 }
 
 cc_defaults {
@@ -49,8 +49,8 @@
 
     cflags: [
         "-Wall",
-        "-Wextra",
         "-Werror",
+        "-Wextra",
         "-fvisibility=hidden",
     ],
     shared_libs: [
@@ -71,7 +71,7 @@
     ],
     header_libs: [
         "libaudioclient_headers",
-        "libaudiohal_headers"
+        "libaudiohal_headers",
     ],
     defaults: [
         "latest_android_media_audio_common_types_cpp_export_shared",
@@ -83,36 +83,10 @@
 }
 
 cc_library_shared {
-    name: "libaudiohal@4.0",
-    defaults: [
-        "libaudiohal_default",
-        "libaudiohal_hidl_default"
-    ],
-    srcs: [
-        ":audio_core_hal_client_sources",
-        ":audio_effect_hidl_hal_client_sources",
-        "EffectsFactoryHalEntry.cpp",
-    ],
-    shared_libs: [
-        "android.hardware.audio.common@4.0",
-        "android.hardware.audio.common@4.0-util",
-        "android.hardware.audio.effect@4.0",
-        "android.hardware.audio.effect@4.0-util",
-        "android.hardware.audio@4.0",
-        "android.hardware.audio@4.0-util",
-    ],
-    cflags: [
-        "-DMAJOR_VERSION=4",
-        "-DMINOR_VERSION=0",
-        "-include common/all-versions/VersionMacro.h",
-    ]
-}
-
-cc_library_shared {
     name: "libaudiohal@5.0",
     defaults: [
         "libaudiohal_default",
-        "libaudiohal_hidl_default"
+        "libaudiohal_hidl_default",
     ],
     srcs: [
         ":audio_core_hal_client_sources",
@@ -131,14 +105,14 @@
         "-DMAJOR_VERSION=5",
         "-DMINOR_VERSION=0",
         "-include common/all-versions/VersionMacro.h",
-    ]
+    ],
 }
 
 cc_library_shared {
     name: "libaudiohal@6.0",
     defaults: [
         "libaudiohal_default",
-        "libaudiohal_hidl_default"
+        "libaudiohal_hidl_default",
     ],
     srcs: [
         ":audio_core_hal_client_sources",
@@ -157,14 +131,14 @@
         "-DMAJOR_VERSION=6",
         "-DMINOR_VERSION=0",
         "-include common/all-versions/VersionMacro.h",
-    ]
+    ],
 }
 
 cc_library_static {
     name: "libaudiohal.effect@7.0",
     defaults: [
         "libaudiohal_default",
-        "libaudiohal_hidl_default"
+        "libaudiohal_hidl_default",
     ],
     srcs: [
         ":audio_effect_hidl_hal_client_sources",
@@ -179,14 +153,14 @@
         "-DMAJOR_VERSION=7",
         "-DMINOR_VERSION=0",
         "-include common/all-versions/VersionMacro.h",
-    ]
+    ],
 }
 
 cc_library_shared {
     name: "libaudiohal@7.0",
     defaults: [
         "libaudiohal_default",
-        "libaudiohal_hidl_default"
+        "libaudiohal_hidl_default",
     ],
     srcs: [
         ":audio_core_hal_client_sources",
@@ -206,7 +180,7 @@
         "-DMAJOR_VERSION=7",
         "-DMINOR_VERSION=0",
         "-include common/all-versions/VersionMacro.h",
-    ]
+    ],
 }
 
 cc_library_shared {
@@ -215,7 +189,7 @@
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_sounddose_ndk_shared",
         "libaudiohal_default",
-        "libaudiohal_hidl_default"
+        "libaudiohal_hidl_default",
     ],
     srcs: [
         ":audio_core_hal_client_sources",
@@ -237,12 +211,12 @@
         "libbinder_ndk",
     ],
     cflags: [
-        "-DMAJOR_VERSION=7",
-        "-DMINOR_VERSION=1",
         "-DCOMMON_TYPES_MINOR_VERSION=0",
         "-DCORE_TYPES_MINOR_VERSION=0",
+        "-DMAJOR_VERSION=7",
+        "-DMINOR_VERSION=1",
         "-include common/all-versions/VersionMacro.h",
-    ]
+    ],
 }
 
 cc_defaults {
@@ -253,11 +227,11 @@
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
         "latest_android_media_audio_common_types_ndk_shared",
+        "latest_av_audio_types_aidl_ndk_shared",
     ],
     shared_libs: [
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
-        "av-audio-types-aidl-ndk",
         "libaudio_aidl_conversion_common_cpp",
         "libaudio_aidl_conversion_common_ndk",
         "libaudio_aidl_conversion_common_ndk_cpp",
@@ -271,25 +245,48 @@
         "libeffectsconfig_headers",
     ],
     cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-        "-Wthread-safety",
         "-DBACKEND_CPP_NDK",
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+        "-Wthread-safety",
     ],
 }
 
 cc_library_shared {
     name: "libaudiohal@aidl",
     defaults: [
-        "libaudiohal_default",
         "libaudiohal_aidl_default",
+        "libaudiohal_default",
     ],
     srcs: [
+        ":audio_effect_hal_aidl_src_files",
+        ":core_audio_hal_aidl_src_files",
+        "AidlUtils.cpp",
         "DevicesFactoryHalEntry.cpp",
-        "EffectConversionHelperAidl.cpp",
+        "EffectsFactoryHalEntry.cpp",
+    ],
+}
+
+filegroup {
+    name: "core_audio_hal_aidl_src_files",
+    srcs: [
+        "ConversionHelperAidl.cpp",
+        "DeviceHalAidl.cpp",
+        "DevicesFactoryHalAidl.cpp",
+        "Hal2AidlMapper.cpp",
+        "StreamHalAidl.cpp",
+    ],
+}
+
+filegroup {
+    name: "audio_effect_hal_aidl_src_files",
+    srcs: [
+        ":audio_effectproxy_src_files",
         "EffectBufferHalAidl.cpp",
+        "EffectConversionHelperAidl.cpp",
         "EffectHalAidl.cpp",
+        "EffectsFactoryHalAidl.cpp",
         "effectsAidlConversion/AidlConversionAec.cpp",
         "effectsAidlConversion/AidlConversionAgc1.cpp",
         "effectsAidlConversion/AidlConversionAgc2.cpp",
@@ -306,21 +303,6 @@
         "effectsAidlConversion/AidlConversionVendorExtension.cpp",
         "effectsAidlConversion/AidlConversionVirtualizer.cpp",
         "effectsAidlConversion/AidlConversionVisualizer.cpp",
-        "EffectsFactoryHalAidl.cpp",
-        "EffectsFactoryHalEntry.cpp",
-        ":audio_effectproxy_src_files",
-        ":core_audio_hal_aidl_src_files",
-    ],
-}
-
-filegroup {
-    name: "core_audio_hal_aidl_src_files",
-    srcs: [
-        "ConversionHelperAidl.cpp",
-        "DeviceHalAidl.cpp",
-        "DevicesFactoryHalAidl.cpp",
-        "Hal2AidlMapper.cpp",
-        "StreamHalAidl.cpp",
     ],
 }
 
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.cpp b/media/libaudiohal/impl/ConversionHelperAidl.cpp
index 46abfda..7a32811 100644
--- a/media/libaudiohal/impl/ConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/ConversionHelperAidl.cpp
@@ -37,10 +37,6 @@
     using ParameterScope = IHalAdapterVendorExtension::ParameterScope;
     if (parameterKeys.size() == 0) return OK;
     const String8 rawKeys = parameterKeys.keysToString();
-    if (vendorExt == nullptr) {
-        ALOGW("%s: unknown parameters, ignored: \"%s\"", __func__, rawKeys.c_str());
-        return OK;
-    }
 
     std::vector<std::string> parameterIds;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(vendorExt->parseVendorParameterIds(
@@ -85,10 +81,6 @@
     using ParameterScope = IHalAdapterVendorExtension::ParameterScope;
     if (parameters.size() == 0) return OK;
     const String8 rawKeysAndValues = parameters.toString();
-    if (vendorExt == nullptr) {
-        ALOGW("%s: unknown parameters, ignored: \"%s\"", __func__, rawKeysAndValues.c_str());
-        return OK;
-    }
 
     std::vector<VendorParameter> syncParameters, asyncParameters;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(vendorExt->parseVendorParameters(
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index db9a9b1..dc81722 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -24,6 +24,7 @@
 #include <aidl/android/hardware/audio/core/StreamDescriptor.h>
 #include <error/expected_utils.h>
 #include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
 #include <media/AidlConversionNdkCpp.h>
 #include <media/AidlConversionUtil.h>
 #include <mediautils/TimeCheck.h>
@@ -58,7 +59,10 @@
 using aidl::android::media::audio::IHalAdapterVendorExtension;
 using aidl::android::hardware::audio::common::getFrameSizeInBytes;
 using aidl::android::hardware::audio::common::isBitPositionFlagSet;
+using aidl::android::hardware::audio::common::kDumpFromAudioServerArgument;
 using aidl::android::hardware::audio::common::RecordTrackMetadata;
+using aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using aidl::android::hardware::audio::common::SourceMetadata;
 using aidl::android::hardware::audio::core::sounddose::ISoundDose;
 using aidl::android::hardware::audio::core::AudioPatch;
 using aidl::android::hardware::audio::core::AudioRoute;
@@ -274,15 +278,16 @@
     return parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, values);
 }
 
-status_t DeviceHalAidl::getInputBufferSize(const struct audio_config* config, size_t* size) {
+status_t DeviceHalAidl::getInputBufferSize(struct audio_config* config, size_t* size) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (mModule == nullptr) return NO_INIT;
     if (config == nullptr || size == nullptr) {
         return BAD_VALUE;
     }
+    constexpr bool isInput = true;
     AudioConfig aidlConfig = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
+            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, isInput));
     AudioDevice aidlDevice;
     aidlDevice.type.type = AudioDeviceType::IN_DEFAULT;
     AudioSource aidlSource = AudioSource::DEFAULT;
@@ -296,6 +301,9 @@
                         0 /*handle*/, aidlDevice, aidlFlags, aidlSource,
                         &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
     }
+    *config = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioConfig_audio_config_t(aidlConfig, isInput));
+    if (mixPortConfig.id == 0) return BAD_VALUE;  // HAL suggests a different config.
     *size = aidlConfig.frameCount *
             getFrameSizeInBytes(aidlConfig.base.format, aidlConfig.base.channelMask);
     // Do not disarm cleanups to release temporary port configs.
@@ -382,7 +390,7 @@
         return runCb([](CbRef cb) { cb->onWriteReady(); });
     }
     ndk::ScopedAStatus onError() override {
-        return runCb([](CbRef cb) { cb->onError(); });
+        return runCb([](CbRef cb) { cb->onError(true /*isHardError*/); });
     }
     ndk::ScopedAStatus onDrainReady() override {
         return runCb([](CbRef cb) { cb->onDrainReady(); });
@@ -401,8 +409,7 @@
                       *static_cast<StreamCallbackBase*>(this)),
               StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>(
                       *static_cast<StreamCallbackBase*>(this)) {}
-    ndk::ScopedAStatus onCodecFormatChanged(const std::vector<uint8_t>& in_audioMetadata) override {
-        std::basic_string<uint8_t> halMetadata(in_audioMetadata.begin(), in_audioMetadata.end());
+    ndk::ScopedAStatus onCodecFormatChanged(const std::vector<uint8_t>& halMetadata) override {
         return StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>::runCb(
                 [&halMetadata](auto cb) { cb->onCodecFormatChanged(halMetadata); });
     }
@@ -423,7 +430,8 @@
         audio_io_handle_t handle, audio_devices_t devices,
         audio_output_flags_t flags, struct audio_config* config,
         const char* address,
-        sp<StreamOutHalInterface>* outStream) {
+        sp<StreamOutHalInterface>* outStream,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (mModule == nullptr) return NO_INIT;
@@ -439,9 +447,12 @@
             ::aidl::android::legacy2aidl_audio_device_AudioDevice(devices, address));
     int32_t aidlOutputFlags = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    SourceMetadata aidlMetadata = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_SourceMetadata(sourceMetadata));
     AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::output>(aidlOutputFlags);
     AudioPortConfig mixPortConfig;
     AudioPatch aidlPatch;
+
     Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
     {
         std::lock_guard l(mLock);
@@ -456,17 +467,22 @@
     args.portConfigId = mixPortConfig.id;
     const bool isOffload = isBitPositionFlagSet(
             aidlOutputFlags, AudioOutputFlags::COMPRESS_OFFLOAD);
+    const bool isHwAvSync = isBitPositionFlagSet(
+            aidlOutputFlags, AudioOutputFlags::HW_AV_SYNC);
     std::shared_ptr<OutputStreamCallbackAidl> streamCb;
     if (isOffload) {
         streamCb = ndk::SharedRefBase::make<OutputStreamCallbackAidl>(this);
     }
     auto eventCb = ndk::SharedRefBase::make<OutputStreamEventCallbackAidl>(this);
-    if (isOffload) {
+    if (isOffload || isHwAvSync) {
         args.offloadInfo = aidlConfig.offloadInfo;
+    }
+    if (isOffload) {
         args.callback = streamCb;
     }
     args.bufferSizeFrames = aidlConfig.frameCount;
     args.eventCallback = eventCb;
+    args.sourceMetadata = aidlMetadata;
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn ret;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
     StreamContextAidl context(ret.desc, isOffload);
@@ -475,15 +491,21 @@
                 __func__, ret.desc.toString().c_str());
         return NO_INIT;
     }
-    *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
+    auto stream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
             std::move(ret.stream), mVendorExt, this /*callbackBroker*/);
-    void* cbCookie = (*outStream).get();
+    *outStream = stream;
+    /* StreamOutHalInterface* */ void* cbCookie = (*outStream).get();
     {
         std::lock_guard l(mLock);
         mCallbacks.emplace(cbCookie, Callbacks{});
         mMapper.addStream(*outStream, mixPortConfig.id, aidlPatch.id);
     }
-    if (streamCb) streamCb->setCookie(cbCookie);
+    if (streamCb) {
+        streamCb->setCookie(cbCookie);
+        // Although StreamOutHalAidl implements StreamOutHalInterfaceCallback,
+        // we always go via the CallbackBroker for consistency.
+        setStreamOutCallback(cbCookie, stream);
+    }
     eventCb->setCookie(cbCookie);
     cleanups.disarmAll();
     return OK;
@@ -586,7 +608,6 @@
     // that the HAL module uses `int32_t` for patch IDs. The following assert ensures
     // that both the framework and the HAL use the same value for "no ID":
     static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
-    int32_t aidlPatchId = static_cast<int32_t>(*patch);
 
     // Upon conversion, mix port configs contain audio configuration, while
     // device port configs contain device address. This data is used to find
@@ -608,11 +629,27 @@
                         ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
                                 sinks[i], isInput, 0)));
     }
+    int32_t aidlPatchId = static_cast<int32_t>(*patch);
     Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
     {
         std::lock_guard l(mLock);
-        RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
-                        aidlSources, aidlSinks, &aidlPatchId, &cleanups));
+        // Check for patches that only exist for the framework or have a different HAL patch ID.
+        if (int32_t aidlHalPatchId = mMapper.findFwkPatch(aidlPatchId); aidlHalPatchId != 0) {
+            if (aidlHalPatchId == aidlPatchId) {
+                // This patch was previously released by the HAL. Thus we need to pass '0'
+                // to the HAL to obtain a new patch.
+                int32_t newAidlPatchId = 0;
+                RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
+                                aidlSources, aidlSinks, &newAidlPatchId, &cleanups));
+                mMapper.updateFwkPatch(aidlPatchId, newAidlPatchId);
+            } else {
+                RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
+                                aidlSources, aidlSinks, &aidlHalPatchId, &cleanups));
+            }
+        } else {
+            RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
+                            aidlSources, aidlSinks, &aidlPatchId, &cleanups));
+        }
     }
     *patch = static_cast<audio_patch_handle_t>(aidlPatchId);
     cleanups.disarmAll();
@@ -628,7 +665,19 @@
         return BAD_VALUE;
     }
     std::lock_guard l(mLock);
-    RETURN_STATUS_IF_ERROR(mMapper.releaseAudioPatch(static_cast<int32_t>(patch)));
+    // Check for patches that only exist for the framework or have a different HAL patch ID.
+    int32_t aidlPatchId = static_cast<int32_t>(patch);
+    if (int32_t aidlHalPatchId = mMapper.findFwkPatch(aidlPatchId); aidlHalPatchId != 0) {
+        if (aidlHalPatchId == aidlPatchId) {
+            // This patch was previously released by the HAL, just need to finish its removal.
+            mMapper.eraseFwkPatch(aidlPatchId);
+            return OK;
+        } else {
+            // This patch has a HAL patch ID which differs from the framework patch ID,
+            // thus release it using the HAL ID.
+            aidlPatchId = aidlHalPatchId;
+        }
+    }
+    RETURN_STATUS_IF_ERROR(mMapper.releaseAudioPatch(aidlPatchId));
     return OK;
 }
 
@@ -877,7 +926,9 @@
 status_t DeviceHalAidl::dump(int fd, const Vector<String16>& args) {
     TIME_CHECK();
     if (mModule == nullptr) return NO_INIT;
-    return mModule->dump(fd, Args(args).args(), args.size());
+    Vector<String16> newArgs = args;
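+    // Append the marker argument so the HAL module can tell that the dump request
+    // originates from the audio server.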
+    newArgs.push(String16(kDumpFromAudioServerArgument));
+    return mModule->dump(fd, Args(newArgs).args(), newArgs.size());
 }
 
 status_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
@@ -985,7 +1036,7 @@
     if (mModule == nullptr) return NO_INIT;
     {
         std::lock_guard l(mLock);
-        mMapper.resetUnusedPatchesPortConfigsAndPorts();
+        mMapper.resetUnusedPatchesAndPortConfigs();
     }
     ModuleDebug debug{ .simulateDeviceConnections = enabled };
     status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
@@ -1052,15 +1103,11 @@
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
                     parameters, String8(AudioParameter::keyReconfigA2dp),
                     [&](const String8& value) -> status_t {
-                        if (mVendorExt != nullptr) {
-                            std::vector<VendorParameter> result;
-                            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                                    mVendorExt->parseBluetoothA2dpReconfigureOffload(
-                                            std::string(value.c_str()), &result)));
-                            reconfigureOffload = std::move(result);
-                        } else {
-                            reconfigureOffload = std::vector<VendorParameter>();
-                        }
+                        std::vector<VendorParameter> result;
+                        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                                mVendorExt->parseBluetoothA2dpReconfigureOffload(
+                                        std::string(value.c_str()), &result)));
+                        reconfigureOffload = std::move(result);
                         return OK;
                     }));
     if (mBluetoothA2dp != nullptr && a2dpEnabled.has_value()) {
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index f705db7..4db1638 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -113,13 +113,15 @@
     status_t getParameters(const String8& keys, String8 *values) override;
 
     // Returns audio input buffer size according to parameters passed.
-    status_t getInputBufferSize(const struct audio_config* config, size_t* size) override;
+    status_t getInputBufferSize(struct audio_config* config, size_t* size) override;
 
     // Creates and opens the audio hardware output stream. The stream is closed
     // by releasing all references to the returned object.
     status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
                               audio_output_flags_t flags, struct audio_config* config,
-                              const char* address, sp<StreamOutHalInterface>* outStream) override;
+                              const char* address, sp<StreamOutHalInterface>* outStream,
+                              const std::vector<playback_track_metadata_v7_t>&
+                                                               sourceMetadata = {}) override;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
@@ -182,7 +184,7 @@
     status_t getAudioMixPort(const struct audio_port_v7* devicePort,
                              struct audio_port_v7* mixPort) override;
 
-    status_t dump(int __unused, const Vector<String16>& __unused) override;
+    status_t dump(int fd, const Vector<String16>& args) override;
 
   private:
     friend class sp<DeviceHalAidl>;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index e8e1f46..b48c7ed 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -236,7 +236,7 @@
 }
 
 status_t DeviceHalHidl::getInputBufferSize(
-        const struct audio_config *config, size_t *size) {
+        struct audio_config *config, size_t *size) {
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     AudioConfig hidlConfig;
@@ -259,7 +259,8 @@
         audio_output_flags_t flags,
         struct audio_config *config,
         const char *address,
-        sp<StreamOutHalInterface> *outStream) {
+        sp<StreamOutHalInterface> *outStream,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
@@ -273,6 +274,18 @@
         return status;
     }
 
+#if MAJOR_VERSION == 4
+    ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#else
+    ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#endif
+
+    if (status_t status = CoreUtils::sourceMetadataFromHalV7(
+                sourceMetadata, true /*ignoreNonVendorTags*/, &hidlMetadata);
+            status != OK) {
+        return status;
+    }
+
 #if !(MAJOR_VERSION == 7 && MINOR_VERSION == 1)
     //TODO: b/193496180 use spatializer flag at audio HAL when available
     if ((flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
@@ -294,7 +307,7 @@
 #endif
             handle, hidlDevice, hidlConfig, hidlFlags,
 #if MAJOR_VERSION >= 4
-            {} /* metadata */,
+            hidlMetadata /* metadata */,
 #endif
             [&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& result,
                     const AudioConfig& suggestedConfig) {
@@ -752,10 +765,14 @@
     // the attributes reported by `getParameters` API.
     struct audio_port_v7 temp = *devicePort;
     AudioProfileAttributesMultimap attrsFromDevice;
-    status_t status = getAudioPort(&temp);
-    if (status == NO_ERROR) {
-        attrsFromDevice = createAudioProfilesAttrMap(temp.audio_profiles, 0 /*first*/,
-                                                     temp.num_audio_profiles);
+    bool supportsPatches;
+    if (supportsAudioPatches(&supportsPatches) == OK && supportsPatches) {
+        // Audio patches have been supported since HAL 3.0, which is the same HAL version
+        // requirement as for the 'getAudioPort' API.
+        if (getAudioPort(&temp) == NO_ERROR) {
+            attrsFromDevice = createAudioProfilesAttrMap(temp.audio_profiles, 0 /*first*/,
+                                                         temp.num_audio_profiles);
+        }
     }
     auto streamIt = mStreams.find(mixPort->ext.mix.handle);
     if (streamIt == mStreams.end()) {
@@ -767,7 +784,7 @@
     }
 
     String8 formatsStr;
-    status = getParametersFromStream(
+    status_t status = getParametersFromStream(
             stream, AudioParameter::keyStreamSupportedFormats, nullptr /*extraParameters*/,
             &formatsStr);
     if (status != NO_ERROR) {
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 7a712df..5f3e08c 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -67,13 +67,15 @@
     status_t getParameters(const String8& keys, String8 *values) override;
 
     // Returns audio input buffer size according to parameters passed.
-    status_t getInputBufferSize(const struct audio_config* config, size_t* size) override;
+    status_t getInputBufferSize(struct audio_config* config, size_t* size) override;
 
     // Creates and opens the audio hardware output stream. The stream is closed
     // by releasing all references to the returned object.
     status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
                               audio_output_flags_t flags, struct audio_config* config,
-                              const char* address, sp<StreamOutHalInterface>* outStream) override;
+                              const char* address, sp<StreamOutHalInterface>* outStream,
+                              const std::vector<playback_track_metadata_v7_t>&
+                                                                sourceMetadata = {}) override;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index 3dbc14a..68b650f 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -17,17 +17,21 @@
 #include <algorithm>
 #include <map>
 #include <memory>
+#include <mutex>
 #include <string>
 
 #define LOG_TAG "DevicesFactoryHalAidl"
 //#define LOG_NDEBUG 0
 
 #include <aidl/android/hardware/audio/core/IModule.h>
+#include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
 #include <android/binder_manager.h>
+#include <cutils/properties.h>
 #include <media/AidlConversionNdkCpp.h>
 #include <media/AidlConversionUtil.h>
 #include <utils/Log.h>
 
+#include "AidlUtils.h"
 #include "DeviceHalAidl.h"
 #include "DevicesFactoryHalAidl.h"
 
@@ -35,6 +39,7 @@
 using aidl::android::hardware::audio::core::IConfig;
 using aidl::android::hardware::audio::core::IModule;
 using aidl::android::hardware::audio::core::SurroundSoundConfig;
+using aidl::android::hardware::audio::core::VendorParameter;
 using aidl::android::media::audio::common::AudioHalEngineConfig;
 using aidl::android::media::audio::IHalAdapterVendorExtension;
 using android::detail::AudioHalVersionInfo;
@@ -62,10 +67,84 @@
     return cpp;
 }
 
+class HalAdapterVendorExtensionWrapper :
+            public ::aidl::android::media::audio::BnHalAdapterVendorExtension {
+  private:
+    template<typename F>
+    ndk::ScopedAStatus callWithRetryOnCrash(F method) {
+        ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
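+        // Retry with a freshly fetched service handle when a call fails with DEAD_OBJECT,
+        // i.e. when the vendor extension service has crashed since the last call.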
+        for (auto service = getService(); service != nullptr; service = getService(true)) {
+            status = method(service);
+            if (status.getStatus() != STATUS_DEAD_OBJECT) break;
+        }
+        return status;
+    }
+
+    ndk::ScopedAStatus parseVendorParameterIds(ParameterScope in_scope,
+                                               const std::string& in_rawKeys,
+                                               std::vector<std::string>* _aidl_return) override {
+        return callWithRetryOnCrash([&](auto service) {
+            return service->parseVendorParameterIds(in_scope, in_rawKeys, _aidl_return);
+        });
+    }
+
+    ndk::ScopedAStatus parseVendorParameters(
+            ParameterScope in_scope, const std::string& in_rawKeysAndValues,
+            std::vector<VendorParameter>* out_syncParameters,
+            std::vector<VendorParameter>* out_asyncParameters) override {
+        return callWithRetryOnCrash([&](auto service) {
+            return service->parseVendorParameters(in_scope, in_rawKeysAndValues,
+                    out_syncParameters, out_asyncParameters);
+        });
+    }
+
+    ndk::ScopedAStatus parseBluetoothA2dpReconfigureOffload(
+            const std::string& in_rawValue, std::vector<VendorParameter>* _aidl_return) override {
+        return callWithRetryOnCrash([&](auto service) {
+            return service->parseBluetoothA2dpReconfigureOffload(in_rawValue, _aidl_return);
+        });
+    }
+
+    ndk::ScopedAStatus parseBluetoothLeReconfigureOffload(const std::string& in_rawValue,
+            std::vector<VendorParameter>* _aidl_return) override {
+        return callWithRetryOnCrash([&](auto service) {
+            return service->parseBluetoothLeReconfigureOffload(in_rawValue, _aidl_return);
+        });
+    }
+
+    ndk::ScopedAStatus processVendorParameters(ParameterScope in_scope,
+                                               const std::vector<VendorParameter>& in_parameters,
+                                               std::string* _aidl_return) override {
+        return callWithRetryOnCrash([&](auto service) {
+            return service->processVendorParameters(in_scope, in_parameters, _aidl_return);
+        });
+    }
+
+    std::shared_ptr<IHalAdapterVendorExtension> getService(bool reset = false) {
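+        // Lazily bind to the vendor extension service; 'reset' forces a re-bind after a crash.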
+        std::lock_guard l(mLock);
+        if (reset || !mVendorExt.has_value()) {
+            if (property_get_bool("ro.audio.ihaladaptervendorextension_enabled", false)) {
+                auto serviceName = std::string(IHalAdapterVendorExtension::descriptor) + "/default";
+                mVendorExt = std::shared_ptr<IHalAdapterVendorExtension>(
+                        IHalAdapterVendorExtension::fromBinder(ndk::SpAIBinder(
+                                        AServiceManager_waitForService(serviceName.c_str()))));
+            } else {
+                mVendorExt = nullptr;
+            }
+        }
+        return mVendorExt.value();
+    }
+
+    std::mutex mLock;
+    std::optional<std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>>
+            mVendorExt GUARDED_BY(mLock);
+};
+
 }  // namespace
 
 DevicesFactoryHalAidl::DevicesFactoryHalAidl(std::shared_ptr<IConfig> config)
-    : mConfig(std::move(config)) {
+        : mConfig(std::move(config)),
+          mVendorExt(ndk::SharedRefBase::make<HalAdapterVendorExtensionWrapper>()) {
 }
 
 status_t DevicesFactoryHalAidl::getDeviceNames(std::vector<std::string> *names) {
@@ -101,16 +180,8 @@
     if (name == nullptr || device == nullptr) {
         return BAD_VALUE;
     }
-    std::shared_ptr<IModule> service;
     if (strcmp(name, "primary") == 0) name = "default";
-    auto serviceName = std::string(IModule::descriptor) + "/" + name;
-    service = IModule::fromBinder(
-            ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
-    if (service == nullptr) {
-        ALOGE("%s fromBinder %s failed", __func__, serviceName.c_str());
-        return NO_INIT;
-    }
-    *device = sp<DeviceHalAidl>::make(name, service, getVendorExtension());
+    *device = sp<DeviceHalAidl>::make(name, getServiceInstance<IModule>(name), mVendorExt);
     return OK;
 }
 
@@ -149,30 +220,9 @@
     return OK;
 }
 
-std::shared_ptr<IHalAdapterVendorExtension> DevicesFactoryHalAidl::getVendorExtension() {
-    if (!mVendorExt.has_value()) {
-        auto serviceName = std::string(IHalAdapterVendorExtension::descriptor) + "/default";
-        if (AServiceManager_isDeclared(serviceName.c_str())) {
-            mVendorExt = std::shared_ptr<IHalAdapterVendorExtension>(
-                    IHalAdapterVendorExtension::fromBinder(ndk::SpAIBinder(
-                                    AServiceManager_waitForService(serviceName.c_str()))));
-        } else {
-            mVendorExt = nullptr;
-        }
-    }
-    return mVendorExt.value();
-}
-
 // Main entry-point to the shared library.
 extern "C" __attribute__((visibility("default"))) void* createIDevicesFactoryImpl() {
-    auto serviceName = std::string(IConfig::descriptor) + "/default";
-    auto service = IConfig::fromBinder(
-            ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
-    if (!service) {
-        ALOGE("%s binder service %s not exist", __func__, serviceName.c_str());
-        return nullptr;
-    }
-    return new DevicesFactoryHalAidl(service);
+    return new DevicesFactoryHalAidl(getServiceInstance<IConfig>("default"));
 }
 
 } // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.h b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
index 17bfe43..2a3a9e7 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
@@ -45,10 +45,7 @@
 
   private:
     const std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> mConfig;
-    std::optional<std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>>
-            mVendorExt;
-
-    std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> getVendorExtension();
+    const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
 
     ~DevicesFactoryHalAidl() = default;
 };
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.cpp b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
index a701852..33fe3ed 100644
--- a/media/libaudiohal/impl/EffectBufferHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
@@ -58,25 +58,14 @@
 }
 
 EffectBufferHalAidl::~EffectBufferHalAidl() {
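+    // Free the buffer allocated in init() via posix_memalign.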
+    if (mAudioBuffer.raw) free(mAudioBuffer.raw);
 }
 
 status_t EffectBufferHalAidl::init() {
-    int fd = ashmem_create_region("audioEffectAidl", mBufferSize);
-    if (fd < 0) {
-        ALOGE("%s create ashmem failed %d", __func__, fd);
-        return fd;
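+    // Allocate a 32-byte aligned heap buffer instead of creating an ashmem region.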
+    if (0 != posix_memalign(&mAudioBuffer.raw, 32, mBufferSize)) {
+        return NO_MEMORY;
     }
 
-    ScopedFileDescriptor tempFd(fd);
-    mAudioBuffer.raw = mmap(nullptr /* address */, mBufferSize /* length */, PROT_READ | PROT_WRITE,
-                            MAP_SHARED, fd, 0 /* offset */);
-    if (mAudioBuffer.raw == MAP_FAILED) {
-        ALOGE("mmap failed for fd %d", fd);
-        mAudioBuffer.raw = nullptr;
-        return INVALID_OPERATION;
-    }
-
-    mMemory = {std::move(tempFd), static_cast<int64_t>(mBufferSize)};
     return OK;
 }
 
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.h b/media/libaudiohal/impl/EffectBufferHalAidl.h
index 035314b..cf6031f 100644
--- a/media/libaudiohal/impl/EffectBufferHalAidl.h
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.h
@@ -50,7 +50,6 @@
     const size_t mBufferSize;
     bool mFrameCountChanged;
     void* mExternalData;
-    aidl::android::hardware::common::Ashmem mMemory;
     audio_buffer_t mAudioBuffer;
 
     // Can not be constructed directly by clients.
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index 39999a5..a13903b 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -29,6 +29,7 @@
 #include <system/audio_effects/effect_visualizer.h>
 
 #include <utils/Log.h>
+#include <Utils.h>
 
 #include "EffectConversionHelperAidl.h"
 #include "EffectProxy.h"
@@ -37,18 +38,20 @@
 namespace effect {
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::common::getChannelCount;
 using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::Flags;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::Parameter;
 using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioMode;
 using ::aidl::android::media::audio::common::AudioSource;
-using ::android::hardware::EventFlag;
 using android::effect::utils::EffectParamReader;
 using android::effect::utils::EffectParamWriter;
+using android::hardware::EventFlag;
 
 using ::android::status_t;
 
@@ -181,36 +184,40 @@
     State state;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
     if (state == State::INIT) {
-        ALOGI("%s at state %s, opening effect with input %s output %s", __func__,
+        ALOGD("%s at state %s, opening effect with input %s output %s", __func__,
               android::internal::ToString(state).c_str(), common.input.toString().c_str(),
               common.output.toString().c_str());
         IEffect::OpenEffectReturn openReturn;
         RETURN_STATUS_IF_ERROR(
                 statusTFromBinderStatus(mEffect->open(common, std::nullopt, &openReturn)));
-        updateMqs(openReturn);
-
-        if (status_t status = updateEventFlags(); status != OK) {
-            ALOGV("%s closing at status %d", __func__, status);
-            mEffect->close();
-            return status;
-        }
+        updateMqsAndEventFlags(openReturn);
     } else if (mCommon != common) {
-        ALOGI("%s at state %s, setParameter", __func__, android::internal::ToString(state).c_str());
+        ALOGV("%s at state %s, setCommonParameter %s", __func__,
+              android::internal::ToString(state).c_str(), common.toString().c_str());
         Parameter aidlParam = UNION_MAKE(Parameter, common, common);
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
     }
+    mOutputAccessMode = config->outputCfg.accessMode;
     mCommon = common;
 
     return *static_cast<int32_t*>(pReplyData) = OK;
 }
 
-void EffectConversionHelperAidl::updateMqs(const IEffect::OpenEffectReturn& ret) {
+void EffectConversionHelperAidl::updateMqsAndEventFlags(const IEffect::OpenEffectReturn& ret) {
     if (mIsProxyEffect) {
         mStatusQ = std::static_pointer_cast<EffectProxy>(mEffect)->getStatusMQ();
+    } else {
+        mStatusQ = std::make_shared<StatusMQ>(ret.statusMQ);
+    }
+    updateEventFlags();
+    updateDataMqs(ret);
+}
+
+void EffectConversionHelperAidl::updateDataMqs(const IEffect::OpenEffectReturn& ret) {
+    if (mIsProxyEffect) {
         mInputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getInputMQ();
         mOutputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getOutputMQ();
     } else {
-        mStatusQ = std::make_shared<StatusMQ>(ret.statusMQ);
         mInputQ = std::make_shared<DataMQ>(ret.inputDataMQ);
         mOutputQ = std::make_shared<DataMQ>(ret.outputDataMQ);
     }
@@ -395,22 +402,20 @@
     effect_offload_param_t* offload = (effect_offload_param_t*)pCmdData;
     // send to proxy to update active sub-effect
     if (mIsProxyEffect) {
-        ALOGI("%s offload param offload %s ioHandle %d", __func__,
+        ALOGV("%s offload param offload %s ioHandle %d", __func__,
               offload->isOffload ? "true" : "false", offload->ioHandle);
         const auto& effectProxy = std::static_pointer_cast<EffectProxy>(mEffect);
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(effectProxy->setOffloadParam(offload)));
         if (mCommon.ioHandle != offload->ioHandle) {
-            ALOGI("%s ioHandle update [%d to %d]", __func__, mCommon.ioHandle, offload->ioHandle);
+            ALOGV("%s ioHandle update [%d to %d]", __func__, mCommon.ioHandle, offload->ioHandle);
             mCommon.ioHandle = offload->ioHandle;
             Parameter aidlParam = UNION_MAKE(Parameter, common, mCommon);
             RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
         }
         // update FMQs if the effect instance already open
         if (State state; effectProxy->getState(&state).isOk() && state != State::INIT) {
-            mStatusQ = effectProxy->getStatusMQ();
-            mInputQ = effectProxy->getInputMQ();
-            mOutputQ = effectProxy->getOutputMQ();
-            updateEventFlags();
+            IEffect::OpenEffectReturn openReturn;
+            updateMqsAndEventFlags(openReturn);
         }
     }
     return *static_cast<int32_t*>(pReplyData) = OK;
@@ -512,9 +517,20 @@
     IEffect::OpenEffectReturn openReturn;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->reopen(&openReturn)));
 
-    updateMqs(openReturn);
+    // The status MQ does not change after open, so only the data MQs need updating.
+    updateDataMqs(openReturn);
     return OK;
 }
 
+size_t EffectConversionHelperAidl::getAudioChannelCount() const {
+    return getChannelCount(mCommon.input.base.channelMask,
+                           ~AudioChannelLayout::LAYOUT_HAPTIC_AB /* mask */);
+}
+
+size_t EffectConversionHelperAidl::getHapticChannelCount() const {
+    return getChannelCount(mCommon.input.base.channelMask,
+                           AudioChannelLayout::LAYOUT_HAPTIC_AB /* mask */);
+}
+
 }  // namespace effect
 }  // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 8b9efb3..50b47a9 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -49,6 +49,11 @@
     ::aidl::android::hardware::audio::effect::Descriptor getDescriptor() const;
     status_t reopen();
 
+    size_t getAudioChannelCount() const;
+    size_t getHapticChannelCount() const;
+
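+    // Access mode of the output buffer (write or accumulate), updated when the effect
+    // configuration is set.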
+    uint8_t mOutputAccessMode = EFFECT_BUFFER_ACCESS_WRITE;
+
   protected:
     const int32_t mSessionId;
     const int32_t mIoId;
@@ -69,9 +74,6 @@
                                 void* pReplyData);
 
   private:
-    const aidl::android::media::audio::common::AudioFormatDescription kDefaultFormatDescription = {
-            .type = aidl::android::media::audio::common::AudioFormatType::PCM,
-            .pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT};
     const bool mIsProxyEffect;
 
     static constexpr int kDefaultframeCount = 0x100;
@@ -81,13 +83,16 @@
         return pt ? std::to_string(*pt) : "nullptr";
     }
 
-    using AudioChannelLayout = aidl::android::media::audio::common::AudioChannelLayout;
     const aidl::android::media::audio::common::AudioConfig kDefaultAudioConfig = {
             .base = {.sampleRate = 44100,
-                     .channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
-                             AudioChannelLayout::LAYOUT_STEREO),
-                     .format = kDefaultFormatDescription},
+                     .channelMask = aidl::android::media::audio::common::AudioChannelLayout::make<
+                             aidl::android::media::audio::common::AudioChannelLayout::layoutMask>(
+                             aidl::android::media::audio::common::AudioChannelLayout::
+                                     LAYOUT_STEREO),
+                     .format = {.type = aidl::android::media::audio::common::AudioFormatType::PCM,
+                                .pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT}},
             .frameCount = kDefaultframeCount};
+
     // command handler map
     typedef status_t (EffectConversionHelperAidl::*CommandHandler)(uint32_t /* cmdSize */,
                                                                    const void* /* pCmdData */,
@@ -98,7 +103,6 @@
     std::shared_ptr<StatusMQ> mStatusQ = nullptr;
     std::shared_ptr<DataMQ> mInputQ = nullptr, mOutputQ = nullptr;
 
-
     struct EventFlagDeleter {
         void operator()(::android::hardware::EventFlag* flag) const {
             if (flag) {
@@ -108,8 +112,10 @@
     };
     std::shared_ptr<android::hardware::EventFlag> mEfGroup = nullptr;
     status_t updateEventFlags();
-
-    void updateMqs(const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn& ret);
+    void updateDataMqs(
+            const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn& ret);
+    void updateMqsAndEventFlags(
+            const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn& ret);
 
     status_t handleInit(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
                         void* pReplyData);
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 2836727..ea4dbf6 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -15,11 +15,13 @@
  */
 
 #include <cstddef>
+#include <cstring>
 #define LOG_TAG "EffectHalAidl"
 //#define LOG_NDEBUG 0
 
 #include <memory>
 
+#include <audio_utils/primitives.h>
 #include <error/expected_utils.h>
 #include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionEffect.h>
@@ -53,24 +55,29 @@
 #include "effectsAidlConversion/AidlConversionVisualizer.h"
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty;
 using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
+using ::aidl::android::hardware::audio::effect::kEventFlagNotEmpty;
+using ::aidl::android::hardware::audio::effect::kReopenSupportedVersion;
 using ::aidl::android::hardware::audio::effect::State;
 
 namespace android {
 namespace effect {
 
 EffectHalAidl::EffectHalAidl(const std::shared_ptr<IFactory>& factory,
-                             const std::shared_ptr<IEffect>& effect,
-                             int32_t sessionId, int32_t ioId, const Descriptor& desc,
-                             bool isProxyEffect)
+                             const std::shared_ptr<IEffect>& effect, int32_t sessionId,
+                             int32_t ioId, const Descriptor& desc, bool isProxyEffect)
     : mFactory(factory),
       mEffect(effect),
       mSessionId(sessionId),
       mIoId(ioId),
       mIsProxyEffect(isProxyEffect) {
+    assert(mFactory != nullptr);
+    assert(mEffect != nullptr);
     createAidlConversion(effect, sessionId, ioId, desc);
 }
 
@@ -122,6 +129,7 @@
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator()) {
         mConversion = std::make_unique<android::effect::AidlConversionHapticGenerator>(
                 effect, sessionId, ioId, desc, mIsProxyEffect);
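+        // Remember the effect type so process() can append the generated haptic samples.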
+        mIsHapticGenerator = true;
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer()) {
         mConversion = std::make_unique<android::effect::AidlConversionLoudnessEnhancer>(
@@ -184,10 +192,19 @@
         return INVALID_OPERATION;
     }
 
-    if (uint32_t efState = 0;
-        ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState, 1 /* ns */,
-                                       true /* retry */)) {
-        ALOGI("%s %s receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str());
+    // use IFactory HAL version because IEffect can be an EffectProxy instance
+    static const int halVersion = [&]() {
+        int version = 0;
+        return mFactory->getInterfaceVersion(&version).isOk() ? version : 0;
+    }();
+
+    if (uint32_t efState = 0; halVersion >= kReopenSupportedVersion &&
+                              ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState,
+                                                             1 /* ns */, true /* retry */) &&
+                              efState & kEventFlagDataMqUpdate) {
+        ALOGD("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
+              halVersion);
+
         mConversion->reopen();
     }
     auto statusQ = mConversion->getStatusMQ();
@@ -201,7 +218,7 @@
     }
 
     size_t available = inputQ->availableToWrite();
-    size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
+    const size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
     if (floatsToWrite == 0) {
         ALOGE("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
               mInBuffer->getSize() / sizeof(float), available);
@@ -213,32 +230,74 @@
               floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
         return INVALID_OPERATION;
     }
-    efGroup->wake(aidl::android::hardware::audio::effect::kEventFlagNotEmpty);
+
+    // For the V2 audio effect HAL, use a different EventFlag to avoid a bit conflict
+    // with FMQ_NOT_EMPTY.
+    efGroup->wake(halVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty
+                                                        : kEventFlagNotEmpty);
 
     IEffect::Status retStatus{};
-    if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
-        (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
-        ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str());
+    if (!statusQ->readBlocking(&retStatus, 1)) {
+        ALOGE("%s %s V%d read status from status FMQ failed", __func__, effectName.c_str(),
+              halVersion);
+        return INVALID_OPERATION;
+    }
+    if (retStatus.status != OK || (size_t)retStatus.fmqConsumed != floatsToWrite ||
+        retStatus.fmqProduced == 0) {
+        ALOGE("%s read status failed: %s, consumed %d (of %zu) produced %d", __func__,
+              retStatus.toString().c_str(), retStatus.fmqConsumed, floatsToWrite,
+              retStatus.fmqProduced);
         return INVALID_OPERATION;
     }
 
     available = outputQ->availableToRead();
-    size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
+    const size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
     if (floatsToRead == 0) {
         ALOGE("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
               mOutBuffer->getSize() / sizeof(float), available);
         return INVALID_OPERATION;
     }
+
+    float *outputRawBuffer = mOutBuffer->audioBuffer()->f32;
+    std::vector<float> tempBuffer;
+    // keep original data in the output buffer for accumulate mode or HapticGenerator effect
+    if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE || mIsHapticGenerator) {
+        tempBuffer.resize(floatsToRead);
+        outputRawBuffer = tempBuffer.data();
+    }
     // always read floating point data for AIDL
-    if (!mOutBuffer->audioBuffer() ||
-        !outputQ->read(mOutBuffer->audioBuffer()->f32, floatsToRead)) {
+    if (!outputQ->read(outputRawBuffer, floatsToRead)) {
         ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", __func__, floatsToRead,
               mOutBuffer->audioBuffer());
         return INVALID_OPERATION;
     }
 
-    ALOGD("%s %s consumed %zu produced %zu", __func__, effectName.c_str(), floatsToWrite,
-          floatsToRead);
+    // HapticGenerator needs special handling because the generated haptic samples must be appended
+    // to the end of the audio samples. The HAL passes the generated haptic data back in the output
+    // FMQ at the same offset as in the input buffer, so here we skip the audio samples in the
+    // output FMQ and append the haptic samples to the end of the input buffer.
+    if (mIsHapticGenerator) {
+        static constexpr float kHalFloatSampleLimit = 2.0f;
+        assert(floatsToRead == floatsToWrite);
+        const auto audioChNum = mConversion->getAudioChannelCount();
+        const auto audioSamples =
+                floatsToWrite * audioChNum / (audioChNum + mConversion->getHapticChannelCount());
+        // Accumulate or copy the input to the output; the haptic samples remain all zero.
+        if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            accumulate_float(mOutBuffer->audioBuffer()->f32, mInBuffer->audioBuffer()->f32,
+                             audioSamples);
+        } else {
+            memcpy_to_float_from_float_with_clamping(mOutBuffer->audioBuffer()->f32,
+                                                     mInBuffer->audioBuffer()->f32, audioSamples,
+                                                     kHalFloatSampleLimit);
+        }
+        // Append the haptic samples to the end of the input audio samples.
+        memcpy_to_float_from_float_with_clamping(mInBuffer->audioBuffer()->f32 + audioSamples,
+                                                 outputRawBuffer + audioSamples,
+                                                 floatsToRead - audioSamples, kHalFloatSampleLimit);
+    } else if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+        accumulate_float(mOutBuffer->audioBuffer()->f32, outputRawBuffer, floatsToRead);
+    }
+
     return OK;
 }
 
@@ -275,6 +334,7 @@
 
 status_t EffectHalAidl::close() {
     TIME_CHECK();
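+    // Ensure the effect is stopped before closing it.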
+    mEffect->command(CommandId::STOP);
     return statusTFromBinderStatus(mEffect->close());
 }
 
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
index bbcb7e2..4f7de7c 100644
--- a/media/libaudiohal/impl/EffectHalAidl.h
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -73,6 +73,7 @@
     const int32_t mSessionId;
     const int32_t mIoId;
     const bool mIsProxyEffect;
+    bool mIsHapticGenerator = false;
 
     std::unique_ptr<EffectConversionHelperAidl> mConversion;
 
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
index d440ef8..c7c6536 100644
--- a/media/libaudiohal/impl/EffectProxy.cpp
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -82,8 +82,7 @@
 ndk::ScopedAStatus EffectProxy::setOffloadParam(const effect_offload_param_t* offload) {
     const auto& itor = std::find_if(mSubEffects.begin(), mSubEffects.end(), [&](const auto& sub) {
         const auto& desc = sub.descriptor;
-        return offload->isOffload ==
-               (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL);
+        return offload->isOffload == desc.common.flags.offloadIndication;
     });
     if (itor == mSubEffects.end()) {
         ALOGE("%s no %soffload sub-effect found", __func__, offload->isOffload ? "" : "non-");
@@ -93,7 +92,7 @@
     }
 
     mActiveSubIdx = std::distance(mSubEffects.begin(), itor);
-    ALOGI("%s: active %soffload sub-effect %zu descriptor: %s", __func__,
+    ALOGI("%s: active %soffload sub-effect %zu: %s", __func__,
           offload->isOffload ? "" : "non-", mActiveSubIdx,
           ::android::audio::utils::toString(mSubEffects[mActiveSubIdx].descriptor.common.id.uuid)
                   .c_str());
@@ -147,7 +146,7 @@
 
     // close all opened effects if failure
     if (!status.isOk()) {
-        ALOGE("%s: closing all sub-effects with error %s", __func__,
+        ALOGW("%s: closing all sub-effects with error %s", __func__,
               status.getDescription().c_str());
         close();
     }
@@ -164,7 +163,7 @@
 
 ndk::ScopedAStatus EffectProxy::getDescriptor(Descriptor* desc) {
     *desc = mSubEffects[mActiveSubIdx].descriptor;
-    desc->common.id.uuid = desc->common.id.proxy.value();
+    desc->common = mDescriptorCommon;
     return ndk::ScopedAStatus::ok();
 }
 
@@ -186,38 +185,35 @@
     return ndk::ScopedAStatus::ok();
 }
 
+// Sub-effects are required to have identical features, so here we return the SW sub-effect
+// descriptor, with the implementation UUID replaced by the proxy UUID, and flag settings that
+// reflect all sub-effects.
 Descriptor::Common EffectProxy::buildDescriptorCommon(
         const AudioUuid& uuid, const std::vector<Descriptor>& subEffectDescs) {
-    // initial flag values before we know which sub-effect to active (with setOffloadParam)
-    // align to HIDL EffectProxy flags
-    Descriptor::Common common = {.flags = {.type = Flags::Type::INSERT,
-                                           .insert = Flags::Insert::LAST,
-                                           .volume = Flags::Volume::CTRL}};
-
+    Descriptor::Common swCommon;
+    const Flags& firstFlag = subEffectDescs[0].common.flags;
+    bool offloadExist = false;
     for (const auto& desc : subEffectDescs) {
-        if (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL) {
-            common.flags.hwAcceleratorMode = Flags::HardwareAccelerator::TUNNEL;
+        if (desc.common.flags.offloadIndication) {
+            offloadExist = true;
+        } else {
+            swCommon = desc.common;
         }
-
-        // set indication if any sub-effect indication was set
-        common.flags.offloadIndication |= desc.common.flags.offloadIndication;
-        common.flags.deviceIndication |= desc.common.flags.deviceIndication;
-        common.flags.audioModeIndication |= desc.common.flags.audioModeIndication;
-        common.flags.audioSourceIndication |= desc.common.flags.audioSourceIndication;
-        // Set to NONE if any sub-effect not supporting any Volume command
-        if (desc.common.flags.volume == Flags::Volume::NONE) {
-            common.flags.volume = Flags::Volume::NONE;
+        if (desc.common.flags.audioModeIndication != firstFlag.audioModeIndication ||
+            desc.common.flags.audioSourceIndication != firstFlag.audioSourceIndication ||
+            desc.common.flags.sinkMetadataIndication != firstFlag.sinkMetadataIndication ||
+            desc.common.flags.sourceMetadataIndication != firstFlag.sourceMetadataIndication ||
+            desc.common.flags.deviceIndication != firstFlag.deviceIndication) {
+            ALOGW("Inconsistent flags %s vs %s", desc.common.flags.toString().c_str(),
+                  firstFlag.toString().c_str());
         }
     }
 
-    // copy type UUID from any of sub-effects, all sub-effects should have same type
-    common.id.type = subEffectDescs[0].common.id.type;
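+    // Advertise offload support on the proxy if any sub-effect indicates it.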
+    swCommon.flags.offloadIndication = offloadExist;
     // replace implementation UUID with proxy UUID.
-    common.id.uuid = uuid;
-    common.id.proxy = std::nullopt;
-    common.name = "Proxy";
-    common.implementor = "AOSP";
-    return common;
+    swCommon.id.uuid = uuid;
+    swCommon.id.proxy = std::nullopt;
+    return swCommon;
 }
 
 // Handle with active sub-effect first, only send to other sub-effects when success
@@ -256,7 +252,7 @@
         std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
     ndk::ScopedAStatus status = runWithActiveSubEffect(func);
     if (!status.isOk()) {
-        ALOGE("%s active sub-effect return error %s", __func__, status.getDescription().c_str());
+        ALOGW("%s active sub-effect return error %s", __func__, status.getDescription().c_str());
     }
 
     // proceed with others
@@ -265,7 +261,7 @@
             continue;
         }
         if (!mSubEffects[i].handle) {
-            ALOGE("%s null sub-effect interface for %s", __func__,
+            ALOGW("%s null sub-effect interface for %s", __func__,
                   mSubEffects[i].descriptor.common.id.uuid.toString().c_str());
             continue;
         }
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 7d807b2..2753906 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -31,6 +31,7 @@
 #include <system/audio_aidl_utils.h>
 #include <utils/Log.h>
 
+#include "AidlUtils.h"
 #include "EffectBufferHalAidl.h"
 #include "EffectHalAidl.h"
 #include "EffectProxy.h"
@@ -41,6 +42,8 @@
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IFactory;
 using ::aidl::android::hardware::audio::effect::Processing;
+using ::aidl::android::media::audio::common::AudioDevice;
+using ::aidl::android::media::audio::common::AudioDeviceAddress;
 using ::aidl::android::media::audio::common::AudioSource;
 using ::aidl::android::media::audio::common::AudioStreamType;
 using ::aidl::android::media::audio::common::AudioUuid;
@@ -120,8 +123,6 @@
     }
 
     *pNumEffects = mEffectCount;
-    ALOGD("%s %u non %zu proxyMap %zu proxyDesc %zu", __func__, *pNumEffects,
-          mNonProxyDescList.size(), mProxyUuidDescriptorMap.size(), mProxyDescList.size());
     return OK;
 }
 
@@ -175,10 +176,7 @@
     if (uuid == nullptr || effect == nullptr) {
         return BAD_VALUE;
     }
-    if (sessionId == AUDIO_SESSION_DEVICE && ioId == AUDIO_IO_HANDLE_NONE) {
-        return INVALID_OPERATION;
-    }
-    ALOGI("%s session %d ioId %d", __func__, sessionId, ioId);
+    ALOGV("%s session %d ioId %d", __func__, sessionId, ioId);
 
     AudioUuid aidlUuid =
             VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
@@ -189,7 +187,6 @@
         aidlEffect = ndk::SharedRefBase::make<EffectProxy>(
                 aidlUuid, mProxyUuidDescriptorMap.at(aidlUuid) /* sub-effect descriptor list */,
                 mFactory);
-        mProxyList.emplace_back(std::static_pointer_cast<EffectProxy>(aidlEffect));
     } else {
         RETURN_STATUS_IF_ERROR(
                 statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
@@ -206,25 +203,17 @@
 }
 
 status_t EffectsFactoryHalAidl::dumpEffects(int fd) {
-    status_t ret = OK;
-    // record the error ret and continue dump as many effects as possible
-    for (const auto& proxy : mProxyList) {
-        if (status_t temp = BAD_VALUE; proxy && (temp = proxy->dump(fd, nullptr, 0)) != OK) {
-            ret = temp;
-        }
-    }
+    // TODO: b/333803769 improve the effect dump implementation
     RETURN_STATUS_IF_ERROR(mFactory->dump(fd, nullptr, 0));
-    return ret;
+    return OK;
 }
 
 status_t EffectsFactoryHalAidl::allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) {
-    ALOGI("%s size %zu buffer %p", __func__, size, buffer);
     return EffectBufferHalAidl::allocate(size, buffer);
 }
 
 status_t EffectsFactoryHalAidl::mirrorBuffer(void* external, size_t size,
                                              sp<EffectBufferHalInterface>* buffer) {
-    ALOGI("%s extern %p size %zu buffer %p", __func__, external, size, buffer);
     return EffectBufferHalAidl::mirror(external, size, buffer);
 }
 
@@ -245,7 +234,6 @@
         ALOGE("%s UUID not found in HAL and proxy list %s", __func__, toString(uuid).c_str());
         return NAME_NOT_FOUND;
     }
-    ALOGI("%s UUID impl found %s", __func__, toString(uuid).c_str());
 
     *pDescriptor = VALUE_OR_RETURN_STATUS(
             ::aidl::android::aidl2legacy_Descriptor_effect_descriptor(*matchIt));
@@ -267,7 +255,6 @@
         ALOGW("%s UUID type not found in HAL and proxy list %s", __func__, toString(type).c_str());
         return BAD_VALUE;
     }
-    ALOGI("%s UUID type found %zu \n %s", __func__, result.size(), toString(type).c_str());
 
     *descriptors = VALUE_OR_RETURN_STATUS(
             aidl::android::convertContainer<std::vector<effect_descriptor_t>>(
@@ -296,7 +283,8 @@
 
     auto getConfigProcessingWithAidlProcessing =
             [&](const auto& aidlProcess, std::vector<effectsConfig::InputStream>& preprocess,
-                std::vector<effectsConfig::OutputStream>& postprocess) {
+                std::vector<effectsConfig::OutputStream>& postprocess,
+                std::vector<effectsConfig::DeviceEffects>& deviceprocess) {
                 if (aidlProcess.type.getTag() == Processing::Type::streamType) {
                     AudioStreamType aidlType =
                             aidlProcess.type.template get<Processing::Type::streamType>();
@@ -328,6 +316,25 @@
                     effectsConfig::InputStream stream = {.type = type.value(),
                                                          .effects = std::move(effects)};
                     preprocess.emplace_back(stream);
+                } else if (aidlProcess.type.getTag() == Processing::Type::device) {
+                    AudioDevice aidlDevice =
+                            aidlProcess.type.template get<Processing::Type::device>();
+                    std::vector<std::shared_ptr<const effectsConfig::Effect>> effects;
+                    std::transform(aidlProcess.ids.begin(), aidlProcess.ids.end(),
+                                   std::back_inserter(effects), getConfigEffectWithDescriptor);
+                    audio_devices_t type;
+                    char address[AUDIO_DEVICE_MAX_ADDRESS_LEN];
+                    status_t status = ::aidl::android::aidl2legacy_AudioDevice_audio_device(
+                            aidlDevice, &type, address);
+                    if (status != NO_ERROR) {
+                        ALOGE("%s device effect has invalid device type / address", __func__);
+                        return;
+                    }
+                    effectsConfig::DeviceEffects device = {
+                            {.type = type, .effects = std::move(effects)},
+                            .address = address,
+                    };
+                    deviceprocess.emplace_back(device);
                 }
             };
 
@@ -335,17 +342,21 @@
             [&]() -> std::shared_ptr<const effectsConfig::Processings> {
                 std::vector<effectsConfig::InputStream> preprocess;
                 std::vector<effectsConfig::OutputStream> postprocess;
+                std::vector<effectsConfig::DeviceEffects> deviceprocess;
                 for (const auto& processing : mAidlProcessings) {
-                    getConfigProcessingWithAidlProcessing(processing, preprocess, postprocess);
+                    getConfigProcessingWithAidlProcessing(processing, preprocess, postprocess,
+                                                          deviceprocess);
                 }
 
-                if (0 == preprocess.size() && 0 == postprocess.size()) {
+                if (0 == preprocess.size() && 0 == postprocess.size() &&
+                    0 == deviceprocess.size()) {
                     return nullptr;
                 }
 
                 return std::make_shared<const effectsConfig::Processings>(
                         effectsConfig::Processings({.preprocess = std::move(preprocess),
-                                                    .postprocess = std::move(postprocess)}));
+                                                    .postprocess = std::move(postprocess),
+                                                    .deviceprocess = std::move(deviceprocess)}));
             }());
 
     return processings;
@@ -362,14 +373,7 @@
 // exports from a static library are optimized out unless actually used by
 // the shared library. See EffectsFactoryHalEntry.cpp.
 extern "C" void* createIEffectsFactoryImpl() {
-    auto serviceName = std::string(IFactory::descriptor) + "/default";
-    auto service = IFactory::fromBinder(
-            ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
-    if (!service) {
-        ALOGE("%s binder service %s not exist", __func__, serviceName.c_str());
-        return nullptr;
-    }
-    return new effect::EffectsFactoryHalAidl(service);
+    return new effect::EffectsFactoryHalAidl(getServiceInstance<IFactory>("default"));
 }
 
 } // namespace android
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 73089b0..3b8628c 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -84,9 +84,6 @@
     // Query result of pre and post processing from effect factory
     const std::vector<Processing> mAidlProcessings;
 
-    // list of the EffectProxy instances
-    std::list<std::shared_ptr<EffectProxy>> mProxyList;
-
     virtual ~EffectsFactoryHalAidl() = default;
     status_t getHalDescriptorWithImplUuid(
             const ::aidl::android::media::audio::common::AudioUuid& uuid,
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 2b7f298..a01ac4b 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -37,6 +37,7 @@
 using aidl::android::media::audio::common::AudioDeviceType;
 using aidl::android::media::audio::common::AudioFormatDescription;
 using aidl::android::media::audio::common::AudioFormatType;
+using aidl::android::media::audio::common::AudioGainConfig;
 using aidl::android::media::audio::common::AudioInputFlags;
 using aidl::android::media::audio::common::AudioIoFlags;
 using aidl::android::media::audio::common::AudioOutputFlags;
@@ -102,8 +103,8 @@
 }
 
 void Hal2AidlMapper::addStream(
-        const sp<StreamHalInterface>& stream, int32_t portConfigId, int32_t patchId) {
-    mStreams.insert(std::pair(stream, std::pair(portConfigId, patchId)));
+        const sp<StreamHalInterface>& stream, int32_t mixPortConfigId, int32_t patchId) {
+    mStreams.insert(std::pair(stream, std::pair(mixPortConfigId, patchId)));
 }
 
 bool Hal2AidlMapper::audioDeviceMatches(const AudioDevice& device, const AudioPort& p) {
@@ -136,8 +137,8 @@
     // 'sinks' will not be updated because 'setAudioPatch' only needs IDs. Here we log
     // the source arguments, where only the audio configuration and device specifications
     // are relevant.
-    ALOGD("%s: [disregard IDs] sources: %s, sinks: %s",
-            __func__, ::android::internal::ToString(sources).c_str(),
+    ALOGD("%s: patch ID: %d, [disregard IDs] sources: %s, sinks: %s",
+            __func__, *patchId, ::android::internal::ToString(sources).c_str(),
             ::android::internal::ToString(sinks).c_str());
     auto fillPortConfigs = [&](
             const std::vector<AudioPortConfig>& configs,
@@ -181,7 +182,9 @@
     };
     // When looking up port configs, the destinationPortId is only used for mix ports.
     // Thus, we process device port configs first, and look up the destination port ID from them.
-    bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+    const bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+            [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
+    const bool sinkIsDevice = std::any_of(sinks.begin(), sinks.end(),
             [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
     const std::vector<AudioPortConfig>& devicePortConfigs =
             sourceIsDevice ? sources : sinks;
@@ -202,10 +205,29 @@
         existingPatchIt->second = patch;
     } else {
         bool created = false;
-        RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, &patch, &created));
+        // When the framework does not specify a patch ID, only the mix port config
+        // is used for finding an existing patch. That's because the framework assumes
+        // that there can only be one patch for an I/O thread.
+        PatchMatch match = sourceIsDevice && sinkIsDevice ?
+                MATCH_BOTH : (sourceIsDevice ? MATCH_SINKS : MATCH_SOURCES);
+        auto requestedPatch = patch;
+        RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, match,
+                                                 &patch, &created));
         // No cleanup of the patch is needed, it is managed by the framework.
         *patchId = patch.id;
         if (!created) {
+            requestedPatch.id = patch.id;
+            if (patch != requestedPatch) {
+                ALOGI("%s: Updating transient patch. Current: %s, new: %s",
+                        __func__, patch.toString().c_str(), requestedPatch.toString().c_str());
+                // Since matching may be done by mix port only, update the patch if the device port
+                // config has changed.
+                patch = requestedPatch;
+                RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                                mModule->setAudioPatch(patch, &patch)));
+                existingPatchIt = mPatches.find(patch.id);
+                existingPatchIt->second = patch;
+            }
             // The framework might have "created" a patch which already existed due to
             // stream creation. Need to release the ownership from the stream.
             for (auto& s : mStreams) {
@@ -274,18 +296,18 @@
 }
 
 status_t Hal2AidlMapper::findOrCreatePatch(
-        const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
+        const AudioPatch& requestedPatch, PatchMatch match, AudioPatch* patch, bool* created) {
     std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
             requestedPatch.sourcePortConfigIds.end());
     std::set<int32_t> sinkPortConfigIds(requestedPatch.sinkPortConfigIds.begin(),
             requestedPatch.sinkPortConfigIds.end());
-    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, patch, created);
+    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, match, patch, created);
 }
 
 status_t Hal2AidlMapper::findOrCreatePatch(
         const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
-        AudioPatch* patch, bool* created) {
-    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds);
+        PatchMatch match, AudioPatch* patch, bool* created) {
+    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds, match);
     if (patchIt == mPatches.end()) {
         AudioPatch requestedPatch, appliedPatch;
         requestedPatch.sourcePortConfigIds.insert(requestedPatch.sourcePortConfigIds.end(),
@@ -304,8 +326,8 @@
 }
 
 status_t Hal2AidlMapper::findOrCreateDevicePortConfig(
-        const AudioDevice& device, const AudioConfig* config, AudioPortConfig* portConfig,
-        bool* created) {
+        const AudioDevice& device, const AudioConfig* config, const AudioGainConfig* gainConfig,
+        AudioPortConfig* portConfig, bool* created) {
     if (auto portConfigIt = findPortConfig(device); portConfigIt == mPortConfigs.end()) {
         auto portsIt = findPort(device);
         if (portsIt == mPorts.end()) {
@@ -318,12 +340,18 @@
         if (config != nullptr) {
             setPortConfigFromConfig(&requestedPortConfig, *config);
         }
+        if (gainConfig != nullptr) {
+            requestedPortConfig.gain = *gainConfig;
+        }
         return createOrUpdatePortConfigRetry(requestedPortConfig, portConfig, created);
     } else {
         AudioPortConfig requestedPortConfig = portConfigIt->second;
         if (config != nullptr) {
             setPortConfigFromConfig(&requestedPortConfig, *config);
         }
+        if (gainConfig != nullptr) {
+            requestedPortConfig.gain = *gainConfig;
+        }
 
         if (requestedPortConfig != portConfigIt->second) {
             return createOrUpdatePortConfigRetry(requestedPortConfig, portConfig, created);
@@ -426,18 +454,26 @@
                 requestedPortConfig.ext.get<Tag::mix>().handle, source, destinationPortIds,
                 portConfig, created);
     } else if (requestedPortConfig.ext.getTag() == Tag::device) {
-        if (const auto& p = requestedPortConfig;
-                p.sampleRate.has_value() && p.channelMask.has_value() &&
-                p.format.has_value()) {
-            AudioConfig config;
-            setConfigFromPortConfig(&config, requestedPortConfig);
+        const auto& p = requestedPortConfig;
+        const bool hasAudioConfig =
+                p.sampleRate.has_value() && p.channelMask.has_value() && p.format.has_value();
+        const bool hasGainConfig = p.gain.has_value();
+        if (hasAudioConfig || hasGainConfig) {
+            AudioConfig config, *configPtr = nullptr;
+            if (hasAudioConfig) {
+                setConfigFromPortConfig(&config, requestedPortConfig);
+                configPtr = &config;
+            }
+            const AudioGainConfig* gainConfigPtr = nullptr;
+            if (hasGainConfig) gainConfigPtr = &(*(p.gain));
             return findOrCreateDevicePortConfig(
-                    requestedPortConfig.ext.get<Tag::device>().device, &config,
+                    requestedPortConfig.ext.get<Tag::device>().device, configPtr, gainConfigPtr,
                     portConfig, created);
         } else {
+            ALOGD("%s: device port config does not have audio or gain config specified", __func__);
             return findOrCreateDevicePortConfig(
                     requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
-                    portConfig, created);
+                    nullptr /*gainConfig*/, portConfig, created);
         }
     }
     ALOGW("%s: unsupported audio port config: %s",
@@ -450,13 +486,14 @@
         *portConfig = it->second;
         return OK;
     }
-    ALOGE("%s: could not find a configured device port for device %s",
+    ALOGE("%s: could not find a device port config for device %s",
             __func__, device.toString().c_str());
     return BAD_VALUE;
 }
 
 Hal2AidlMapper::Patches::iterator Hal2AidlMapper::findPatch(
-        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds) {
+        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
+        PatchMatch match) {
     return std::find_if(mPatches.begin(), mPatches.end(),
             [&](const auto& pair) {
                 const auto& p = pair.second;
@@ -464,7 +501,15 @@
                         p.sourcePortConfigIds.begin(), p.sourcePortConfigIds.end());
                 std::set<int32_t> patchSinks(
                         p.sinkPortConfigIds.begin(), p.sinkPortConfigIds.end());
-                return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks; });
+                switch (match) {
+                    case MATCH_SOURCES:
+                        return sourcePortConfigIds == patchSrcs;
+                    case MATCH_SINKS:
+                        return sinkPortConfigIds == patchSinks;
+                    case MATCH_BOTH:
+                        return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks;
+                }
+            });
 }
 
 Hal2AidlMapper::Ports::iterator Hal2AidlMapper::findPort(const AudioDevice& device) {
@@ -698,20 +743,23 @@
     return OK;
 }
 
-bool Hal2AidlMapper::isPortBeingHeld(int32_t portId) {
-    // It is assumed that mStreams has already been cleaned up.
-    for (const auto& s : mStreams) {
-        if (portConfigBelongsToPort(s.second.first, portId)) return true;
-    }
-    for (const auto& [_, patch] : mPatches) {
+std::set<int32_t> Hal2AidlMapper::getPatchIdsByPortId(int32_t portId) {
+    std::set<int32_t> result;
+    for (const auto& [patchId, patch] : mPatches) {
         for (int32_t id : patch.sourcePortConfigIds) {
-            if (portConfigBelongsToPort(id, portId)) return true;
+            if (portConfigBelongsToPort(id, portId)) {
+                result.insert(patchId);
+                break;
+            }
         }
         for (int32_t id : patch.sinkPortConfigIds) {
-            if (portConfigBelongsToPort(id, portId)) return true;
+            if (portConfigBelongsToPort(id, portId)) {
+                result.insert(patchId);
+                break;
+            }
         }
     }
-    return false;
+    return result;
 }
 
 status_t Hal2AidlMapper::prepareToDisconnectExternalDevice(const AudioPort& devicePort) {
@@ -730,13 +778,13 @@
             this, __func__, ioHandle, device.toString().c_str(),
             flags.toString().c_str(), toString(source).c_str(),
             config->toString().c_str(), mixPortConfig->toString().c_str());
-    resetUnusedPatchesPortConfigsAndPorts();
+    resetUnusedPatchesAndPortConfigs();
     const AudioConfig initialConfig = *config;
     // Find / create AudioPortConfigs for the device port and the mix port,
     // then find / create a patch between them, and open a stream on the mix port.
     AudioPortConfig devicePortConfig;
     bool created = false;
-    RETURN_STATUS_IF_ERROR(findOrCreateDevicePortConfig(device, config,
+    RETURN_STATUS_IF_ERROR(findOrCreateDevicePortConfig(device, config, nullptr /*gainConfig*/,
                     &devicePortConfig, &created));
     LOG_ALWAYS_FATAL_IF(devicePortConfig.id == 0);
     if (created) {
@@ -813,10 +861,10 @@
     }
     if (isInput) {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {devicePortConfigId}, {mixPortConfig->id}, patch, &created));
+                        {devicePortConfigId}, {mixPortConfig->id}, MATCH_BOTH, patch, &created));
     } else {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {mixPortConfig->id}, {devicePortConfigId}, patch, &created));
+                        {mixPortConfig->id}, {devicePortConfigId}, MATCH_BOTH, patch, &created));
     }
     if (created) {
         cleanups->add(&Hal2AidlMapper::resetPatch, patch->id);
@@ -843,39 +891,53 @@
     return releaseAudioPatches({patchId});
 }
 
+// Note: does not reset port configs.
+status_t Hal2AidlMapper::releaseAudioPatch(Patches::iterator it) {
+    const int32_t patchId = it->first;
+    if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
+        ALOGE("%s: error while resetting patch %d: %s",
+                __func__, patchId, status.getDescription().c_str());
+        return statusTFromBinderStatus(status);
+    }
+    mPatches.erase(it);
+    for (auto it = mFwkPatches.begin(); it != mFwkPatches.end(); ++it) {
+        if (it->second == patchId) {
+            mFwkPatches.erase(it);
+            break;
+        }
+    }
+    return OK;
+}
+
 status_t Hal2AidlMapper::releaseAudioPatches(const std::set<int32_t>& patchIds) {
     status_t result = OK;
     for (const auto patchId : patchIds) {
         if (auto it = mPatches.find(patchId); it != mPatches.end()) {
-            mPatches.erase(it);
-            if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
-                ALOGE("%s: error while resetting patch %d: %s",
-                        __func__, patchId, status.getDescription().c_str());
-                result = statusTFromBinderStatus(status);
-            }
+            releaseAudioPatch(it);
         } else {
             ALOGE("%s: patch id %d not found", __func__, patchId);
             result = BAD_VALUE;
         }
     }
-    resetUnusedPortConfigsAndPorts();
+    resetUnusedPortConfigs();
     return result;
 }
 
 void Hal2AidlMapper::resetPortConfig(int32_t portConfigId) {
     if (auto it = mPortConfigs.find(portConfigId); it != mPortConfigs.end()) {
-        mPortConfigs.erase(it);
         if (ndk::ScopedAStatus status = mModule->resetAudioPortConfig(portConfigId);
                 !status.isOk()) {
             ALOGE("%s: error while resetting port config %d: %s",
                     __func__, portConfigId, status.getDescription().c_str());
+            return;
         }
+        mPortConfigs.erase(it);
         return;
     }
     ALOGE("%s: port config id %d not found", __func__, portConfigId);
 }
 
-void Hal2AidlMapper::resetUnusedPatchesPortConfigsAndPorts() {
+void Hal2AidlMapper::resetUnusedPatchesAndPortConfigs() {
     // Since patches can be created independently of streams via 'createOrUpdatePatch',
     // here we only clean up patches for released streams.
     std::set<int32_t> patchesToRelease;
@@ -889,52 +951,35 @@
             it = mStreams.erase(it);
         }
     }
-    // 'releaseAudioPatches' also resets unused port configs and ports.
+    // 'releaseAudioPatches' also resets unused port configs.
     releaseAudioPatches(patchesToRelease);
 }
 
-void Hal2AidlMapper::resetUnusedPortConfigsAndPorts() {
+void Hal2AidlMapper::resetUnusedPortConfigs() {
     // The assumption is that port configs are used to create patches
     // (or to open streams, but that involves creation of patches, too). Thus,
     // orphaned port configs can and should be reset.
-    std::map<int32_t, int32_t /*portID*/> portConfigIds;
+    std::set<int32_t> portConfigIdsToReset;
     std::transform(mPortConfigs.begin(), mPortConfigs.end(),
-            std::inserter(portConfigIds, portConfigIds.end()),
-            [](const auto& pcPair) { return std::make_pair(pcPair.first, pcPair.second.portId); });
+            std::inserter(portConfigIdsToReset, portConfigIdsToReset.end()),
+            [](const auto& pcPair) { return pcPair.first; });
     for (const auto& p : mPatches) {
-        for (int32_t id : p.second.sourcePortConfigIds) portConfigIds.erase(id);
-        for (int32_t id : p.second.sinkPortConfigIds) portConfigIds.erase(id);
+        for (int32_t id : p.second.sourcePortConfigIds) portConfigIdsToReset.erase(id);
+        for (int32_t id : p.second.sinkPortConfigIds) portConfigIdsToReset.erase(id);
     }
     for (int32_t id : mInitialPortConfigIds) {
-        portConfigIds.erase(id);
+        portConfigIdsToReset.erase(id);
     }
     for (const auto& s : mStreams) {
-        portConfigIds.erase(s.second.first);
+        portConfigIdsToReset.erase(s.second.first);
     }
-    std::set<int32_t> retryDeviceDisconnection;
-    for (const auto& portConfigAndIdPair : portConfigIds) {
-        resetPortConfig(portConfigAndIdPair.first);
-        if (const auto it = mConnectedPorts.find(portConfigAndIdPair.second);
-                it != mConnectedPorts.end() && it->second) {
-            retryDeviceDisconnection.insert(portConfigAndIdPair.second);
-        }
-    }
-    for (int32_t portId : retryDeviceDisconnection) {
-        if (!isPortBeingHeld(portId)) {
-            if (auto status = mModule->disconnectExternalDevice(portId); status.isOk()) {
-                eraseConnectedPort(portId);
-                ALOGD("%s: executed postponed external device disconnection for port ID %d",
-                        __func__, portId);
-            }
-        }
-    }
-    if (!retryDeviceDisconnection.empty()) {
-        updateRoutes();
+    for (const auto& portConfigId : portConfigIdsToReset) {
+        resetPortConfig(portConfigId);
     }
 }
 
 status_t Hal2AidlMapper::setDevicePortConnectedState(const AudioPort& devicePort, bool connected) {
-    resetUnusedPatchesPortConfigsAndPorts();
+    resetUnusedPatchesAndPortConfigs();
     if (connected) {
         AudioDevice matchDevice = devicePort.ext.get<AudioPortExt::device>().device;
         std::optional<AudioPort> templatePort;
@@ -980,7 +1025,7 @@
                 "%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
                 __func__, mInstance.c_str(), connectedPort.toString().c_str(),
                 it->second.toString().c_str());
-        mConnectedPorts[connectedPort.id] = false;
+        mConnectedPorts.insert(connectedPort.id);
         if (erasePortAfterConnectionIt != mPorts.end()) {
             mPorts.erase(erasePortAfterConnectionIt);
         }
@@ -1007,17 +1052,34 @@
             port.ext.get<AudioPortExt::Tag::device>().device = matchDevice;
             port.profiles = portsIt->second.profiles;
         }
-        // Streams are closed by AudioFlinger independently from device disconnections.
-        // It is possible that the stream has not been closed yet.
-        if (!isPortBeingHeld(portId)) {
-            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                            mModule->disconnectExternalDevice(portId)));
-            eraseConnectedPort(portId);
-        } else {
-            ALOGD("%s: since device port ID %d is used by a stream, "
-                    "external device disconnection postponed", __func__, portId);
-            mConnectedPorts[portId] = true;
+
+        // Patches may still exist, the framework may reset or update them later.
+        // For disconnection to succeed, need to release these patches first.
+        if (std::set<int32_t> patchIdsToRelease = getPatchIdsByPortId(portId);
+                !patchIdsToRelease.empty()) {
+            FwkPatches releasedPatches;
+            status_t status = OK;
+            for (int32_t patchId : patchIdsToRelease) {
+                if (auto it = mPatches.find(patchId); it != mPatches.end()) {
+                    if (status = releaseAudioPatch(it); status != OK) break;
+                    releasedPatches.insert(std::make_pair(patchId, patchId));
+                }
+            }
+            resetUnusedPortConfigs();
+            // Patches created by Hal2AidlMapper during stream creation and not "claimed"
+            // by the framework must not be surfaced to it.
+            for (auto& s : mStreams) {
+                if (auto it = releasedPatches.find(s.second.second); it != releasedPatches.end()) {
+                    releasedPatches.erase(it);
+                }
+            }
+            mFwkPatches.merge(releasedPatches);
+            LOG_ALWAYS_FATAL_IF(!releasedPatches.empty(),
+                    "mFwkPatches already contains some of the released patches");
+            if (status != OK) return status;
         }
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->disconnectExternalDevice(portId)));
+        eraseConnectedPort(portId);
     }
     return updateRoutes();
 }
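
The central behavioral change in this file is partial patch matching: when the framework creates or updates a patch without referencing a patch the mapper already created at stream-open time, the lookup now matches by the mix-port side only and then updates the device side of the existing patch in place. A standalone sketch of the matching rule applied by `findPatch` (extracted from the switch above for illustration; assumes `<set>`):

    enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };

    // Returns whether an existing patch (patchSrcs/patchSinks) satisfies the request,
    // depending on which side must match exactly.
    bool patchMatches(const std::set<int32_t>& srcIds, const std::set<int32_t>& sinkIds,
                      const std::set<int32_t>& patchSrcs, const std::set<int32_t>& patchSinks,
                      PatchMatch match) {
        switch (match) {
            case MATCH_SOURCES: return srcIds == patchSrcs;
            case MATCH_SINKS: return sinkIds == patchSinks;
            case MATCH_BOTH: return srcIds == patchSrcs && sinkIds == patchSinks;
        }
        return false;
    }
    // For an output (mix -> device) patch the sources are mix port configs, so
    // MATCH_SOURCES finds the patch created during stream setup even after the
    // framework re-routes it to a different device.
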
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index f937173..710b43e 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -49,7 +49,7 @@
             const std::string& instance,
             const std::shared_ptr<::aidl::android::hardware::audio::core::IModule>& module);
 
-    void addStream(const sp<StreamHalInterface>& stream, int32_t portConfigId, int32_t patchId);
+    void addStream(const sp<StreamHalInterface>& stream, int32_t mixPortConfigId, int32_t patchId);
     status_t createOrUpdatePatch(
             const std::vector<::aidl::android::media::audio::common::AudioPortConfig>& sources,
             const std::vector<::aidl::android::media::audio::common::AudioPortConfig>& sinks,
@@ -91,13 +91,32 @@
         ::aidl::android::media::audio::common::AudioPortConfig* portConfig,
         Cleanups* cleanups = nullptr);
     status_t releaseAudioPatch(int32_t patchId);
-    void resetUnusedPatchesPortConfigsAndPorts();
+    void resetUnusedPatchesAndPortConfigs();
     status_t setDevicePortConnectedState(
             const ::aidl::android::media::audio::common::AudioPort& devicePort, bool connected);
 
+    // Methods to work with FwkPatches.
+    void eraseFwkPatch(int32_t fwkPatchId) { mFwkPatches.erase(fwkPatchId); }
+    int32_t findFwkPatch(int32_t fwkPatchId) {
+        const auto it = mFwkPatches.find(fwkPatchId);
+        return it != mFwkPatches.end() ? it->second : 0;
+    }
+    void updateFwkPatch(int32_t fwkPatchId, int32_t halPatchId) {
+        mFwkPatches[fwkPatchId] = halPatchId;
+    }
+
   private:
-    // IDs of ports for connected external devices, and whether they are held by streams.
-    using ConnectedPorts = std::map<int32_t /*port ID*/, bool>;
+    // 'FwkPatches' is used to store patches that diverge from the framework's state.
+    // Uses framework patch ID (aka audio_patch_handle_t) values for indexing.
+    // When 'key == value', it means that Hal2AidlMapper has removed this patch, and it is absent
+    // from 'mPatches', but it still "exists" for the framework. The framework will either remove
+    // it or re-patch. If the framework re-patches, it keeps using the same patch handle,
+    // but the HAL uses a new one (since the old patch was reset), thus 'key != value'
+    // for such patches. Since they "exist" both for the framework and the HAL, 'mPatches'
+    // contains their data under the HAL patch ID (the 'value' of 'FwkPatches').
+    // To avoid confusion, all patch IDs used by Hal2AidlMapper are HAL IDs. Mapping between
+    // framework patch IDs and HAL patch IDs is done by DeviceHalAidl.
+    using FwkPatches = std::map<int32_t /*audio_patch_handle_t*/, int32_t /*patch ID*/>;
     using Patches = std::map<int32_t /*patch ID*/,
             ::aidl::android::hardware::audio::core::AudioPatch>;
     using PortConfigs = std::map<int32_t /*port config ID*/,
@@ -107,12 +126,14 @@
     // Answers the question "whether portID 'first' is reachable from portID 'second'?"
     // It's not a map because both portIDs are known. The matrix is symmetric.
     using RoutingMatrix = std::set<std::pair<int32_t, int32_t>>;
-    // There is always a port config ID set. The patch ID is set after stream
+    // There is always a mix port config ID set. The patch ID is set after stream
     // creation, and can be set to '-1' later if the framework happens to create
     // a patch between the same endpoints. In that case, the ownership of the patch
     // is on the framework.
     using Streams = std::map<wp<StreamHalInterface>,
-            std::pair<int32_t /*port config ID*/, int32_t /*patch ID*/>>;
+            std::pair<int32_t /*mix port config ID*/, int32_t /*patch ID*/>>;
+
+    enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };
 
     const std::string mInstance;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
@@ -131,15 +152,18 @@
             ::aidl::android::media::audio::common::AudioPortConfig* result, bool *created);
     void eraseConnectedPort(int32_t portId);
     status_t findOrCreatePatch(
-        const std::set<int32_t>& sourcePortConfigIds,
-        const std::set<int32_t>& sinkPortConfigIds,
+            const std::set<int32_t>& sourcePortConfigIds,
+            const std::set<int32_t>& sinkPortConfigIds,
+            PatchMatch match,
         ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
     status_t findOrCreatePatch(
         const ::aidl::android::hardware::audio::core::AudioPatch& requestedPatch,
+        PatchMatch match,
         ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
     status_t findOrCreateDevicePortConfig(
             const ::aidl::android::media::audio::common::AudioDevice& device,
             const ::aidl::android::media::audio::common::AudioConfig* config,
+            const ::aidl::android::media::audio::common::AudioGainConfig* gainConfig,
             ::aidl::android::media::audio::common::AudioPortConfig* portConfig,
             bool* created);
     // If the resulting 'portConfig->id' is 0, that means the config was not created,
@@ -156,7 +180,7 @@
         const std::set<int32_t>& destinationPortIds,
         ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
     Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
-            const std::set<int32_t>& sinkPortConfigIds);
+            const std::set<int32_t>& sinkPortConfigIds, PatchMatch match);
     Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
     Ports::iterator findPort(
             const ::aidl::android::media::audio::common::AudioConfig& config,
@@ -168,7 +192,7 @@
             const std::optional<::aidl::android::media::audio::common::AudioConfig>& config,
             const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
             int32_t ioHandle);
-    bool isPortBeingHeld(int32_t portId);
+    std::set<int32_t> getPatchIdsByPortId(int32_t portId);
     status_t prepareToOpenStreamHelper(
         int32_t ioHandle, int32_t devicePortId, int32_t devicePortConfigId,
         const ::aidl::android::media::audio::common::AudioIoFlags& flags,
@@ -181,10 +205,11 @@
         auto it = mPortConfigs.find(portConfigId);
         return it != mPortConfigs.end() && it->second.portId == portId;
     }
+    status_t releaseAudioPatch(Patches::iterator it);
     status_t releaseAudioPatches(const std::set<int32_t>& patchIds);
     void resetPatch(int32_t patchId) { (void)releaseAudioPatch(patchId); }
     void resetPortConfig(int32_t portConfigId);
-    void resetUnusedPortConfigsAndPorts();
+    void resetUnusedPortConfigs();
     status_t updateAudioPort(
             int32_t portId, ::aidl::android::media::audio::common::AudioPort* port);
     status_t updateRoutes();
@@ -197,13 +222,14 @@
     std::optional<::aidl::android::media::audio::common::AudioPort> mRemoteSubmixOut;
     int32_t mDefaultInputPortId = -1;
     int32_t mDefaultOutputPortId = -1;
+    FwkPatches mFwkPatches;
     PortConfigs mPortConfigs;
     std::set<int32_t> mInitialPortConfigIds;
     Patches mPatches;
     Routes mRoutes;
     RoutingMatrix mRoutingMatrix;
     Streams mStreams;
-    ConnectedPorts mConnectedPorts;
+    std::set<int32_t> mConnectedPorts;
     std::pair<int32_t, ::aidl::android::media::audio::common::AudioPort>
             mDisconnectedPortReplacement;
     std::set<int32_t> mDynamicMixPortIds;
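
The 'FwkPatches' comment above is easiest to follow as a timeline. A hypothetical sequence, where the concrete IDs and the exact DeviceHalAidl call sites are assumptions for illustration only:

    //  1. Device disconnect: setDevicePortConnectedState() releases HAL patch 5 that
    //     the framework still references, and records it as mFwkPatches[5] = 5
    //     ("gone on the HAL side, still alive for the framework").
    //  2. The framework either releases handle 5 (the caller then invokes
    //     eraseFwkPatch(5)) or re-creates the patch; in the latter case the HAL
    //     assigns a new ID, say 7, the caller invokes updateFwkPatch(5, 7), and the
    //     patch data itself lives in mPatches[7].
    //  3. Any later lookup of handle 5 goes through findFwkPatch(5), which returns 7,
    //     or 0 once the entry has been erased.
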
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 5f525d7..918f886 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -29,6 +29,7 @@
 #include <media/AudioParameter.h>
 #include <mediautils/TimeCheck.h>
 #include <system/audio.h>
+#include <Utils.h>
 #include <utils/Log.h>
 
 #include "DeviceHalAidl.h"
@@ -36,13 +37,14 @@
 #include "StreamHalAidl.h"
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::common::kDumpFromAudioServerArgument;
 using ::aidl::android::hardware::audio::common::PlaybackTrackMetadata;
 using ::aidl::android::hardware::audio::common::RecordTrackMetadata;
 using ::aidl::android::hardware::audio::core::IStreamCommon;
 using ::aidl::android::hardware::audio::core::IStreamIn;
 using ::aidl::android::hardware::audio::core::IStreamOut;
-using ::aidl::android::hardware::audio::core::StreamDescriptor;
 using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
+using ::aidl::android::hardware::audio::core::StreamDescriptor;
 using ::aidl::android::media::audio::common::MicrophoneDynamicInfo;
 using ::aidl::android::media::audio::IHalAdapterVendorExtension;
 
@@ -82,7 +84,12 @@
           mConfig(configToBase(config)),
           mContext(std::move(context)),
           mStream(stream),
-          mVendorExt(vext) {
+          mVendorExt(vext),
+          mLastReplyLifeTimeNs(
+                  std::min(static_cast<size_t>(20),
+                           mContext.getBufferDurationMs(mConfig.sample_rate))
+                  * NANOS_PER_MILLISECOND)
+{
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     {
         std::lock_guard l(mLock);
@@ -195,8 +202,12 @@
     StreamDescriptor::Reply reply;
     switch (state) {
         case StreamDescriptor::State::ACTIVE:
+        case StreamDescriptor::State::DRAINING:
+        case StreamDescriptor::State::TRANSFERRING:
             RETURN_STATUS_IF_ERROR(pause(&reply));
-            if (reply.state != StreamDescriptor::State::PAUSED) {
+            if (reply.state != StreamDescriptor::State::PAUSED &&
+                    reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
+                    reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
                 ALOGE("%s: unexpected stream state: %s (expected PAUSED)",
                         __func__, toString(reply.state).c_str());
                 return INVALID_OPERATION;
@@ -204,6 +215,7 @@
             FALLTHROUGH_INTENDED;
         case StreamDescriptor::State::PAUSED:
         case StreamDescriptor::State::DRAIN_PAUSED:
+        case StreamDescriptor::State::TRANSFER_PAUSED:
             if (mIsInput) return flush();
             RETURN_STATUS_IF_ERROR(flush(&reply));
             if (reply.state != StreamDescriptor::State::IDLE) {
@@ -234,7 +246,9 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    status_t status = mStream->dump(fd, Args(args).args(), args.size());
+    Vector<String16> newArgs = args;
+    newArgs.push(String16(kDumpFromAudioServerArgument));
+    status_t status = mStream->dump(fd, Args(newArgs).args(), newArgs.size());
     mStreamPowerLog.dump(fd);
     return status;
 }
@@ -243,20 +257,71 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    const auto state = getState();
-    StreamDescriptor::Reply reply;
-    if (state == StreamDescriptor::State::STANDBY) {
-        RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
-        return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true);
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
     }
-
-    return INVALID_OPERATION;
+    StreamDescriptor::Reply reply;
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    switch (reply.state) {
+        case StreamDescriptor::State::STANDBY:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+            if (reply.state != StreamDescriptor::State::IDLE) {
+                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::IDLE:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true));
+            if (reply.state != StreamDescriptor::State::ACTIVE) {
+                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::ACTIVE:
+            return OK;
+        case StreamDescriptor::State::DRAINING:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+            if (reply.state != StreamDescriptor::State::ACTIVE) {
+                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            return OK;
+        default:
+            ALOGE("%s: not supported from %s stream state %s",
+                    __func__, mIsInput ? "input" : "output", toString(reply.state).c_str());
+            return INVALID_OPERATION;
+    }
 }
 
 status_t StreamHalAidl::stop() {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return standby();
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
+    }
+    StreamDescriptor::Reply reply;
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    if (const auto state = reply.state; state == StreamDescriptor::State::ACTIVE) {
+        return drain(false /*earlyNotify*/, nullptr);
+    } else if (state == StreamDescriptor::State::DRAINING) {
+        RETURN_STATUS_IF_ERROR(pause());
+        return flush();
+    } else if (state == StreamDescriptor::State::PAUSED) {
+        return flush();
+    } else if (state != StreamDescriptor::State::IDLE &&
+            state != StreamDescriptor::State::STANDBY) {
+        ALOGE("%s: not supported from %s stream state %s",
+                __func__, mIsInput ? "input" : "output", toString(state).c_str());
+        return INVALID_OPERATION;
+    }
+    return OK;
 }
 
 status_t StreamHalAidl::getLatency(uint32_t *latency) {
@@ -271,11 +336,12 @@
     return OK;
 }
 
-status_t StreamHalAidl::getObservablePosition(int64_t *frames, int64_t *timestamp) {
+status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
+        StatePositions* statePositions) {
     ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
-    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
     *frames = std::max<int64_t>(0, reply.observable.frames);
     *timestamp = std::max<int64_t>(0, reply.observable.timeNs);
     return OK;
@@ -285,8 +351,7 @@
     ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
-    // TODO: switch to updateCountersIfNeeded once we sort out mWorkerTid initialization
-    RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), &reply, true));
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
     *frames = std::max<int64_t>(0, reply.hardware.frames);
     *timestamp = std::max<int64_t>(0, reply.hardware.timeNs);
     return OK;
@@ -319,8 +384,11 @@
             return INVALID_OPERATION;
         }
     }
+    StreamContextAidl::DataMQ::Error fmqError = StreamContextAidl::DataMQ::Error::NONE;
+    std::string fmqErrorMsg;
     if (!mIsInput) {
-        bytes = std::min(bytes, mContext.getDataMQ()->availableToWrite());
+        bytes = std::min(bytes,
+                mContext.getDataMQ()->availableToWrite(&fmqError, &fmqErrorMsg));
     }
     StreamDescriptor::Command burst =
             StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
@@ -337,12 +405,14 @@
         LOG_ALWAYS_FATAL_IF(*transferred > bytes,
                 "%s: HAL module read %zu bytes, which exceeds requested count %zu",
                 __func__, *transferred, bytes);
-        if (auto toRead = mContext.getDataMQ()->availableToRead();
+        if (auto toRead = mContext.getDataMQ()->availableToRead(&fmqError, &fmqErrorMsg);
                 toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
             ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
             return NOT_ENOUGH_DATA;
         }
     }
+    LOG_ALWAYS_FATAL_IF(fmqError != StreamContextAidl::DataMQ::Error::NONE,
+            "%s", fmqErrorMsg.c_str());
     mStreamPowerLog.log(buffer, *transferred);
     return OK;
 }
@@ -351,8 +421,16 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return sendCommand(makeHalCommand<HalCommand::Tag::pause>(), reply,
-            true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+
+    if (const auto state = getState(); isInPlayOrRecordState(state)) {
+        return sendCommand(
+                makeHalCommand<HalCommand::Tag::pause>(), reply,
+                true /*safeFromNonWorkerThread*/);  // The worker stops its I/O activity first.
+    } else {
+        ALOGD("%s: stream already in one of the PAUSED-like states, current state: %s", __func__,
+              toString(state).c_str());
+        return OK;
+    }
 }
 
 status_t StreamHalAidl::resume(StreamDescriptor::Reply* reply) {
@@ -362,24 +440,33 @@
     if (mIsInput) {
         return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), reply);
     } else {
-        if (mContext.isAsynchronous()) {
+        if (const auto state = getState(); state == StreamDescriptor::State::IDLE) {
             // Handle pause-flush-resume sequence. 'flush' from PAUSED goes to
             // IDLE. We move here from IDLE to ACTIVE (same as 'start' from PAUSED).
-            const auto state = getState();
-            if (state == StreamDescriptor::State::IDLE) {
-                StreamDescriptor::Reply localReply{};
-                StreamDescriptor::Reply* innerReply = reply ?: &localReply;
-                RETURN_STATUS_IF_ERROR(
-                        sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
-                if (innerReply->state != StreamDescriptor::State::ACTIVE) {
-                    ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
-                            __func__, toString(innerReply->state).c_str());
-                    return INVALID_OPERATION;
-                }
-                return OK;
+            StreamDescriptor::Reply localReply{};
+            StreamDescriptor::Reply* innerReply = reply ?: &localReply;
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
+            if (innerReply->state != StreamDescriptor::State::ACTIVE) {
+                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                        __func__, toString(innerReply->state).c_str());
+                return INVALID_OPERATION;
             }
+            return OK;
+        } else if (state == StreamDescriptor::State::PAUSED ||
+                   state == StreamDescriptor::State::TRANSFER_PAUSED ||
+                   state == StreamDescriptor::State::DRAIN_PAUSED) {
+            return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
+        } else if (state == StreamDescriptor::State::ACTIVE ||
+                   state == StreamDescriptor::State::TRANSFERRING ||
+                   state == StreamDescriptor::State::DRAINING) {
+            ALOGD("%s: stream already in state: %s", __func__, toString(state).c_str());
+            return OK;
+        } else {
+            ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
+                        __func__, toString(state).c_str());
+            return INVALID_OPERATION;
         }
-        return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
     }
 }
 
@@ -398,8 +485,19 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return sendCommand(makeHalCommand<HalCommand::Tag::flush>(), reply,
-            true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+
+    if (const auto state = getState(); isInPausedState(state)) {
+        return sendCommand(
+                makeHalCommand<HalCommand::Tag::flush>(), reply,
+                true /*safeFromNonWorkerThread*/);  // The worker stops its I/O activity first.
+    } else if (isInPlayOrRecordState(state)) {
+        ALOGE("%s: found stream in non-flushable state: %s", __func__, toString(state).c_str());
+        return INVALID_OPERATION;
+    } else {
+        ALOGD("%s: stream already in one of the flushable states, current state: %s", __func__,
+              toString(state).c_str());
+        return OK;
+    }
 }
 
 status_t StreamHalAidl::exit() {
@@ -409,6 +507,38 @@
     return statusTFromBinderStatus(mStream->prepareToClose());
 }
 
+void StreamHalAidl::onAsyncTransferReady() {
+    if (auto state = getState(); state == StreamDescriptor::State::TRANSFERRING) {
+        // Retrieve the current state together with position counters unconditionally
+        // to ensure that the state on our side gets updated.
+        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+                nullptr, true /*safeFromNonWorkerThread */);
+    } else {
+        ALOGW("%s: unexpected onTransferReady in the state %s", __func__, toString(state).c_str());
+    }
+}
+
+void StreamHalAidl::onAsyncDrainReady() {
+    if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
+        // Retrieve the current state together with position counters unconditionally
+        // to ensure that the state on our side gets updated.
+        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
+                    true /*safeFromNonWorkerThread */);
+        // For compatibility with HIDL behavior, apply a "soft" position reset
+        // after receiving the "drain ready" callback.
+        std::lock_guard l(mLock);
+        mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
+    } else {
+        ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
+    }
+}
+
+void StreamHalAidl::onAsyncError() {
+    std::lock_guard l(mLock);
+    ALOGW("%s: received in the state %s", __func__, toString(mLastReply.state).c_str());
+    mLastReply.state = StreamDescriptor::State::ERROR;
+}
+
 status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
                                          struct audio_mmap_buffer_info *info) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
@@ -457,9 +587,9 @@
 }
 
 status_t StreamHalAidl::sendCommand(
-        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
         ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
-        bool safeFromNonWorkerThread) {
+        bool safeFromNonWorkerThread, StatePositions* statePositions) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!safeFromNonWorkerThread) {
         const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
@@ -467,26 +597,48 @@
                 "%s %s: must be invoked from the worker thread (%d)",
                 __func__, command.toString().c_str(), workerTid);
     }
-    if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
-        ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
-        return NOT_ENOUGH_DATA;
-    }
     StreamDescriptor::Reply localReply{};
-    if (reply == nullptr) {
-        reply = &localReply;
-    }
-    if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
-        ALOGE("%s: failed to read from reply MQ, command %s", __func__, command.toString().c_str());
-        return NOT_ENOUGH_DATA;
-    }
     {
-        std::lock_guard l(mLock);
-        // Not every command replies with 'latencyMs' field filled out, substitute the last
-        // returned value in that case.
-        if (reply->latencyMs <= 0) {
-            reply->latencyMs = mLastReply.latencyMs;
+        std::lock_guard l(mCommandReplyLock);
+        if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
+            ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
+            return NOT_ENOUGH_DATA;
         }
-        mLastReply = *reply;
+        if (reply == nullptr) {
+            reply = &localReply;
+        }
+        if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
+            ALOGE("%s: failed to read from reply MQ, command %s",
+                    __func__, command.toString().c_str());
+            return NOT_ENOUGH_DATA;
+        }
+        {
+            std::lock_guard l(mLock);
+            // Not every command replies with 'latencyMs' field filled out, substitute the last
+            // returned value in that case.
+            if (reply->latencyMs <= 0) {
+                reply->latencyMs = mLastReply.latencyMs;
+            }
+            mLastReply = *reply;
+            mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+            if (!mIsInput && reply->status == STATUS_OK) {
+                if (command.getTag() == StreamDescriptor::Command::standby &&
+                        reply->state == StreamDescriptor::State::STANDBY) {
+                    mStatePositions.framesAtStandby = reply->observable.frames;
+                } else if (command.getTag() == StreamDescriptor::Command::flush &&
+                           reply->state == StreamDescriptor::State::IDLE) {
+                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                } else if (!mContext.isAsynchronous() &&
+                        command.getTag() == StreamDescriptor::Command::drain &&
+                        (reply->state == StreamDescriptor::State::IDLE ||
+                                reply->state == StreamDescriptor::State::DRAINING)) {
+                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
+            }
+            if (statePositions != nullptr) {
+                *statePositions = mStatePositions;
+            }
+        }
     }
     switch (reply->status) {
         case STATUS_OK: return OK;
@@ -501,17 +653,24 @@
 }
 
 status_t StreamHalAidl::updateCountersIfNeeded(
-        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) {
-    if (mWorkerTid.load(std::memory_order_acquire) == gettid()) {
-        if (const auto state = getState(); state != StreamDescriptor::State::ACTIVE &&
-                state != StreamDescriptor::State::DRAINING &&
-                state != StreamDescriptor::State::TRANSFERRING) {
-            return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), reply);
-        }
+        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
+        StatePositions* statePositions) {
+    bool doUpdate = false;
+    {
+        std::lock_guard l(mLock);
+        doUpdate = uptimeNanos() > mLastReplyExpirationNs;
     }
-    if (reply != nullptr) {
+    if (doUpdate) {
+        // Since updates are paced, it is OK to perform them from any thread; they should
+        // not interfere with the I/O operations of the worker.
+        return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+                reply, true /*safeFromNonWorkerThread */, statePositions);
+    } else if (reply != nullptr) {  // provide cached reply
         std::lock_guard l(mLock);
         *reply = mLastReply;
+        if (statePositions != nullptr) {
+            *statePositions = mStatePositions;
+        }
     }
     return OK;
 }
@@ -545,7 +704,7 @@
 
 StreamOutHalAidl::~StreamOutHalAidl() {
     if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
-        broker->clearCallbacks(this);
+        broker->clearCallbacks(static_cast<StreamOutHalInterface*>(this));
     }
 }
 
@@ -569,7 +728,19 @@
 status_t StreamOutHalAidl::setVolume(float left, float right) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return statusTFromBinderStatus(mStream->setHwVolume({left, right}));
+    size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
+    if (channelCount == 0) channelCount = 2;
+    std::vector<float> volumes(channelCount);
+    if (channelCount == 1) {
+        volumes[0] = (left + right) / 2;
+    } else {
+        volumes[0] = left;
+        volumes[1] = right;
+        for (size_t i = 2; i < channelCount; ++i) {
+            volumes[i] = (left + right) / 2;
+        }
+    }
+    return statusTFromBinderStatus(mStream->setHwVolume(volumes));
 }
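
A small standalone illustration of the channel expansion performed by `setVolume` above (the channel counts and volume values are example assumptions): front left/right keep their own volumes, any remaining channels receive the average, and a mono stream receives the average as well.

    std::vector<float> expandVolumes(float left, float right, size_t channelCount) {
        if (channelCount == 0) channelCount = 2;  // same fallback as above
        std::vector<float> volumes(channelCount, (left + right) / 2);
        if (channelCount >= 2) {
            volumes[0] = left;
            volumes[1] = right;
        }
        return volumes;
    }
    // expandVolumes(0.2f, 0.8f, 6) -> {0.2, 0.8, 0.5, 0.5, 0.5, 0.5} for a 5.1 mask;
    // expandVolumes(0.2f, 0.8f, 1) -> {0.5} for mono.
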
 
 status_t StreamOutHalAidl::selectPresentation(int presentationId, int programId) {
@@ -586,37 +757,36 @@
     return transfer(const_cast<void*>(buffer), bytes, written);
 }
 
-status_t StreamOutHalAidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalAidl::getRenderPosition(uint64_t *dspFrames) {
     if (dspFrames == nullptr) {
         return BAD_VALUE;
     }
     int64_t aidlFrames = 0, aidlTimestamp = 0;
-    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
-    *dspFrames = static_cast<uint32_t>(aidlFrames);
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(
+            getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // Number of audio frames since the stream has exited standby.
+    // See the table at the start of 'StreamHalInterface' for when it needs to be reset.
+    int64_t mostRecentResetPoint;
+    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+        mostRecentResetPoint = statePositions.framesAtStandby;
+    } else {
+        mostRecentResetPoint =
+                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+    }
+    *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     return OK;
 }
 
-status_t StreamOutHalAidl::getNextWriteTimestamp(int64_t *timestamp __unused) {
-    // Obsolete, use getPresentationPosition.
-    return INVALID_OPERATION;
-}
-
 status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
+    ALOGD("%p %s", this, __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isAsynchronous()) {
         ALOGE("%s: the callback is intended for asynchronous streams only", __func__);
         return INVALID_OPERATION;
     }
-    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
-        if (auto cb = callback.promote(); cb != nullptr) {
-            broker->setStreamOutCallback(this, cb);
-        } else {
-            // It is expected that the framework never passes a null pointer.
-            // In the AIDL model callbacks can't be "unregistered".
-            LOG_ALWAYS_FATAL("%s: received an expired or null callback pointer", __func__);
-        }
-    }
+    mClientCallback = callback;
     return OK;
 }
 
@@ -649,6 +819,14 @@
 }
 
 status_t StreamOutHalAidl::drain(bool earlyNotify) {
+    if (!mStream) return NO_INIT;
+
+    if (const auto state = getState(); isInDrainedState(state)) {
+        ALOGD("%p %s stream already in %s", this, __func__, toString(state).c_str());
+        if (mContext.isAsynchronous()) onDrainReady();
+        return OK;
+    }
+
     return StreamHalAidl::drain(earlyNotify);
 }
 
@@ -661,13 +839,26 @@
         return BAD_VALUE;
     }
     int64_t aidlFrames = 0, aidlTimestamp = 0;
-    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
-    *frames = aidlFrames;
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // See the table at the start of 'StreamHalInterface'.
+    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+        *frames = aidlFrames;
+    } else {
+        const int64_t mostRecentResetPoint =
+                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+        *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
+    }
     timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
     timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND;
     return OK;
 }
 
+status_t StreamOutHalAidl::presentationComplete() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    return OK;
+}
+
 status_t StreamOutHalAidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
     TIME_CHECK();
@@ -739,7 +930,7 @@
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
-        broker->setStreamOutEventCallback(this, callback);
+        broker->setStreamOutEventCallback(static_cast<StreamOutHalInterface*>(this), callback);
     }
     return OK;
 }
@@ -773,7 +964,8 @@
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
-        broker->setStreamOutLatencyModeCallback(this, callback);
+        broker->setStreamOutLatencyModeCallback(
+                static_cast<StreamOutHalInterface*>(this), callback);
     }
     return OK;
 };
@@ -782,13 +974,34 @@
     return StreamHalAidl::exit();
 }
 
+void StreamOutHalAidl::onWriteReady() {
+    onAsyncTransferReady();
+    if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+        clientCb->onWriteReady();
+    }
+}
+
+void StreamOutHalAidl::onDrainReady() {
+    onAsyncDrainReady();
+    if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+        clientCb->onDrainReady();
+    }
+}
+
+void StreamOutHalAidl::onError(bool isHardError) {
+    onAsyncError();
+    if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
+        clientCb->onError(isHardError);
+    }
+}
+
 status_t StreamOutHalAidl::filterAndUpdateOffloadMetadata(AudioParameter &parameters) {
     TIME_CHECK();
     bool updateMetadata = false;
     if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
                 parameters, String8(AudioParameter::keyOffloadCodecAverageBitRate),
                 [&](int value) {
-                    return value > 0 ?
+                    return value >= 0 ?
                             mOffloadMetadata.averageBitRatePerSecond = value, OK : BAD_VALUE;
                 }))) {
         updateMetadata = true;
@@ -866,7 +1079,9 @@
 status_t StreamInHalAidl::setGain(float gain) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return statusTFromBinderStatus(mStream->setHwGain({gain}));
+    const size_t channelCount = audio_channel_count_from_in_mask(mConfig.channel_mask);
+    std::vector<float> gains(channelCount != 0 ? channelCount : 1, gain);
+    return statusTFromBinderStatus(mStream->setHwGain(gains));
 }
 
 status_t StreamInHalAidl::read(void *buffer, size_t bytes, size_t *read) {
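
The render and presentation positions now subtract a per-state reset point instead of reporting the raw observable counter. A worked sketch of the arithmetic used by `getRenderPosition` above (values are illustrative; `framesAtStandby` and `framesAtFlushOrDrain` correspond to the `StatePositions` fields captured in `sendCommand`; for `getPresentationPosition`, the proportional synchronous case keeps the raw observable position; assumes `<algorithm>` and `<cstdint>`):

    // Illustrative helper mirroring the reset-point logic in getRenderPosition().
    int64_t positionSinceReset(int64_t observableFrames, int64_t framesAtStandby,
                               int64_t framesAtFlushOrDrain, bool proportionalAndSynchronous) {
        const int64_t resetPoint = proportionalAndSynchronous
                ? framesAtStandby                                   // resets only on standby
                : std::max(framesAtStandby, framesAtFlushOrDrain);  // also resets on flush/drain
        return observableFrames <= resetPoint ? 0 : observableFrames - resetPoint;
    }
    // Example: observable = 48000, framesAtStandby = 0, framesAtFlushOrDrain = 32000:
    // a synchronous PCM stream reports 48000 frames, an offloaded stream reports 16000.
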
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 4acc6ac..baf4ac0 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -32,6 +32,7 @@
 #include <media/audiohal/StreamHalInterface.h>
 #include <media/AidlConversionUtil.h>
 #include <media/AudioParameter.h>
+#include <mediautils/Synchronization.h>
 
 #include "ConversionHelperAidl.h"
 #include "StreamPowerLog.h"
@@ -93,6 +94,10 @@
     }
     size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
     size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
+    size_t getBufferDurationMs(int32_t sampleRate) const {
+        auto bufferSize = mIsMmapped ? getMmapBurstSize() : mBufferSizeFrames;
+        return sampleRate != 0 ? bufferSize * MILLIS_PER_SECOND / sampleRate : 0;
+    }
     CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
     DataMQ* getDataMQ() const { return mDataMQ.get(); }
     size_t getFrameSizeBytes() const { return mFrameSizeBytes; }
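
`getBufferDurationMs` above feeds the cached-reply lifetime set in the `StreamHalAidl` constructor earlier in this change: the lifetime is the smaller of 20 ms and one buffer (or MMAP burst) duration. With assumed example values (the constants here stand in for `MILLIS_PER_SECOND` and `NANOS_PER_MILLISECOND`; assumes `<algorithm>`):

    constexpr size_t kBufferSizeFrames = 960;                  // assumed, non-MMAP stream
    constexpr int32_t kSampleRate = 48000;                     // assumed
    constexpr size_t kBufferDurationMs =
            kBufferSizeFrames * 1000 / kSampleRate;            // 960 * 1000 / 48000 = 20 ms
    constexpr size_t kLastReplyLifeTimeNs =
            std::min<size_t>(20, kBufferDurationMs) * 1000000; // 20'000'000 ns
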
@@ -100,7 +105,7 @@
     bool isAsynchronous() const { return mIsAsynchronous; }
     bool isMmapped() const { return mIsMmapped; }
     const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
-
+    size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames; }
   private:
     static std::unique_ptr<DataMQ> maybeCreateDataMQ(
             const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
@@ -190,6 +195,11 @@
     // For tests.
     friend class sp<StreamHalAidl>;
 
+    struct StatePositions {
+        int64_t framesAtFlushOrDrain;
+        int64_t framesAtStandby;
+    };
+
     template<class T>
     static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
             const std::shared_ptr<T>& stream);
@@ -205,10 +215,51 @@
 
     ~StreamHalAidl() override;
 
+    ::aidl::android::hardware::audio::core::StreamDescriptor::State getState() {
+        std::lock_guard l(mLock);
+        return mLastReply.state;
+    }
+
+    bool isInDrainedState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::IDLE ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::STANDBY) {
+            // drain equivalent states
+            return true;
+        }
+        return false;
+    }
+
+    bool isInPlayOrRecordState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::ACTIVE ||
+            state ==
+                    ::aidl::android::hardware::audio::core::StreamDescriptor::State::TRANSFERRING ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::DRAINING) {
+            // play or record equivalent states
+            return true;
+        }
+        return false;
+    }
+
+    bool isInPausedState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State& state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::PAUSED ||
+            state ==
+                    ::aidl::android::hardware::audio::core::StreamDescriptor::State::DRAIN_PAUSED ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::
+                             TRANSFER_PAUSED) {
+            // pause equivalent states
+            return true;
+        }
+        return false;
+    }
+
     status_t getLatency(uint32_t *latency);
 
     // Always returns non-negative values.
-    status_t getObservablePosition(int64_t *frames, int64_t *timestamp);
+    status_t getObservablePosition(int64_t* frames, int64_t* timestamp,
+            StatePositions* statePositions = nullptr);
 
     // Always returns non-negative values.
     status_t getHardwarePosition(int64_t *frames, int64_t *timestamp);
@@ -232,9 +283,22 @@
 
     status_t exit();
 
+    void onAsyncTransferReady();
+    void onAsyncDrainReady();
+    void onAsyncError();
+
     const bool mIsInput;
     const audio_config_base_t mConfig;
     const StreamContextAidl mContext;
+    // This lock is used to make sending of a command and receiving a reply an atomic
+    // operation. Otherwise, when two threads are trying to send a command, they may both advance to
+    // reading of the reply once the HAL has consumed the command from the MQ, and that creates a
+    // race condition between them.
+    //
+    // Note that only access to command and reply MQs needs to be protected because the data MQ is
+    // only accessed by the I/O thread. Also, there is no need to protect lookup operations on the
+    // queues as they are thread-safe; only send/receive operations must be protected.
+    std::mutex mCommandReplyLock;
 
   private:
     static audio_config_base_t configToBase(const audio_config& config) {
@@ -244,21 +308,26 @@
         result.format = config.format;
         return result;
     }
-    ::aidl::android::hardware::audio::core::StreamDescriptor::State getState() {
-        std::lock_guard l(mLock);
-        return mLastReply.state;
-    }
+    // Note: Since `sendCommand` takes mLock while holding mCommandReplyLock, never call
+    // it with `mLock` being held.
     status_t sendCommand(
-            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
             ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
-            bool safeFromNonWorkerThread = false);
+            bool safeFromNonWorkerThread = false,
+            StatePositions* statePositions = nullptr);
     status_t updateCountersIfNeeded(
-            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
+            StatePositions* statePositions = nullptr);
 
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
     const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
+    const int64_t mLastReplyLifeTimeNs;
     std::mutex mLock;
     ::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
+    int64_t mLastReplyExpirationNs GUARDED_BY(mLock) = 0;
+    // Cached values of observable positions when the stream last entered certain states.
+    // Updated for output streams only.
+    StatePositions mStatePositions GUARDED_BY(mLock) = {};
     // mStreamPowerLog is used for audio signal power logging.
     StreamPowerLog mStreamPowerLog;
     std::atomic<pid_t> mWorkerTid = -1;
@@ -266,7 +335,9 @@
 
 class CallbackBroker;
 
-class StreamOutHalAidl : public StreamOutHalInterface, public StreamHalAidl {
+class StreamOutHalAidl : public virtual StreamOutHalInterface,
+                         public virtual StreamOutHalInterfaceCallback,
+                         public StreamHalAidl {
   public:
     // Extract the output stream parameters and set by AIDL APIs.
     status_t setParameters(const String8& kvPairs) override;
@@ -285,10 +356,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    status_t getRenderPosition(uint32_t *dspFrames) override;
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    status_t getNextWriteTimestamp(int64_t *timestamp) override;
+    status_t getRenderPosition(uint64_t *dspFrames) override;
 
     // Set the callback for notifying completion of non-blocking write and drain.
     status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) override;
@@ -308,12 +376,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     status_t drain(bool earlyNotify) override;
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. The stream must
+    // already be paused before calling 'flush'.
     status_t flush() override;
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override;
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    status_t presentationComplete() override;
+
     // Called when the metadata of the stream's source has been changed.
     status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
 
@@ -344,6 +419,11 @@
 
     status_t exit() override;
 
+    // StreamOutHalInterfaceCallback
+    void onWriteReady() override;
+    void onDrainReady() override;
+    void onError(bool isHardError) override;
+
   private:
     friend class sp<StreamOutHalAidl>;
 
@@ -352,6 +432,7 @@
 
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut> mStream;
     const wp<CallbackBroker> mCallbackBroker;
+    mediautils::atomic_wp<StreamOutHalInterfaceCallback> mClientCallback;
 
     AudioOffloadMetadata mOffloadMetadata;
 
@@ -384,6 +465,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     status_t getCapturePosition(int64_t *frames, int64_t *time) override;
 
     // Get active microphones
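
Editorial note: the mCommandReplyLock comment in this header explains that writing a command and reading its reply must appear atomic to other threads, otherwise two senders can each end up consuming the other's reply. A minimal sketch of that discipline follows; the queue type and method names are placeholders, not the real FMQ API.

    #include <mutex>
    #include <optional>
    #include <queue>

    struct Command { int what = 0; };
    struct Reply { int state = 0; };

    // Placeholder queue standing in for the command/reply fast message queues.
    template <typename T>
    class SimpleQueue {
      public:
        void write(const T& v) { mItems.push(v); }
        std::optional<T> read() {
            if (mItems.empty()) return std::nullopt;
            T v = mItems.front();
            mItems.pop();
            return v;
        }
      private:
        std::queue<T> mItems;
    };

    class StreamSketch {
      public:
        // Holding the lock across both steps prevents the interleaving
        // "thread 1 writes A, thread 2 writes B, thread 2 reads the reply meant for A".
        std::optional<Reply> sendCommand(const Command& cmd) {
            std::lock_guard lock(mCommandReplyLock);
            mCommandQueue.write(cmd);
            return mReplyQueue.read(); // the real code blocks until the HAL posts the reply
        }
      private:
        std::mutex mCommandReplyLock;
        SimpleQueue<Command> mCommandQueue;
        SimpleQueue<Reply> mReplyQueue;
    };
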
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 72eadc6..a931fdd 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "StreamHalHidl"
 //#define LOG_NDEBUG 0
 
+#include <cinttypes>
+
 #include <android/hidl/manager/1.0/IServiceManager.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioParameter.h>
@@ -589,32 +591,39 @@
     return OK;
 }
 
-status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalHidl::getRenderPosition(uint64_t *dspFrames) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     Result retval;
+    uint32_t halPosition = 0;
     Return<void> ret = mStream->getRenderPosition(
             [&](Result r, uint32_t d) {
                 retval = r;
                 if (retval == Result::OK) {
-                    *dspFrames = d;
+                    halPosition = d;
                 }
             });
-    return processReturn("getRenderPosition", ret, retval);
-}
+    status_t status = processReturn("getRenderPosition", ret, retval);
+    if (status != OK) {
+        return status;
+    }
+    // Maintain a 64-bit render position using the 32-bit result from the HAL.
+    // This delta calculation relies on the arithmetic overflow behavior
+    // of integers. For example (100 - 0xFFFFFFF0) = 116.
+    std::lock_guard l(mPositionMutex);
+    const auto truncatedPosition = (uint32_t)mRenderPosition;
+    int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow()
+    (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
 
-status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) {
-    TIME_CHECK();
-    if (mStream == 0) return NO_INIT;
-    Result retval;
-    Return<void> ret = mStream->getNextWriteTimestamp(
-            [&](Result r, int64_t t) {
-                retval = r;
-                if (retval == Result::OK) {
-                    *timestamp = t;
-                }
-            });
-    return processReturn("getRenderPosition", ret, retval);
+    if (deltaHalPosition >= 0) {
+        mRenderPosition += deltaHalPosition;
+    } else if (mExpectRetrograde) {
+        mExpectRetrograde = false;
+        mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
+        ALOGW("Retrograde motion of %" PRId32 " frames", -deltaHalPosition);
+    }
+    *dspFrames = mRenderPosition;
+    return OK;
 }
 
 status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
@@ -667,9 +676,23 @@
 status_t StreamOutHalHidl::flush() {
     TIME_CHECK();
     if (mStream == 0) return NO_INIT;
+    {
+        std::lock_guard l(mPositionMutex);
+        mRenderPosition = 0;
+        mExpectRetrograde = false;
+    }
     return processReturn("pause", mStream->flush());
 }
 
+status_t StreamOutHalHidl::standby() {
+    {
+        std::lock_guard l(mPositionMutex);
+        mRenderPosition = 0;
+        mExpectRetrograde = false;
+    }
+    return StreamHalHidl::standby();
+}
+
 status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
@@ -696,6 +719,16 @@
     }
 }
 
+status_t StreamOutHalHidl::presentationComplete() {
+    // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
+    // transitioning between tracks.
+    // The HAL resets the frame position without flush/stop being called, but calls back prior to
+    // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
+    // mRenderPosition.
+    mExpectRetrograde = true;
+    return OK;
+}
+
 #if MAJOR_VERSION == 2
 status_t StreamOutHalHidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& /* sourceMetadata */) {
@@ -840,7 +873,7 @@
             const android::hardware::hidl_vec<uint8_t>& audioMetadata)  override {
         sp<StreamOutHalHidl> stream = mStream.promote();
         if (stream != nullptr) {
-            std::basic_string<uint8_t> metadataBs(audioMetadata.begin(), audioMetadata.end());
+            std::vector<uint8_t> metadataBs(audioMetadata.begin(), audioMetadata.end());
             stream->onCodecFormatChanged(metadataBs);
         }
         return Void();
@@ -964,10 +997,10 @@
     sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
     if (callback == 0) return;
     ALOGV("asyncCallback onError");
-    callback->onError();
+    callback->onError(false /*isHardError*/);
 }
 
-void StreamOutHalHidl::onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs) {
+void StreamOutHalHidl::onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) {
     sp<StreamOutHalInterfaceEventCallback> callback = mEventCallback.load().promote();
     if (callback == nullptr) return;
     ALOGV("asyncCodecFormatCallback %s", __func__);
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 5361047..433e0a3 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -18,10 +18,12 @@
 #define ANDROID_HARDWARE_STREAM_HAL_HIDL_H
 
 #include <atomic>
+#include <mutex>
 
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
 #include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
+#include <android-base/thread_annotations.h>
 #include <fmq/EventFlag.h>
 #include <fmq/MessageQueue.h>
 #include <media/audiohal/EffectHalInterface.h>
@@ -119,6 +121,9 @@
 
 class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl {
   public:
+    // Put the audio hardware input/output into standby mode (from StreamHalInterface).
+    status_t standby() override;
+
     // Return the frame size (number of bytes per sample) of a stream.
     virtual status_t getFrameSize(size_t *size);
 
@@ -136,10 +141,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    virtual status_t getRenderPosition(uint32_t *dspFrames);
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    virtual status_t getNextWriteTimestamp(int64_t *timestamp);
+    virtual status_t getRenderPosition(uint64_t *dspFrames);
 
     // Set the callback for notifying completion of non-blocking write and drain.
     virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
@@ -159,12 +161,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     virtual status_t drain(bool earlyNotify);
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. The stream must
+    // already be paused before calling 'flush'.
     virtual status_t flush();
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    status_t presentationComplete() override;
+
     // Called when the metadata of the stream's source has been changed.
     status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
 
@@ -194,7 +203,7 @@
     status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
 
     // Methods used by StreamCodecFormatCallback (HIDL).
-    void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs);
+    void onCodecFormatChanged(const std::vector<uint8_t>& metadataBs);
 
     status_t setLatencyMode(audio_latency_mode_t mode) override;
     status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) override;
@@ -221,6 +230,10 @@
     std::unique_ptr<StatusMQ> mStatusMQ;
     std::atomic<pid_t> mWriterClient;
     EventFlag* mEfGroup;
+    std::mutex mPositionMutex;
+    // Used to expand correctly the 32-bit position from the HAL.
+    uint64_t mRenderPosition GUARDED_BY(mPositionMutex) = 0;
+    bool mExpectRetrograde GUARDED_BY(mPositionMutex) = false; // See 'presentationComplete'.
 
     // Can not be constructed directly by clients.
     StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream);
@@ -250,6 +263,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
 
     // Get active microphones
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
index ca6ff88..7879200 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
@@ -17,6 +17,7 @@
 #include <cstdint>
 #include <cstring>
 #include <optional>
+#include <unordered_set>
 #define LOG_TAG "AidlConversionEQ"
 //#define LOG_NDEBUG 0
 
@@ -262,10 +263,21 @@
         }
         case EQ_PARAM_GET_NUM_OF_PRESETS: {
             Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::presets));
-            const auto& presets = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+            auto presets = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
                     aidlParam, Equalizer, equalizer, Equalizer::presets,
                     std::vector<Equalizer::Preset>));
-            uint16_t num = presets.size();
+            // The preset indexes were assumed to be in the range [0, NUM_OF_PRESETS - 1], so
+            // filter out presets outside this range (one example is the preset {-1, "custom"}).
+            std::erase_if(presets, [](const auto& preset) { return preset.index < 0; });
+            // validate that the remaining indexes are unique and within [0, num - 1]
+            std::unordered_set<uint16_t> uniqueIndices;
+            const uint16_t num = presets.size();
+            for (const auto& preset : presets) {
+                if (preset.index >= num || 0 != uniqueIndices.count(preset.index)) {
+                    return BAD_VALUE;
+                }
+                uniqueIndices.insert(preset.index);
+            }
             return param.writeToValue(&num);
         }
         case EQ_PARAM_GET_PRESET_NAME: {
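
Editorial note: the EQ_PARAM_GET_NUM_OF_PRESETS case above first drops presets with negative indexes (such as a {-1, "custom"} entry) and then requires the remaining indexes to be unique values in [0, num - 1]. The same validation, reduced to a stand-alone sketch with a simplified Preset struct, looks like this:

    #include <string>
    #include <unordered_set>
    #include <vector>

    // Simplified stand-in for the AIDL Equalizer::Preset parcelable.
    struct Preset { int index; std::string name; };

    // Returns the number of usable presets, or -1 if the remaining indexes
    // do not form a unique set within [0, num - 1].
    int countValidPresets(std::vector<Preset> presets) {
        std::erase_if(presets, [](const Preset& p) { return p.index < 0; }); // drops {-1, "custom"}
        std::unordered_set<int> seen;
        const int num = static_cast<int>(presets.size());
        for (const Preset& p : presets) {
            if (p.index >= num || !seen.insert(p.index).second) return -1;
        }
        return num;
    }
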
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
index 3cac591..642c370 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
@@ -71,7 +71,6 @@
 status_t AidlConversionPresetReverb::getParameter(EffectParamWriter& param) {
     uint32_t type = 0;
     uint16_t value = 0;
-    ALOGE("%s enter %s", __func__, param.toString().c_str());
     if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
         OK != param.readFromParameter(&type)) {
         ALOGE("%s invalid param %s", __func__, param.toString().c_str());
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index 49e6827..c2aa278 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -16,17 +16,17 @@
 
 #include <cstdint>
 #include <cstring>
-#include <optional>
 #define LOG_TAG "AidlConversionSpatializer"
 //#define LOG_NDEBUG 0
 
 #include <aidl/android/hardware/audio/effect/DefaultExtension.h>
 #include <aidl/android/hardware/audio/effect/VendorExtension.h>
 #include <error/expected_utils.h>
-#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionEffect.h>
+#include <media/AidlConversionNdk.h>
+#include <system/audio_effects/aidl_effects_utils.h>
 #include <system/audio_effects/effect_spatializer.h>
-
 #include <utils/Log.h>
 
 #include "AidlConversionSpatializer.h"
@@ -34,38 +34,319 @@
 namespace android {
 namespace effect {
 
-using ::aidl::android::aidl_utils::statusTFromBinderStatus;
-using ::aidl::android::hardware::audio::effect::DefaultExtension;
-using ::aidl::android::hardware::audio::effect::Parameter;
-using ::aidl::android::hardware::audio::effect::VendorExtension;
-using ::android::status_t;
+using aidl::android::getParameterSpecificField;
+using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::hardware::audio::common::SourceMetadata;
+using aidl::android::hardware::audio::effect::DefaultExtension;
+using aidl::android::hardware::audio::effect::Parameter;
+using aidl::android::hardware::audio::effect::Range;
+using aidl::android::hardware::audio::effect::Spatializer;
+using aidl::android::hardware::audio::effect::VendorExtension;
+using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::HeadTracking;
+using aidl::android::media::audio::common::Spatialization;
+using aidl::android::media::audio::common::toString;
+using android::status_t;
 using utils::EffectParamReader;
 using utils::EffectParamWriter;
 
+bool AidlConversionSpatializer::isSpatializerParameterSupported() {
+    if (!mIsSpatializerAidlParamSupported.has_value()) {
+        ::aidl::android::hardware::audio::effect::Parameter aidlParam;
+        const auto id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+                                                   Spatializer::vendor);
+        // No range defined in the descriptor capability means there is no Spatializer AIDL
+        // implementation. A BAD_VALUE return from getParameter indicates that the parameter
+        // is not supported by the HAL. Cache the result so the HAL is probed at most once
+        // (std::optional::value_or would evaluate the probe on every call).
+        mIsSpatializerAidlParamSupported =
+                mDesc.capability.range.getTag() == Range::spatializer &&
+                mEffect->getParameter(id, &aidlParam).getStatus() != android::BAD_VALUE;
+    }
+    return *mIsSpatializerAidlParamSupported;
+}
+
 status_t AidlConversionSpatializer::setParameter(EffectParamReader& param) {
-    Parameter aidlParam = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_EffectParameterReader_Parameter(param));
+    Parameter aidlParam;
+    if (isSpatializerParameterSupported()) {
+        uint32_t command = 0;
+        if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(int8_t)) ||
+            OK != param.readFromParameter(&command)) {
+            ALOGE("%s %d invalid param %s", __func__, __LINE__, param.toString().c_str());
+            return BAD_VALUE;
+        }
+
+        switch (command) {
+            case SPATIALIZER_PARAM_LEVEL: {
+                Spatialization::Level level = Spatialization::Level::NONE;
+                if (OK != param.readFromValue(&level)) {
+                    ALOGE("%s invalid level value %s", __func__, param.toString().c_str());
+                    return BAD_VALUE;
+                }
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer,
+                                                    spatializationLevel, level);
+                break;
+            }
+            case SPATIALIZER_PARAM_HEADTRACKING_MODE: {
+                HeadTracking::Mode mode = HeadTracking::Mode::DISABLED;
+                if (OK != param.readFromValue(&mode)) {
+                    ALOGE("%s invalid mode value %s", __func__, param.toString().c_str());
+                    return BAD_VALUE;
+                }
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, headTrackingMode,
+                                                    mode);
+                break;
+            }
+            case SPATIALIZER_PARAM_HEAD_TO_STAGE: {
+                const size_t valueSize = param.getValueSize();
+                if (valueSize / sizeof(float) > 6 || valueSize % sizeof(float) != 0) {
+                    ALOGE("%s invalid parameter value size %zu", __func__, valueSize);
+                    return BAD_VALUE;
+                }
+                std::array<float, 6> headToStage = {};
+                for (size_t i = 0; i < valueSize / sizeof(float); i++) {
+                    if (OK != param.readFromValue(&headToStage[i])) {
+                        ALOGE("%s failed to read headToStage from %s", __func__,
+                              param.toString().c_str());
+                        return BAD_VALUE;
+                    }
+                }
+                HeadTracking::SensorData sensorData =
+                        HeadTracking::SensorData::make<HeadTracking::SensorData::headToStage>(
+                                headToStage);
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer,
+                                                    headTrackingSensorData, sensorData);
+                break;
+            }
+            case SPATIALIZER_PARAM_HEADTRACKING_CONNECTION: {
+                int32_t modeInt32 = 0;
+                int32_t sensorId = -1;
+                if (OK != param.readFromValue(&modeInt32) || OK != param.readFromValue(&sensorId)) {
+                    ALOGE("%s %d invalid parameter value %s", __func__, __LINE__,
+                          param.toString().c_str());
+                    return BAD_VALUE;
+                }
+
+                const auto mode = static_cast<HeadTracking::ConnectionMode>(modeInt32);
+                if (mode < *ndk::enum_range<HeadTracking::ConnectionMode>().begin() ||
+                    mode > *ndk::enum_range<HeadTracking::ConnectionMode>().end()) {
+                    ALOGE("%s %d invalid mode %d", __func__, __LINE__, modeInt32);
+                    return BAD_VALUE;
+                }
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer,
+                                                    headTrackingConnectionMode, mode);
+                if (status_t status = statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+                    status != OK) {
+                    ALOGE("%s failed to set headTrackingConnectionMode %s", __func__,
+                          toString(mode).c_str());
+                    return status;
+                }
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, headTrackingSensorId,
+                                                    sensorId);
+                return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+            }
+            default: {
+                // For vendor extensions, copy the data area into the DefaultExtension;
+                // the parameter field is ignored.
+                VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                        aidl::android::legacy2aidl_EffectParameterReader_VendorExtension(param));
+                aidlParam =
+                        MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, vendor, ext);
+                break;
+            }
+        }
+    } else {
+        aidlParam = VALUE_OR_RETURN_STATUS(
+                ::aidl::android::legacy2aidl_EffectParameterReader_Parameter(param));
+    }
+
     return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
 }
 
 status_t AidlConversionSpatializer::getParameter(EffectParamWriter& param) {
-    DefaultExtension defaultExt;
-    // read parameters into DefaultExtension vector<uint8_t>
-    defaultExt.bytes.resize(param.getParameterSize());
-    if (OK != param.readFromParameter(defaultExt.bytes.data(), param.getParameterSize())) {
-        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
-        param.setStatus(BAD_VALUE);
-        return BAD_VALUE;
-    }
+    if (isSpatializerParameterSupported()) {
+        uint32_t command = 0;
+        if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(int8_t)) ||
+            OK != param.readFromParameter(&command)) {
+            ALOGE("%s %d invalid param %s", __func__, __LINE__, param.toString().c_str());
+            return BAD_VALUE;
+        }
 
-    VendorExtension idTag;
-    idTag.extension.setParcelable(defaultExt);
-    Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, idTag);
-    Parameter aidlParam;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
-    // copy the AIDL extension data back to effect_param_t
-    return VALUE_OR_RETURN_STATUS(
-            ::aidl::android::aidl2legacy_Parameter_EffectParameterWriter(aidlParam, param));
+        switch (command) {
+            case SPATIALIZER_PARAM_SUPPORTED_LEVELS: {
+                const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+                        mDesc.capability, Spatializer::spatializationLevel);
+                if (!range) {
+                    return BAD_VALUE;
+                }
+                std::vector<Spatialization::Level> levels;
+                for (const auto level : ::ndk::enum_range<Spatialization::Level>()) {
+                    const auto spatializer =
+                            Spatializer::make<Spatializer::spatializationLevel>(level);
+                    if (spatializer >= range->min && spatializer <= range->max) {
+                        levels.emplace_back(level);
+                    }
+                }
+                const uint8_t num = levels.size();
+                RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+                for (const auto level : levels) {
+                    RETURN_STATUS_IF_ERROR(param.writeToValue(&level));
+                }
+                return OK;
+            }
+            case SPATIALIZER_PARAM_LEVEL: {
+                Parameter aidlParam;
+                Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+                                                              Spatializer::spatializationLevel);
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+                const auto level = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                        aidlParam, Spatializer, spatializer, Spatializer::spatializationLevel,
+                        Spatialization::Level));
+                return param.writeToValue(&level);
+            }
+            case SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED: {
+                const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+                        mDesc.capability, Spatializer::spatializationLevel);
+                if (!range) {
+                    ALOGE("%s %d: range not defined for spatializationLevel", __func__, __LINE__);
+                    return BAD_VALUE;
+                }
+                const auto& nonSupport = Spatializer::make<Spatializer::spatializationLevel>(
+                        Spatialization::Level::NONE);
+                const bool support = !(range->min > range->max ||
+                        (range->min == nonSupport && range->max == nonSupport));
+                return param.writeToValue(&support);
+            }
+            case SPATIALIZER_PARAM_HEADTRACKING_MODE: {
+                Parameter aidlParam;
+                Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+                                                Spatializer::headTrackingMode);
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+                const auto mode = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                        aidlParam, Spatializer, spatializer, Spatializer::headTrackingMode,
+                        HeadTracking::Mode));
+                return param.writeToValue(&mode);
+            }
+            case SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS: {
+                Parameter aidlParam;
+                Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+                                                              Spatializer::supportedChannelLayout);
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+                const auto& supportedLayouts = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                        aidlParam, Spatializer, spatializer, Spatializer::supportedChannelLayout,
+                        std::vector<AudioChannelLayout>));
+                // audio_channel_mask_t is a uint32_t-based enum, so write the count as 32-bit
+                const uint32_t num = supportedLayouts.size();
+                RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+                for (const auto& layout : supportedLayouts) {
+                    audio_channel_mask_t mask = VALUE_OR_RETURN_STATUS(
+                            ::aidl::android::aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                                    layout, false /* isInput */));
+                    RETURN_STATUS_IF_ERROR(param.writeToValue(&mask));
+                }
+                return OK;
+            }
+            case SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES: {
+                const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+                        mDesc.capability, Spatializer::spatializationMode);
+                if (!range) {
+                    return BAD_VALUE;
+                }
+                std::vector<Spatialization::Mode> modes;
+                for (const auto mode : ::ndk::enum_range<Spatialization::Mode>()) {
+                    if (const auto spatializer =
+                                Spatializer::make<Spatializer::spatializationMode>(mode);
+                        spatializer >= range->min && spatializer <= range->max) {
+                        modes.emplace_back(mode);
+                    }
+                }
+                const uint8_t num = modes.size();
+                RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+                for (const auto mode : modes) {
+                    RETURN_STATUS_IF_ERROR(param.writeToValue(&mode));
+                }
+                return OK;
+            }
+            case SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION: {
+                const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+                        mDesc.capability, Spatializer::headTrackingConnectionMode);
+                if (!range) {
+                    return BAD_VALUE;
+                }
+                std::vector<HeadTracking::ConnectionMode> modes;
+                for (const auto mode : ::ndk::enum_range<HeadTracking::ConnectionMode>()) {
+                    if (const auto spatializer =
+                                Spatializer::make<Spatializer::headTrackingConnectionMode>(mode);
+                        spatializer >= range->min && spatializer <= range->max) {
+                        modes.emplace_back(mode);
+                    }
+                }
+                const uint8_t num = modes.size();
+                RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+                for (const auto mode : modes) {
+                    RETURN_STATUS_IF_ERROR(param.writeToValue(&mode));
+                }
+                return OK;
+            }
+            case SPATIALIZER_PARAM_HEADTRACKING_CONNECTION: {
+                status_t status = OK;
+                Parameter aidlParam;
+                Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(
+                        Spatializer, spatializerTag, Spatializer::headTrackingConnectionMode);
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+                const auto mode = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                        aidlParam, Spatializer, spatializer,
+                        Spatializer::headTrackingConnectionMode, HeadTracking::ConnectionMode));
+
+                id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+                                                Spatializer::headTrackingSensorId);
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+                const auto sensorId = VALUE_OR_RETURN_STATUS(
+                        GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Spatializer, spatializer,
+                                                     Spatializer::headTrackingSensorId, int32_t));
+                int32_t modeInt32 = static_cast<int32_t>(mode);
+                if (status = param.writeToValue(&modeInt32); status != OK) {
+                    ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
+                          toString(mode).c_str(), status);
+                    return status;
+                }
+                if (status = param.writeToValue(&sensorId); status != OK) {
+                    ALOGW("%s %d: write sensorId %d to value failed %d", __func__, __LINE__,
+                          sensorId, status);
+                    return status;
+                }
+                return OK;
+            }
+            default: {
+                VENDOR_EXTENSION_GET_AND_RETURN(Spatializer, spatializer, param);
+            }
+        }
+    } else {
+        Parameter aidlParam;
+        DefaultExtension defaultExt;
+        // read parameters into DefaultExtension vector<uint8_t>
+        defaultExt.bytes.resize(param.getParameterSize());
+        if (OK != param.readFromParameter(defaultExt.bytes.data(), param.getParameterSize())) {
+            ALOGE("%s %d invalid param %s", __func__, __LINE__, param.toString().c_str());
+            param.setStatus(BAD_VALUE);
+            return BAD_VALUE;
+        }
+
+        VendorExtension idTag;
+        idTag.extension.setParcelable(defaultExt);
+        Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, idTag);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+        // copy the AIDL extension data back to effect_param_t
+        return VALUE_OR_RETURN_STATUS(
+                ::aidl::android::aidl2legacy_Parameter_EffectParameterWriter(aidlParam, param));
+    }
 }
 
 } // namespace effect
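
Editorial note: several getParameter cases above answer legacy "supported X" queries by iterating an AIDL enum and keeping the values whose Spatializer representation falls within the capability's [min, max] range. A much-simplified sketch of that filtering idea is below; the Level enum and Range struct are stand-ins for the AIDL types, and the real code compares Spatializer union values rather than raw enums.

    #include <vector>

    // Stand-ins for the AIDL Spatialization::Level enum and the capability range.
    enum class Level { NONE, MULTICHANNEL, BED_PLUS_OBJECTS };
    struct Range { Level min; Level max; }; // inclusive

    // Keep only the values the capability range declares as supported.
    std::vector<Level> supportedLevels(const Range& range) {
        std::vector<Level> result;
        for (Level level : {Level::NONE, Level::MULTICHANNEL, Level::BED_PLUS_OBJECTS}) {
            if (level >= range.min && level <= range.max) {
                result.push_back(level);
            }
        }
        return result;
    }
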
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
index 7c60b14..444e5a7 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
@@ -32,6 +32,8 @@
     ~AidlConversionSpatializer() {}
 
   private:
+    std::optional<bool> mIsSpatializerAidlParamSupported;
+    bool isSpatializerParameterSupported();
     status_t setParameter(utils::EffectParamReader& param) override;
     status_t getParameter(utils::EffectParamWriter& param) override;
 };
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
index cad0068..db5cb9a 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
@@ -133,7 +133,6 @@
                 const audio_channel_mask_t chMask = ::aidl::android::
                         aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
                                 angle.channel, false);
-                ALOGW("%s aidl %d ch %d", __func__, angle.channel, chMask);
                 if (OK != param.writeToValue(&chMask) ||
                     OK != param.writeToValue(&angle.azimuthDegree) ||
                     OK != param.writeToValue(&angle.elevationDegree)) {
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index bb5f851..3f16526 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -78,8 +78,9 @@
     virtual status_t getParameters(const String8& keys, String8 *values) = 0;
 
     // Returns audio input buffer size according to parameters passed.
-    virtual status_t getInputBufferSize(const struct audio_config *config,
-            size_t *size) = 0;
+    // If there is no possibility for the HAL to open an input with the provided
+    // parameters, the method will return BAD_VALUE and modify the provided `config`.
+    virtual status_t getInputBufferSize(struct audio_config *config, size_t *size) = 0;
 
     // Creates and opens the audio hardware output stream. The stream is closed
     // by releasing all references to the returned object.
@@ -89,7 +90,8 @@
             audio_output_flags_t flags,
             struct audio_config *config,
             const char *address,
-            sp<StreamOutHalInterface> *outStream) = 0;
+            sp<StreamOutHalInterface> *outStream,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata = {}) = 0;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
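
Editorial note: the revised getInputBufferSize() contract allows the HAL to reject the requested parameters with BAD_VALUE while writing an alternative configuration back into config. A plausible (not prescribed by the patch) caller pattern is to retry once with the suggestion; the sketch below uses invented stand-in types rather than the real audio_config and DeviceHalInterface.

    #include <cstddef>

    // Invented stand-ins for the Android types involved.
    struct AudioConfig { int sampleRate = 44100; int channelCount = 2; };
    enum Status { OK = 0, BAD_VALUE = -22 };

    struct FakeDevice {
        // Mirrors the contract: on BAD_VALUE, *config may be overwritten with
        // a configuration the HAL could actually open.
        Status getInputBufferSize(AudioConfig* config, size_t* size) {
            if (config->sampleRate != 48000) {
                config->sampleRate = 48000; // suggest a supported rate
                return BAD_VALUE;
            }
            *size = 960;
            return OK;
        }
    };

    // Caller pattern: retry once with the configuration suggested by the HAL.
    size_t queryBufferSize(FakeDevice& dev, AudioConfig requested) {
        size_t size = 0;
        if (dev.getInputBufferSize(&requested, &size) == BAD_VALUE) {
            (void)dev.getInputBufferSize(&requested, &size); // 'requested' now holds the suggestion
        }
        return size;
    }
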
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index a780a17..4bd7e3d 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -107,7 +107,7 @@
   public:
     virtual void onWriteReady() {}
     virtual void onDrainReady() {}
-    virtual void onError() {}
+    virtual void onError(bool /*isHardError*/) {}
 
   protected:
     StreamOutHalInterfaceCallback() = default;
@@ -116,7 +116,7 @@
 
 class StreamOutHalInterfaceEventCallback : public virtual RefBase {
 public:
-    virtual void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs) = 0;
+    virtual void onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) = 0;
 
 protected:
     StreamOutHalInterfaceEventCallback() = default;
@@ -135,6 +135,38 @@
     virtual ~StreamOutHalInterfaceLatencyModeCallback() = default;
 };
 
+/**
+ * On position reporting. There are two methods: 'getRenderPosition' and
+ * 'getPresentationPosition'. The first difference is that they may have a
+ * time offset because "render" position relates to what happens between
+ * ADSP and DAC, while "observable" position is relative to the external
+ * observer. The second difference is that 'getRenderPosition' always
+ * resets on standby (for all types of stream data) according to its
+ * definition. Since the original C definition of 'getRenderPosition' used
+ * 32-bit frame counters, and also because in complex playback chains that
+ * include wireless devices the "observable" position has more practical
+ * meaning, 'getRenderPosition' does not exist in the AIDL HAL interface.
+ * The table below summarizes frame count behavior for 'getPresentationPosition':
+ *
+ *               | Mixed      | Direct       | Direct
+ *               |            | non-offload  | offload
+ * ==============|============|==============|==============
+ *  PCM and      | Continuous |              |
+ *  encapsulated |            |              |
+ *  bitstream    |            |              |
+ * --------------|------------| Continuous†  |
+ *  Bitstream    |            |              | Reset on
+ *  encapsulated |            |              | flush, drain
+ *  into PCM     |            |              | and standby
+ *               | Not        |              |
+ * --------------| supported  |--------------|
+ *  Bitstream    |            | Reset on     |
+ *               |            | flush, drain |
+ *               |            | and standby  |
+ *               |            |              |
+ *
+ * † - on standby, reset of the frame count happens at the framework level.
+ */
 class StreamOutHalInterface : public virtual StreamHalInterface {
   public:
     // Return the audio hardware driver estimated latency in milliseconds.
@@ -151,10 +183,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    virtual status_t getRenderPosition(uint32_t *dspFrames) = 0;
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    virtual status_t getNextWriteTimestamp(int64_t *timestamp) = 0;
+    virtual status_t getRenderPosition(uint64_t *dspFrames) = 0;
 
     // Set the callback for notifying completion of non-blocking write and drain.
     // The callback must be owned by someone else. The output stream does not own it
@@ -176,12 +205,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     virtual status_t drain(bool earlyNotify) = 0;
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. The stream must
+    // already be paused before calling 'flush'.
     virtual status_t flush() = 0;
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    virtual status_t presentationComplete() = 0;
+
     struct SourceMetadata {
         std::vector<playback_track_metadata_v7_t> tracks;
     };
@@ -270,6 +306,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0;
 
     // Get active microphones
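
Editorial note: the comment block above distinguishes the "observable" presentation position from the render position and specifies when the count resets. A typical client-side use of the (frames, timestamp) pair from getPresentationPosition() is to extrapolate the position to the current time; the sketch below shows that arithmetic as a common caller-side calculation, not something mandated by this interface.

    #include <cstdint>
    #include <ctime>

    // Extrapolate a presentation position to 'now', assuming playback continued
    // at the nominal sample rate since the HAL captured the timestamp.
    uint64_t extrapolatePosition(uint64_t frames, const timespec& capturedAt,
                                 const timespec& now, uint32_t sampleRate) {
        const int64_t elapsedNs = (now.tv_sec - capturedAt.tv_sec) * 1'000'000'000LL +
                                  (now.tv_nsec - capturedAt.tv_nsec);
        if (elapsedNs <= 0) return frames;
        return frames + static_cast<uint64_t>(elapsedNs) * sampleRate / 1'000'000'000ULL;
    }
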
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
index 8f011c8..f6a7eea 100644
--- a/media/libaudiohal/tests/Android.bp
+++ b/media/libaudiohal/tests/Android.bp
@@ -17,6 +17,7 @@
 // frameworks/av/include.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
@@ -24,8 +25,8 @@
     name: "libaudiohal_aidl_test_default",
     test_suites: ["device-tests"],
     defaults: [
-        "libaudiohal_default",
         "libaudiohal_aidl_default",
+        "libaudiohal_default",
     ],
     shared_libs: [
         "libaudiohal",
@@ -35,8 +36,8 @@
 cc_test {
     name: "CoreAudioHalAidlTest",
     srcs: [
-        "CoreAudioHalAidl_test.cpp",
         ":core_audio_hal_aidl_src_files",
+        "CoreAudioHalAidl_test.cpp",
     ],
     defaults: ["libaudiohal_aidl_test_default"],
     header_libs: ["libaudiohalimpl_headers"],
@@ -49,13 +50,14 @@
     shared_libs: [
         "libvibrator",
     ],
+    test_config_template: "AudioHalTestTemplate.xml",
 }
 
 cc_test {
     name: "EffectProxyTest",
     srcs: [
-        "EffectProxy_test.cpp",
         ":audio_effectproxy_src_files",
+        "EffectProxy_test.cpp",
     ],
     defaults: [
         "libaudiohal_aidl_test_default",
@@ -63,3 +65,14 @@
     ],
     header_libs: ["libaudiohalimpl_headers"],
 }
+
+cc_test {
+    name: "EffectHalVersionCompatibilityTest",
+    srcs: [
+        ":audio_effect_hal_aidl_src_files",
+        "EffectHalVersionCompatibility_test.cpp",
+    ],
+    defaults: ["libaudiohal_aidl_test_default"],
+    header_libs: ["libaudiohalimpl_headers"],
+    static_libs: ["libgmock"],
+}
diff --git a/media/libaudiohal/tests/AudioHalTestTemplate.xml b/media/libaudiohal/tests/AudioHalTestTemplate.xml
new file mode 100644
index 0000000..b1cb2f0
--- /dev/null
+++ b/media/libaudiohal/tests/AudioHalTestTemplate.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Runs {MODULE}.">
+    <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer" />
+
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="{MODULE}" />
+        <option name="native-test-timeout" value="10m" />
+    </test>
+</configuration>
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 1204a3b..50b748e 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -14,7 +14,9 @@
  * limitations under the License.
  */
 
+#include <algorithm>
 #include <memory>
+#include <mutex>
 #include <string>
 #include <vector>
 
@@ -22,16 +24,38 @@
 #include <gtest/gtest.h>
 
 #include <DeviceHalAidl.h>
+#include <Hal2AidlMapper.h>
 #include <StreamHalAidl.h>
 #include <aidl/android/hardware/audio/core/BnModule.h>
 #include <aidl/android/hardware/audio/core/BnStreamCommon.h>
 #include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
+#include <aidl/android/media/audio/common/AudioGainMode.h>
 #include <aidl/android/media/audio/common/Int.h>
 #include <utils/Log.h>
 
 namespace {
 
+using ::aidl::android::hardware::audio::core::AudioPatch;
+using ::aidl::android::hardware::audio::core::AudioRoute;
 using ::aidl::android::hardware::audio::core::VendorParameter;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
+using ::aidl::android::media::audio::common::AudioConfig;
+using ::aidl::android::media::audio::common::AudioDevice;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+using ::aidl::android::media::audio::common::AudioDeviceType;
+using ::aidl::android::media::audio::common::AudioFormatDescription;
+using ::aidl::android::media::audio::common::AudioFormatType;
+using ::aidl::android::media::audio::common::AudioGainConfig;
+using ::aidl::android::media::audio::common::AudioGainMode;
+using ::aidl::android::media::audio::common::AudioIoFlags;
+using ::aidl::android::media::audio::common::AudioPort;
+using ::aidl::android::media::audio::common::AudioPortConfig;
+using ::aidl::android::media::audio::common::AudioPortDeviceExt;
+using ::aidl::android::media::audio::common::AudioPortExt;
+using ::aidl::android::media::audio::common::AudioPortMixExt;
+using ::aidl::android::media::audio::common::AudioProfile;
+using ::aidl::android::media::audio::common::AudioSource;
+using ::aidl::android::media::audio::common::PcmType;
 
 class VendorParameterMock {
   public:
@@ -63,11 +87,143 @@
     std::vector<VendorParameter> mSyncParameters;
 };
 
+struct Configuration {
+    std::vector<AudioPort> ports;
+    std::vector<AudioPortConfig> portConfigs;
+    std::vector<AudioRoute> routes;
+    std::vector<AudioPatch> patches;
+    int32_t nextPortId = 1;
+    int32_t nextPatchId = 1;
+};
+
+void fillProfile(AudioProfile* profile, const std::vector<int32_t>& channelLayouts,
+                 const std::vector<int32_t>& sampleRates) {
+    for (auto layout : channelLayouts) {
+        profile->channelMasks.push_back(
+                AudioChannelLayout::make<AudioChannelLayout::layoutMask>(layout));
+    }
+    profile->sampleRates.insert(profile->sampleRates.end(), sampleRates.begin(), sampleRates.end());
+}
+
+AudioProfile createProfile(PcmType pcmType, const std::vector<int32_t>& channelLayouts,
+                           const std::vector<int32_t>& sampleRates) {
+    AudioProfile profile;
+    profile.format.type = AudioFormatType::PCM;
+    profile.format.pcm = pcmType;
+    fillProfile(&profile, channelLayouts, sampleRates);
+    return profile;
+}
+
+AudioPortExt createPortDeviceExt(AudioDeviceType devType, int32_t flags,
+                                 std::string connection = "") {
+    AudioPortDeviceExt deviceExt;
+    deviceExt.device.type.type = devType;
+    if (devType == AudioDeviceType::IN_MICROPHONE && connection.empty()) {
+        deviceExt.device.address = "bottom";
+    } else if (devType == AudioDeviceType::IN_MICROPHONE_BACK && connection.empty()) {
+        deviceExt.device.address = "back";
+    }
+    deviceExt.device.type.connection = std::move(connection);
+    deviceExt.flags = flags;
+    return AudioPortExt::make<AudioPortExt::device>(deviceExt);
+}
+
+AudioPortExt createPortMixExt(int32_t maxOpenStreamCount, int32_t maxActiveStreamCount) {
+    AudioPortMixExt mixExt;
+    mixExt.maxOpenStreamCount = maxOpenStreamCount;
+    mixExt.maxActiveStreamCount = maxActiveStreamCount;
+    return AudioPortExt::make<AudioPortExt::mix>(mixExt);
+}
+
+AudioPort createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+                     const AudioPortExt& ext) {
+    AudioPort port;
+    port.id = id;
+    port.name = name;
+    port.flags = isInput ? AudioIoFlags::make<AudioIoFlags::input>(flags)
+                         : AudioIoFlags::make<AudioIoFlags::output>(flags);
+    port.ext = ext;
+    return port;
+}
+
+AudioRoute createRoute(const std::vector<AudioPort>& sources, const AudioPort& sink) {
+    AudioRoute route;
+    route.sinkPortId = sink.id;
+    std::transform(sources.begin(), sources.end(), std::back_inserter(route.sourcePortIds),
+                   [](const auto& port) { return port.id; });
+    return route;
+}
+
+template <typename T>
+auto findById(std::vector<T>& v, int32_t id) {
+    return std::find_if(v.begin(), v.end(), [&](const auto& e) { return e.id == id; });
+}
+
+Configuration getTestConfiguration() {
+    const std::vector<AudioProfile> standardPcmAudioProfiles = {
+            createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {48000})};
+    Configuration c;
+
+    AudioPort micInDevice =
+            createPort(c.nextPortId++, "Built-In Mic", 0, true,
+                       createPortDeviceExt(AudioDeviceType::IN_MICROPHONE,
+                                           1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
+    micInDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(micInDevice);
+
+    AudioPort micInBackDevice =
+            createPort(c.nextPortId++, "Built-In Back Mic", 0, true,
+                       createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0));
+    micInBackDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(micInBackDevice);
+
+    AudioPort primaryInMix =
+            createPort(c.nextPortId++, "primary input", 0, true, createPortMixExt(0, 1));
+    primaryInMix.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(primaryInMix);
+
+    AudioPort speakerOutDevice = createPort(c.nextPortId++, "Speaker", 0, false,
+                                            createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0));
+    speakerOutDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(speakerOutDevice);
+
+    AudioPort btOutDevice =
+            createPort(c.nextPortId++, "BT A2DP Out", 0, false,
+                       createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
+                                           AudioDeviceDescription::CONNECTION_BT_A2DP));
+    btOutDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(btOutDevice);
+
+    AudioPort btOutMix =
+            createPort(c.nextPortId++, "a2dp output", 0, false, createPortMixExt(1, 1));
+    btOutMix.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(btOutMix);
+
+    c.routes.push_back(createRoute({micInDevice, micInBackDevice}, primaryInMix));
+    c.routes.push_back(createRoute({btOutMix}, btOutDevice));
+
+    return c;
+}
+
 class ModuleMock : public ::aidl::android::hardware::audio::core::BnModule,
                    public VendorParameterMock {
   public:
+    ModuleMock() = default;
+    explicit ModuleMock(const Configuration& config) : mConfig(config) {}
     bool isScreenTurnedOn() const { return mIsScreenTurnedOn; }
     ScreenRotation getScreenRotation() const { return mScreenRotation; }
+    std::vector<AudioPatch> getPatches() {
+        std::vector<AudioPatch> result;
+        getAudioPatches(&result);
+        return result;
+    }
+    std::optional<AudioPortConfig> getPortConfig(int32_t id) {
+        auto iter = findById<AudioPortConfig>(mConfig.portConfigs, id);
+        if (iter != mConfig.portConfigs.end()) {
+            return *iter;
+        }
+        return std::nullopt;
+    }
 
   private:
     ndk::ScopedAStatus setModuleDebug(
@@ -91,35 +247,91 @@
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus connectExternalDevice(
-            const ::aidl::android::media::audio::common::AudioPort&,
-            ::aidl::android::media::audio::common::AudioPort*) override {
+            const ::aidl::android::media::audio::common::AudioPort& portIdAndData,
+            ::aidl::android::media::audio::common::AudioPort* port) override {
+        auto src = portIdAndData;  // Make a copy to mimic RPC behavior.
+        auto iter = findById<AudioPort>(mConfig.ports, src.id);
+        if (iter == mConfig.ports.end()) {
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+        }
+        *port = *iter;
+        port->ext = src.ext;
+        port->id = mConfig.nextPortId++;
+        ALOGD("%s: returning %s", __func__, port->toString().c_str());
+        mConfig.ports.push_back(*port);
+        std::vector<AudioRoute> newRoutes;
+        for (auto& r : mConfig.routes) {
+            if (r.sinkPortId == src.id) {
+                newRoutes.push_back(AudioRoute{.sourcePortIds = r.sourcePortIds,
+                                               .sinkPortId = port->id,
+                                               .isExclusive = r.isExclusive});
+            } else if (std::find(r.sourcePortIds.begin(), r.sourcePortIds.end(), src.id) !=
+                       r.sourcePortIds.end()) {
+                r.sourcePortIds.push_back(port->id);
+            }
+        }
+        mConfig.routes.insert(mConfig.routes.end(), newRoutes.begin(), newRoutes.end());
         return ndk::ScopedAStatus::ok();
     }
-    ndk::ScopedAStatus disconnectExternalDevice(int32_t) override {
+    ndk::ScopedAStatus disconnectExternalDevice(int32_t portId) override {
+        auto iter = findById<AudioPort>(mConfig.ports, portId);
+        if (iter == mConfig.ports.end()) {
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+        }
+        mConfig.ports.erase(iter);
+        for (auto it = mConfig.routes.begin(); it != mConfig.routes.end();) {
+            if (it->sinkPortId == portId) {
+                it = mConfig.routes.erase(it);
+            } else {
+                if (auto srcIt =
+                            std::find(it->sourcePortIds.begin(), it->sourcePortIds.end(), portId);
+                    srcIt != it->sourcePortIds.end()) {
+                    it->sourcePortIds.erase(srcIt);
+                }
+                ++it;
+            }
+        }
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus getAudioPatches(
-            std::vector<::aidl::android::hardware::audio::core::AudioPatch>*) override {
+            std::vector<::aidl::android::hardware::audio::core::AudioPatch>* patches) override {
+        *patches = mConfig.patches;
         return ndk::ScopedAStatus::ok();
     }
-    ndk::ScopedAStatus getAudioPort(int32_t,
-                                    ::aidl::android::media::audio::common::AudioPort*) override {
+    ndk::ScopedAStatus getAudioPort(
+            int32_t portId, ::aidl::android::media::audio::common::AudioPort* port) override {
+        auto iter = findById<AudioPort>(mConfig.ports, portId);
+        if (iter == mConfig.ports.end()) {
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+        }
+        *port = *iter;
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus getAudioPortConfigs(
-            std::vector<::aidl::android::media::audio::common::AudioPortConfig>*) override {
+            std::vector<::aidl::android::media::audio::common::AudioPortConfig>* configs) override {
+        *configs = mConfig.portConfigs;
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus getAudioPorts(
-            std::vector<::aidl::android::media::audio::common::AudioPort>*) override {
+            std::vector<::aidl::android::media::audio::common::AudioPort>* ports) override {
+        *ports = mConfig.ports;
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus getAudioRoutes(
-            std::vector<::aidl::android::hardware::audio::core::AudioRoute>*) override {
+            std::vector<::aidl::android::hardware::audio::core::AudioRoute>* routes) override {
+        *routes = mConfig.routes;
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus getAudioRoutesForAudioPort(
-            int32_t, std::vector<::aidl::android::hardware::audio::core::AudioRoute>*) override {
+            int32_t portId,
+            std::vector<::aidl::android::hardware::audio::core::AudioRoute>* routes) override {
+        for (auto& r : mConfig.routes) {
+            const auto& srcs = r.sourcePortIds;
+            if (r.sinkPortId == portId ||
+                std::find(srcs.begin(), srcs.end(), portId) != srcs.end()) {
+                routes->push_back(r);
+            }
+        }
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus openInputStream(const OpenInputStreamArguments&,
@@ -133,17 +345,69 @@
     ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
         return ndk::ScopedAStatus::ok();
     }
-    ndk::ScopedAStatus setAudioPatch(const ::aidl::android::hardware::audio::core::AudioPatch&,
-                                     ::aidl::android::hardware::audio::core::AudioPatch*) override {
+    ndk::ScopedAStatus setAudioPatch(
+            const ::aidl::android::hardware::audio::core::AudioPatch& requested,
+            ::aidl::android::hardware::audio::core::AudioPatch* patch) override {
+        if (requested.id == 0) {
+            *patch = requested;
+            patch->id = mConfig.nextPatchId++;
+            mConfig.patches.push_back(*patch);
+            ALOGD("%s: returning %s", __func__, patch->toString().c_str());
+        } else {
+            auto iter = findById<AudioPatch>(mConfig.patches, requested.id);
+            if (iter == mConfig.patches.end()) {
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+            }
+            *iter = *patch = requested;
+            ALOGD("%s: updated %s", __func__, patch->toString().c_str());
+        }
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus setAudioPortConfig(
-            const ::aidl::android::media::audio::common::AudioPortConfig&,
-            ::aidl::android::media::audio::common::AudioPortConfig*, bool*) override {
+            const ::aidl::android::media::audio::common::AudioPortConfig& requested,
+            ::aidl::android::media::audio::common::AudioPortConfig* config,
+            bool* applied) override {
+        *applied = false;
+        auto src = requested;  // Make a copy to mimic RPC behavior.
+        if (src.id == 0) {
+            *config = src;
+            if (config->ext.getTag() == AudioPortExt::unspecified) {
+                auto iter = findById<AudioPort>(mConfig.ports, src.portId);
+                if (iter == mConfig.ports.end()) {
+                    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+                }
+                config->ext = iter->ext;
+            }
+            config->id = mConfig.nextPortId++;
+            mConfig.portConfigs.push_back(*config);
+            ALOGD("%s: returning %s", __func__, config->toString().c_str());
+        } else {
+            auto iter = findById<AudioPortConfig>(mConfig.portConfigs, src.id);
+            if (iter == mConfig.portConfigs.end()) {
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+            }
+            *iter = *config = src;
+            ALOGD("%s: updated %s", __func__, config->toString().c_str());
+        }
+        *applied = true;
         return ndk::ScopedAStatus::ok();
     }
-    ndk::ScopedAStatus resetAudioPatch(int32_t) override { return ndk::ScopedAStatus::ok(); }
-    ndk::ScopedAStatus resetAudioPortConfig(int32_t) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus resetAudioPatch(int32_t patchId) override {
+        auto iter = findById<AudioPatch>(mConfig.patches, patchId);
+        if (iter == mConfig.patches.end()) {
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+        }
+        mConfig.patches.erase(iter);
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus resetAudioPortConfig(int32_t portConfigId) override {
+        auto iter = findById<AudioPortConfig>(mConfig.portConfigs, portConfigId);
+        if (iter == mConfig.portConfigs.end()) {
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+        }
+        mConfig.portConfigs.erase(iter);
+        return ndk::ScopedAStatus::ok();
+    }
     ndk::ScopedAStatus getMasterMute(bool*) override { return ndk::ScopedAStatus::ok(); }
     ndk::ScopedAStatus setMasterMute(bool) override { return ndk::ScopedAStatus::ok(); }
     ndk::ScopedAStatus getMasterVolume(float*) override { return ndk::ScopedAStatus::ok(); }
@@ -205,6 +469,7 @@
         return ndk::ScopedAStatus::ok();
     }
 
+    Configuration mConfig;
     bool mIsScreenTurnedOn = false;
     ScreenRotation mScreenRotation = ScreenRotation::DEG_0;
 };
@@ -396,8 +661,50 @@
 }
 }  // namespace aidl::android::hardware::audio::core
 
+namespace aidl::android::media::audio::common {
+template <typename P>
+std::enable_if_t<std::is_function_v<typename mf_traits<decltype(&P::toString)>::member_type>,
+                 std::ostream&>
+operator<<(std::ostream& os, const P& p) {
+    return os << p.toString();
+}
+template <typename E>
+std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) {
+    return os << toString(e);
+}
+}  // namespace aidl::android::media::audio::common
+
 using namespace android;
 
+namespace {
+
+class StreamHalMock : public virtual StreamHalInterface {
+  public:
+    StreamHalMock() = default;
+    ~StreamHalMock() override = default;
+    status_t getBufferSize(size_t*) override { return OK; }
+    status_t getAudioProperties(audio_config_base_t*) override { return OK; }
+    status_t setParameters(const String8&) override { return OK; }
+    status_t getParameters(const String8&, String8*) override { return OK; }
+    status_t getFrameSize(size_t*) override { return OK; }
+    status_t addEffect(sp<EffectHalInterface>) override { return OK; }
+    status_t removeEffect(sp<EffectHalInterface>) override { return OK; }
+    status_t standby() override { return OK; }
+    status_t dump(int, const Vector<String16>&) override { return OK; }
+    status_t start() override { return OK; }
+    status_t stop() override { return OK; }
+    status_t createMmapBuffer(int32_t, struct audio_mmap_buffer_info*) override { return OK; }
+    status_t getMmapPosition(struct audio_mmap_position*) override { return OK; }
+    status_t setHalThreadPriority(int) override { return OK; }
+    status_t legacyCreateAudioPatch(const struct audio_port_config&, std::optional<audio_source_t>,
+                                    audio_devices_t) override {
+        return OK;
+    }
+    status_t legacyReleaseAudioPatch() override { return OK; }
+};
+
+}  // namespace
+
 class DeviceHalAidlTest : public testing::Test {
   public:
     void SetUp() override {
@@ -443,21 +750,6 @@
     EXPECT_EQ(ScreenRotation::DEG_0, mModule->getScreenRotation());
 }
 
-// Without a vendor extension, any unrecognized parameters must be ignored.
-TEST_F(DeviceHalAidlTest, VendorParameterIgnored) {
-    EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
-    EXPECT_EQ(0UL, mModule->getSyncParameters().size());
-    EXPECT_EQ(OK, mDevice->setParameters(createParameterString("random_name", "random_value")));
-    EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
-    EXPECT_EQ(0UL, mModule->getSyncParameters().size());
-
-    EXPECT_EQ(0UL, mModule->getRetrievedParameterIds().size());
-    String8 values;
-    EXPECT_EQ(OK, mDevice->getParameters(String8("random_name"), &values));
-    EXPECT_EQ(0UL, mModule->getRetrievedParameterIds().size());
-    EXPECT_EQ(0UL, values.length());
-}
-
 class DeviceHalAidlVendorParametersTest : public testing::Test {
   public:
     void SetUp() override {
@@ -608,3 +900,397 @@
     EXPECT_EQ(0UL, mStreamCommon->getAsyncParameters().size());
     EXPECT_EQ(0UL, mStreamCommon->getSyncParameters().size());
 }
+
+class Hal2AidlMapperTest : public testing::Test {
+  public:
+    void SetUp() override {
+        mModule = ndk::SharedRefBase::make<ModuleMock>(getTestConfiguration());
+        mMapper = std::make_unique<Hal2AidlMapper>("test", mModule);
+        ASSERT_EQ(OK, mMapper->initialize());
+
+        mConnectedPort.ext = createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
+                                                 AudioDeviceDescription::CONNECTION_BT_A2DP);
+        mConnectedPort.ext.get<AudioPortExt::device>().device.address = "00:11:22:33:44:55";
+        ASSERT_EQ(OK, mMapper->setDevicePortConnectedState(mConnectedPort, true /*connected*/));
+
+        std::mutex mutex;  // Only needed for cleanups.
+        auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+        Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+        AudioConfig config;
+        config.base.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+                AudioChannelLayout::LAYOUT_STEREO);
+        config.base.format =
+                AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+        config.base.sampleRate = 48000;
+        ASSERT_EQ(OK,
+                  mMapper->prepareToOpenStream(
+                          42 /*ioHandle*/, mConnectedPort.ext.get<AudioPortExt::device>().device,
+                          AudioIoFlags::make<AudioIoFlags::output>(0), AudioSource::DEFAULT,
+                          &cleanups, &config, &mMixPortConfig, &mPatch));
+        cleanups.disarmAll();
+        ASSERT_NE(0, mPatch.id);
+        ASSERT_NE(0, mMixPortConfig.id);
+        mStream = sp<StreamHalMock>::make();
+        mMapper->addStream(mStream, mMixPortConfig.id, mPatch.id);
+
+        ASSERT_EQ(OK, mMapper->findPortConfig(mConnectedPort.ext.get<AudioPortExt::device>().device,
+                                              &mDevicePortConfig));
+        ASSERT_EQ(1UL, mPatch.sourcePortConfigIds.size());
+        ASSERT_EQ(mMixPortConfig.id, mPatch.sourcePortConfigIds[0]);
+        ASSERT_EQ(1UL, mPatch.sinkPortConfigIds.size());
+        ASSERT_EQ(mDevicePortConfig.id, mPatch.sinkPortConfigIds[0]);
+    }
+
+    void TearDown() override {
+        mStream.clear();
+        mMapper.reset();
+        mModule.reset();
+    }
+
+  protected:
+    void CloseDisconnectImpl() {
+        mStream.clear();
+        ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    }
+
+    void ConnectAnotherDevice() {
+        mConnectedPort.ext.get<AudioPortExt::device>().device.address = "00:11:22:33:44:66";
+        ASSERT_EQ(OK, mMapper->setDevicePortConnectedState(mConnectedPort, true /*connected*/));
+    }
+
+    void CreateFwkPatch(int32_t* patchId) {
+        std::mutex mutex;  // Only needed for cleanups.
+        auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+        Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+        ASSERT_EQ(OK, mMapper->createOrUpdatePatch({mMixPortConfig}, {mDevicePortConfig}, patchId,
+                                                   &cleanups));
+        cleanups.disarmAll();
+    }
+
+    void DisconnectDevice() {
+        ASSERT_EQ(OK, mMapper->prepareToDisconnectExternalDevice(mConnectedPort));
+        ASSERT_EQ(OK, mMapper->setDevicePortConnectedState(mConnectedPort, false /*connected*/));
+    }
+
+    void ReleaseFwkOnlyPatch(int32_t patchId) {
+        // The patch only exists for the framework.
+        EXPECT_EQ(patchId, mMapper->findFwkPatch(patchId));
+        ASSERT_EQ(BAD_VALUE, mMapper->releaseAudioPatch(patchId));
+        mMapper->eraseFwkPatch(patchId);
+        // The patch is now erased.
+        EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+    }
+
+    std::shared_ptr<ModuleMock> mModule;
+    std::unique_ptr<Hal2AidlMapper> mMapper;
+    AudioPort mConnectedPort;
+    AudioPortConfig mMixPortConfig;
+    AudioPortConfig mDevicePortConfig;
+    AudioPatch mPatch;
+    sp<StreamHalInterface> mStream;
+};
+
+/**
+ * External device connections and patches tests diagram.
+ *
+ * [Connect device] -> [Create Stream]
+ *                            |-> [ (1) Close Stream] -> [Disconnect Device]
+ *                            |-> [ (2) Disconnect Device]
+ *                            |          |-> [ (3) Close Stream]
+ *                            |          \-> [ (4) Connect Another Device]
+ *                            |                    |-> (1)
+ *                            |                    |-> (2) -> (3)
+ *                            |                    \-> (5) -> (7)
+ *                            \-> [ (5) Create/Update Fwk Patch]
+ *                                       |-> [(6) Release Fwk Patch]
+ *                                       |        |-> (1)
+ *                                       |        \-> (2) (including reconnection)
+ *                                       \-> [(7) Disconnect Device]
+ *                                                |-> [Release Fwk Patch] -> [Close Stream]
+ *                                                \-> (4) -> (5) -> (6) -> (1)
+ *
+ * Note that the test (acting on behalf of DeviceHalAidl) is responsible
+ * for calling `eraseFwkPatch` and `updateFwkPatch` when needed.
+ */
+
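+// A minimal sketch (illustrative only, not executed by these tests) of the
+// bookkeeping expected from the caller for a framework-owned patch once the HAL
+// side has already dropped it; `mapper` and `patchId` are hypothetical names, the
+// methods are the Hal2AidlMapper calls exercised by ReleaseFwkOnlyPatch() above:
+//
+//   if (const int32_t fwkId = mapper.findFwkPatch(patchId); fwkId != 0) {
+//       (void)mapper.releaseAudioPatch(fwkId);  // BAD_VALUE when only the fwk still knows it
+//       mapper.eraseFwkPatch(patchId);          // drop the fwk-only bookkeeping entry
+//   }
+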
+// (1)
+TEST_F(Hal2AidlMapperTest, CloseDisconnect) {
+    ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+    // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+}
+
+// (2) -> (3)
+TEST_F(Hal2AidlMapperTest, DisconnectClose) {
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+    mStream.clear();
+}
+
+// (2) -> (4) -> (1)
+TEST_F(Hal2AidlMapperTest, DisconnectConnectCloseDisconnect) {
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+    ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+    ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+    // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+}
+
+// (2) -> (4) -> (2) -> (3)
+TEST_F(Hal2AidlMapperTest, DisconnectConnectDisconnectClose) {
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+    ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+    mStream.clear();
+}
+
+// (5) -> (6) -> (1)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchReleaseCloseDisconnect) {
+    int32_t patchId;
+    ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+    // Must be the patch created during stream opening.
+    ASSERT_EQ(mPatch.id, patchId);
+    // The patch was not reset by HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+    ASSERT_EQ(OK, mMapper->releaseAudioPatch(patchId));
+    // The patch exists neither for the fwk nor for the HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+    ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+    // The patch exists neither for the fwk nor for the HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+}
+
+// (5) -> (6) -> (2) -> (3)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchReleaseDisconnectClose) {
+    int32_t patchId;
+    ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+    // Must be the patch created during stream opening.
+    ASSERT_EQ(mPatch.id, patchId);
+    // The patch was not reset by HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+    ASSERT_EQ(OK, mMapper->releaseAudioPatch(patchId));
+    // The patch exists neither for the fwk nor for the HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    // The patch exists neither for the fwk nor for the HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+    mStream.clear();
+}
+
+// (5) -> (6) -> (2) -> (4) -> (2) -> (3)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchReleaseDisconnectConnectDisconnectClose) {
+    int32_t patchId;
+    ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+    // Must be the patch created during stream opening.
+    ASSERT_EQ(mPatch.id, patchId);
+    // The patch was not reset by HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+    ASSERT_EQ(OK, mMapper->releaseAudioPatch(patchId));
+    // The patch exists neither for the fwk nor for the HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    // The patch exists neither for the fwk nor for the HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+    ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    // The patch exists neither for the fwk nor for the HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+    mStream.clear();
+}
+
+// (5) -> (7) -> Release -> Close
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchDisconnectReleaseClose) {
+    int32_t patchId;
+    ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+    // Must be the patch created during stream opening.
+    ASSERT_EQ(mPatch.id, patchId);
+    // The patch was not reset by HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    ASSERT_NO_FATAL_FAILURE(ReleaseFwkOnlyPatch(patchId));
+
+    mStream.clear();
+    EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+}
+
+// (5) -> (7) -> (4) -> (5) -> (6) -> (1)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchDisconnectConnectUpdateReleaseCloseDisconnect) {
+    int32_t patchId;
+    ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+    // Must be the patch created during stream opening.
+    ASSERT_EQ(mPatch.id, patchId);
+    // The patch was not reset by HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    // The patch now only exists for the framework.
+    EXPECT_EQ(mPatch.id, mMapper->findFwkPatch(mPatch.id));
+
+    ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+    // Change the device address locally, for patch update.
+    mDevicePortConfig.ext.get<AudioPortExt::device>().device.address =
+            mConnectedPort.ext.get<AudioPortExt::device>().device.address;
+    int32_t newPatchId = patchId;
+    ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&newPatchId));
+    EXPECT_NE(patchId, newPatchId);
+    mMapper->updateFwkPatch(patchId, newPatchId);
+    EXPECT_EQ(newPatchId, mMapper->findFwkPatch(patchId));
+    // Just in case, check that HAL patch ID is not listed as a fwk patch.
+    EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+    // Verify that device port config was updated.
+    ASSERT_EQ(OK, mMapper->findPortConfig(mConnectedPort.ext.get<AudioPortExt::device>().device,
+                                          &mDevicePortConfig));
+
+    ASSERT_EQ(OK, mMapper->releaseAudioPatch(newPatchId));
+    // The patch exists neither for the fwk nor for the HAL, must not be listed under fwkPatches.
+    EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+    // Just in case, check that HAL patch ID is not listed.
+    EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+
+    ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+    EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+    EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+}
+
+// (2) -> (4) -> (5) -> (7) -> Release -> Close
+TEST_F(Hal2AidlMapperTest, DisconnectConnectCreateFwkPatchDisconnectReleaseClose) {
+    const int32_t patchId = mPatch.id;
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+    ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+    // Change the device address locally, for patch update.
+    mDevicePortConfig.ext.get<AudioPortExt::device>().device.address =
+            mConnectedPort.ext.get<AudioPortExt::device>().device.address;
+    int32_t newPatchId = 0;  // Use 0 since the fwk does not know about the HAL patch.
+    EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+    ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&newPatchId));
+    EXPECT_NE(0, newPatchId);
+    EXPECT_NE(patchId, newPatchId);
+    // Just in case, check that HAL patch ID is not listed as a fwk patch.
+    EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+    // Verify that device port config was updated.
+    ASSERT_EQ(OK, mMapper->findPortConfig(mConnectedPort.ext.get<AudioPortExt::device>().device,
+                                          &mDevicePortConfig));
+
+    ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+    ASSERT_NO_FATAL_FAILURE(ReleaseFwkOnlyPatch(newPatchId));
+
+    mStream.clear();
+    EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+    EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+}
+
+TEST_F(Hal2AidlMapperTest, ChangeTransientPatchDevice) {
+    std::mutex mutex;  // Only needed for cleanups.
+    auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+    Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+    AudioConfig config;
+    config.base.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+            AudioChannelLayout::LAYOUT_STEREO);
+    config.base.format =
+            AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+    config.base.sampleRate = 48000;
+    AudioDevice defaultDevice;
+    defaultDevice.type.type = AudioDeviceType::IN_DEFAULT;
+    AudioPortConfig mixPortConfig;
+    AudioPatch transientPatch;
+    ASSERT_EQ(OK, mMapper->prepareToOpenStream(43 /*ioHandle*/, defaultDevice,
+                                               AudioIoFlags::make<AudioIoFlags::input>(0),
+                                               AudioSource::DEFAULT, &cleanups, &config,
+                                               &mixPortConfig, &transientPatch));
+    cleanups.disarmAll();
+    ASSERT_NE(0, transientPatch.id);
+    ASSERT_NE(0, mixPortConfig.id);
+    sp<StreamHalInterface> stream = sp<StreamHalMock>::make();
+    mMapper->addStream(stream, mixPortConfig.id, transientPatch.id);
+
+    AudioPatch patch{};
+    int32_t patchId;
+    AudioPortConfig backMicPortConfig;
+    backMicPortConfig.channelMask = config.base.channelMask;
+    backMicPortConfig.format = config.base.format;
+    backMicPortConfig.sampleRate = aidl::android::media::audio::common::Int{config.base.sampleRate};
+    backMicPortConfig.flags = AudioIoFlags::make<AudioIoFlags::input>(0);
+    backMicPortConfig.ext = createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0);
+    ASSERT_EQ(OK, mMapper->createOrUpdatePatch({backMicPortConfig}, {mixPortConfig}, &patchId,
+                                               &cleanups));
+    cleanups.disarmAll();
+    ASSERT_EQ(android::OK,
+              mMapper->findPortConfig(backMicPortConfig.ext.get<AudioPortExt::device>().device,
+                                      &backMicPortConfig));
+    EXPECT_NE(0, backMicPortConfig.id);
+
+    EXPECT_EQ(transientPatch.id, patchId);
+    auto patches = mModule->getPatches();
+    auto patchIt = findById(patches, patchId);
+    ASSERT_NE(patchIt, patches.end());
+    EXPECT_EQ(std::vector<int32_t>{backMicPortConfig.id}, patchIt->sourcePortConfigIds);
+    EXPECT_EQ(std::vector<int32_t>{mixPortConfig.id}, patchIt->sinkPortConfigIds);
+}
+
+TEST_F(Hal2AidlMapperTest, SetAudioPortConfigGainChangeExistingPortConfig) {
+    // First set config, then update gain.
+    AudioPortConfig speakerPortConfig;
+    speakerPortConfig.ext = createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0);
+    speakerPortConfig.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+            AudioChannelLayout::LAYOUT_STEREO);
+    speakerPortConfig.format =
+            AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+    speakerPortConfig.sampleRate = ::aidl::android::media::audio::common::Int(48000);
+    AudioPortConfig resultingPortConfig;
+    ASSERT_EQ(OK,
+              mMapper->setPortConfig(speakerPortConfig, std::set<int32_t>(), &resultingPortConfig));
+    EXPECT_NE(0, resultingPortConfig.id);
+    EXPECT_NE(0, resultingPortConfig.portId);
+
+    AudioPortConfig gainUpdate;
+    gainUpdate.ext = createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0);
+    AudioGainConfig gainConfig{.index = -1,
+                               .mode = 1 << static_cast<int>(AudioGainMode::JOINT),
+                               .channelMask = AudioChannelLayout{},
+                               .values = std::vector<int32_t>{-3200},
+                               .rampDurationMs = 0};
+    gainUpdate.gain = gainConfig;
+    AudioPortConfig resultingGainUpdate;
+    ASSERT_EQ(OK, mMapper->setPortConfig(gainUpdate, std::set<int32_t>(), &resultingGainUpdate));
+    EXPECT_EQ(resultingPortConfig.id, resultingGainUpdate.id);
+    auto updatedPortConfig = mModule->getPortConfig(resultingGainUpdate.id);
+    ASSERT_TRUE(updatedPortConfig.has_value());
+    ASSERT_TRUE(updatedPortConfig->gain.has_value());
+    EXPECT_EQ(gainConfig, updatedPortConfig->gain);
+}
+
+TEST_F(Hal2AidlMapperTest, SetAudioPortConfigGainChangeFromScratch) {
+    // Set gain as the first operation, the HAL should suggest the rest of the configuration.
+    AudioPortConfig gainSet;
+    gainSet.ext = createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0);
+    AudioGainConfig gainConfig{.index = -1,
+                               .mode = 1 << static_cast<int>(AudioGainMode::JOINT),
+                               .channelMask = AudioChannelLayout{},
+                               .values = std::vector<int32_t>{-3200},
+                               .rampDurationMs = 0};
+    gainSet.gain = gainConfig;
+    AudioPortConfig resultingPortConfig;
+    ASSERT_EQ(OK, mMapper->setPortConfig(gainSet, std::set<int32_t>(), &resultingPortConfig));
+    EXPECT_NE(0, resultingPortConfig.id);
+    EXPECT_NE(0, resultingPortConfig.portId);
+    auto portConfig = mModule->getPortConfig(resultingPortConfig.id);
+    ASSERT_TRUE(portConfig.has_value());
+    ASSERT_TRUE(portConfig->gain.has_value());
+    EXPECT_EQ(gainConfig, portConfig->gain);
+}
diff --git a/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
new file mode 100644
index 0000000..e8731ea
--- /dev/null
+++ b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
@@ -0,0 +1,319 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+#include <unordered_map>
+#define LOG_TAG "EffectHalVersionCompatibilityTest"
+
+#include <EffectHalAidl.h>
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include <aidl/android/hardware/audio/effect/IFactory.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <system/audio_aidl_utils.h>
+#include <system/audio_config.h>
+#include <system/audio_effects/audio_effects_utils.h>
+#include <system/audio_effects/effect_uuid.h>
+#include <utils/Log.h>
+
+using aidl::android::hardware::audio::effect::CommandId;
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::IFactory;
+using aidl::android::hardware::audio::effect::kReopenSupportedVersion;
+using aidl::android::hardware::audio::effect::Parameter;
+using aidl::android::hardware::audio::effect::Processing;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::media::audio::common::AudioUuid;
+using android::OK;
+using android::sp;
+using android::effect::EffectHalAidl;
+using testing::_;
+using testing::Eq;
+
+namespace {
+
+/**
+ * Maps of parameters to the version in which they were introduced.
+ */
+// Parameters defined directly in the Parameter union, except Parameter::specific (covered by
+// kParamEffectVersionMap).
+static const std::unordered_map<Parameter::Tag, int /* version */> kParamTagVersionMap = {
+        {Parameter::common, 1},         {Parameter::deviceDescription, 1},
+        {Parameter::mode, 1},           {Parameter::source, 1},
+        {Parameter::offload, 1},        {Parameter::volumeStereo, 1},
+        {Parameter::sourceMetadata, 2}, {Parameter::sinkMetadata, 2},
+};
+
+// Map of the version in which each effect-specific Parameter::Id tag was introduced.
+// Id tags are defined in the Parameter::Id union, except Parameter::Id::commonTag (covered by
+// kParamTagVersionMap).
+static const std::unordered_map<Parameter::Id::Tag, int /* version */> kParamIdEffectVersionMap = {
+        {Parameter::Id::vendorEffectTag, 1},
+        {Parameter::Id::acousticEchoCancelerTag, 1},
+        {Parameter::Id::automaticGainControlV1Tag, 1},
+        {Parameter::Id::automaticGainControlV2Tag, 1},
+        {Parameter::Id::bassBoostTag, 1},
+        {Parameter::Id::downmixTag, 1},
+        {Parameter::Id::dynamicsProcessingTag, 1},
+        {Parameter::Id::environmentalReverbTag, 1},
+        {Parameter::Id::equalizerTag, 1},
+        {Parameter::Id::hapticGeneratorTag, 1},
+        {Parameter::Id::loudnessEnhancerTag, 1},
+        {Parameter::Id::noiseSuppressionTag, 1},
+        {Parameter::Id::presetReverbTag, 1},
+        {Parameter::Id::virtualizerTag, 1},
+        {Parameter::Id::visualizerTag, 1},
+        {Parameter::Id::volumeTag, 1},
+        {Parameter::Id::spatializerTag, 2},
+};
+// Tags defined in the Parameter::Specific union.
+static const std::unordered_map<Parameter::Specific::Tag, int /* version */>
+        kParamEffectVersionMap = {
+                {Parameter::Specific::vendorEffect, 1},
+                {Parameter::Specific::acousticEchoCanceler, 1},
+                {Parameter::Specific::automaticGainControlV1, 1},
+                {Parameter::Specific::automaticGainControlV2, 1},
+                {Parameter::Specific::bassBoost, 1},
+                {Parameter::Specific::downmix, 1},
+                {Parameter::Specific::dynamicsProcessing, 1},
+                {Parameter::Specific::environmentalReverb, 1},
+                {Parameter::Specific::equalizer, 1},
+                {Parameter::Specific::hapticGenerator, 1},
+                {Parameter::Specific::loudnessEnhancer, 1},
+                {Parameter::Specific::noiseSuppression, 1},
+                {Parameter::Specific::presetReverb, 1},
+                {Parameter::Specific::virtualizer, 1},
+                {Parameter::Specific::visualizer, 1},
+                {Parameter::Specific::volume, 1},
+                {Parameter::Specific::spatializer, 2},
+};
+
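+// Illustrative example of how the maps above are meant to be read (not itself a
+// test): a V1 mock HAL accepts Parameter::volumeStereo (introduced in V1) but
+// rejects Parameter::sourceMetadata (introduced in V2); a tag is supported when
+// its introduction version is <= the mock's reported interface version.
+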
+class MockFactory : public IFactory {
+  public:
+    explicit MockFactory(int version) : IFactory(), mVersion(version) {}
+    MOCK_METHOD(ndk::ScopedAStatus, queryEffects,
+                (const std::optional<AudioUuid>& in_type_uuid,
+                 const std::optional<AudioUuid>& in_impl_uuid,
+                 const std::optional<AudioUuid>& in_proxy_uuid,
+                 std::vector<Descriptor>* _aidl_return),
+                (override));
+
+    MOCK_METHOD(ndk::ScopedAStatus, queryProcessing,
+                (const std::optional<Processing::Type>& in_type,
+                 std::vector<Processing>* _aidl_return),
+                (override));
+
+    MOCK_METHOD(ndk::ScopedAStatus, createEffect,
+                (const AudioUuid& in_impl_uuid, std::shared_ptr<IEffect>* _aidl_return),
+                (override));
+
+    MOCK_METHOD(ndk::ScopedAStatus, destroyEffect, (const std::shared_ptr<IEffect>& in_handle),
+                (override));
+
+    ndk::ScopedAStatus getInterfaceVersion(int32_t* _aidl_return) {
+        *_aidl_return = mVersion;
+        return ndk::ScopedAStatus::ok();
+    }
+
+    // These must be implemented but are not used in this test.
+    ::ndk::SpAIBinder asBinder() { return ::ndk::SpAIBinder(); }
+    bool isRemote() { return false; }
+    ::ndk::ScopedAStatus getInterfaceHash(std::string*) { return ndk::ScopedAStatus::ok(); }
+
+  private:
+    const int mVersion;
+};
+
+class MockEffect : public IEffect {
+  public:
+    explicit MockEffect(int version) : IEffect(), mVersion(version) {}
+    MOCK_METHOD(ndk::ScopedAStatus, open,
+                (const Parameter::Common& common,
+                 const std::optional<Parameter::Specific>& specific,
+                 IEffect::OpenEffectReturn* ret),
+                (override));
+    MOCK_METHOD(ndk::ScopedAStatus, close, (), (override));
+    MOCK_METHOD(binder_status_t, dump, (int fd, const char** args, uint32_t numArgs), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, command, (CommandId id), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, getState, (State * state), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, getDescriptor, (Descriptor * desc), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, destroy, (), ());
+
+    // reopen introduced in version kReopenSupportedVersion
+    ndk::ScopedAStatus reopen(IEffect::OpenEffectReturn*) override {
+        return mVersion < kReopenSupportedVersion
+                       ? ndk::ScopedAStatus::fromStatus(STATUS_UNKNOWN_TRANSACTION)
+                       : ndk::ScopedAStatus::ok();
+    }
+
+    // Accept a parameter only if it was introduced at or before this mock's version.
+    ndk::ScopedAStatus setParameter(const Parameter& param) override {
+        const auto paramTag = param.getTag();
+        switch (paramTag) {
+            case Parameter::common:
+            case Parameter::deviceDescription:
+            case Parameter::mode:
+            case Parameter::source:
+            case Parameter::offload:
+            case Parameter::volumeStereo:
+            case Parameter::sinkMetadata:
+                FALLTHROUGH_INTENDED;
+            case Parameter::sourceMetadata: {
+                if (kParamTagVersionMap.find(paramTag) != kParamTagVersionMap.end() &&
+                    kParamTagVersionMap.at(paramTag) <= mVersion) {
+                    return ndk::ScopedAStatus::ok();
+                }
+                break;
+            }
+            case Parameter::specific: {
+                // TODO
+                break;
+            }
+        }
+        return ndk::ScopedAStatus::fromStatus(STATUS_BAD_VALUE);
+    }
+
+    /**
+     * Only version compatibility is checked here:
+     * @return BAD_VALUE if a tag is not supported by the current AIDL version.
+     * @return OK if a tag is supported by the current AIDL version.
+     */
+    ndk::ScopedAStatus getParameter(const Parameter::Id& id, Parameter*) override {
+        const auto idTag = id.getTag();
+        switch (idTag) {
+            case Parameter::Id::commonTag: {
+                const auto paramTag = id.get<Parameter::Id::commonTag>();
+                if (kParamTagVersionMap.find(paramTag) != kParamTagVersionMap.end() &&
+                    kParamTagVersionMap.at(paramTag) <= mVersion) {
+                    return ndk::ScopedAStatus::ok();
+                }
+                break;
+            }
+            case Parameter::Id::vendorEffectTag:
+            case Parameter::Id::acousticEchoCancelerTag:
+            case Parameter::Id::automaticGainControlV1Tag:
+            case Parameter::Id::automaticGainControlV2Tag:
+            case Parameter::Id::bassBoostTag:
+            case Parameter::Id::downmixTag:
+            case Parameter::Id::dynamicsProcessingTag:
+            case Parameter::Id::environmentalReverbTag:
+            case Parameter::Id::equalizerTag:
+            case Parameter::Id::hapticGeneratorTag:
+            case Parameter::Id::loudnessEnhancerTag:
+            case Parameter::Id::noiseSuppressionTag:
+            case Parameter::Id::presetReverbTag:
+            case Parameter::Id::virtualizerTag:
+            case Parameter::Id::visualizerTag:
+            case Parameter::Id::volumeTag:
+                FALLTHROUGH_INTENDED;
+            case Parameter::Id::spatializerTag: {
+                if (kParamIdEffectVersionMap.find(idTag) != kParamIdEffectVersionMap.end() &&
+                    kParamIdEffectVersionMap.at(idTag) <= mVersion) {
+                    return ndk::ScopedAStatus::ok();
+                }
+                break;
+            }
+        }
+        return ndk::ScopedAStatus::fromStatus(STATUS_BAD_VALUE);
+    }
+
+    ndk::ScopedAStatus getInterfaceVersion(int32_t* _aidl_return) {
+        *_aidl_return = mVersion;
+        return ndk::ScopedAStatus::ok();
+    }
+
+    // These must be implemented but are not used in this test.
+    ::ndk::SpAIBinder asBinder() { return ::ndk::SpAIBinder(); }
+    bool isRemote() { return false; }
+    ::ndk::ScopedAStatus getInterfaceHash(std::string*) { return ndk::ScopedAStatus::ok(); }
+
+  private:
+    const int mVersion;
+};
+
+static const std::vector<AudioUuid> kTestParamUUIDs = {
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidAcousticEchoCanceler(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV1(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV2(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidBassBoost(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidEnvReverb(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidEqualizer(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidNoiseSuppression(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidPresetReverb(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidSpatializer(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidVirtualizer(),
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer(),
+        ::aidl::android::hardware::audio::effect::getEffectUuidNull(),
+};
+static const std::vector<int> kTestParamVersion = {1, 2};  // Effect AIDL HAL versions to test
+
+enum ParamName { UUID, VERSION };
+using TestParam = std::tuple<AudioUuid, int /* version */>;
+
+class EffectHalVersionCompatibilityTest : public ::testing::TestWithParam<TestParam> {
+  public:
+    void SetUp() override {
+        mMockFactory = ndk::SharedRefBase::make<MockFactory>(mVersion);
+        ASSERT_NE(mMockFactory, nullptr);
+        mMockEffect = ndk::SharedRefBase::make<MockEffect>(mVersion);
+        ASSERT_NE(mMockEffect, nullptr);
+        mEffectHalAidl = sp<EffectHalAidl>::make(mMockFactory, mMockEffect, 0, 0, mDesc, false);
+        ASSERT_NE(mEffectHalAidl, nullptr);
+    }
+
+    void TearDown() override {
+        EXPECT_CALL(*mMockFactory, destroyEffect(_));
+        mEffectHalAidl.clear();
+        mMockEffect.reset();
+        mMockFactory.reset();
+    }
+
+  protected:
+    const int mVersion = std::get<VERSION>(GetParam());
+    const AudioUuid mTypeUuid = std::get<UUID>(GetParam());
+    const Descriptor mDesc = {.common.id.type = mTypeUuid};
+    std::shared_ptr<MockFactory> mMockFactory = nullptr;
+    std::shared_ptr<MockEffect> mMockEffect = nullptr;
+    sp<EffectHalAidl> mEffectHalAidl = nullptr;
+};
+
+TEST_P(EffectHalVersionCompatibilityTest, testEffectAidlHalCreateDestroy) {
+    // do nothing
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectHalVersionCompatibilityTestWithVersion, EffectHalVersionCompatibilityTest,
+        ::testing::Combine(testing::ValuesIn(kTestParamUUIDs),
+                           testing::ValuesIn(kTestParamVersion)),
+        [](const testing::TestParamInfo<EffectHalVersionCompatibilityTest::ParamType>& info) {
+            auto version = std::to_string(std::get<VERSION>(info.param));
+            auto uuid = android::audio::utils::toString(std::get<UUID>(info.param));
+            std::string name = "EffectHalVersionCompatibilityTest_V" + version + "_" + uuid;
+            std::replace_if(
+                    name.begin(), name.end(), [](const char c) { return !std::isalnum(c); }, '_');
+            return name;
+        });
+
+}  // namespace
\ No newline at end of file
diff --git a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
index 0cb654c..d783c64 100644
--- a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
+++ b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
@@ -21,10 +21,13 @@
 #include <cstdint>
 #include <cstring>
 #include <memory>
+#include <string>
 #include <utility>
 #define LOG_TAG "EffectsFactoryHalInterfaceTest"
 
 #include <aidl/android/media/audio/common/AudioUuid.h>
+#include <android/media/audio/common/HeadTracking.h>
+#include <android/media/audio/common/Spatialization.h>
 #include <gtest/gtest.h>
 #include <media/AidlConversionCppNdk.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
@@ -40,15 +43,18 @@
 #include <system/audio_effects/effect_hapticgenerator.h>
 #include <system/audio_effects/effect_loudnessenhancer.h>
 #include <system/audio_effects/effect_ns.h>
+#include <system/audio_effects/effect_spatializer.h>
 #include <utils/RefBase.h>
 #include <vibrator/ExternalVibrationUtils.h>
 
 namespace android {
 
-using ::aidl::android::media::audio::common::AudioUuid;
-using ::android::audio::utils::toString;
+using aidl::android::media::audio::common::AudioUuid;
+using android::audio::utils::toString;
 using effect::utils::EffectParamReader;
 using effect::utils::EffectParamWriter;
+using media::audio::common::HeadTracking;
+using media::audio::common::Spatialization;
 
 // EffectsFactoryHalInterface
 TEST(libAudioHalTest, createEffectsFactoryHalInterface) {
@@ -144,34 +150,68 @@
     EXPECT_NE(0, version.getMajorVersion());
 }
 
+enum ParamSetGetType { SET_N_GET, SET_ONLY, GET_ONLY };
 class EffectParamCombination {
   public:
     template <typename P, typename V>
-    void init(const P& p, const V& v, size_t len) {
-        setBuffer.resize(sizeof(effect_param_t) + sizeof(p) + sizeof(v) + 4);
-        getBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
-        expectBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
-        parameterSet =
-                std::make_shared<EffectParamReader>(createEffectParam(setBuffer.data(), p, v));
-        parameterGet =
-                std::make_shared<EffectParamReader>(createEffectParam(getBuffer.data(), p, v));
-        parameterExpect =
-                std::make_shared<EffectParamReader>(createEffectParam(expectBuffer.data(), p, v));
-        valueSize = len;
+    void init(const P& p, const V& v, size_t len, ParamSetGetType type) {
+        if (type != GET_ONLY) {
+            mSetBuffer.resize(sizeof(effect_param_t) + sizeof(p) + sizeof(v) + 4);
+            mParameterSet =
+                    std::make_shared<EffectParamReader>(createEffectParam(mSetBuffer.data(), p, v));
+        }
+
+        if (type != SET_ONLY) {
+            mGetBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
+            mExpectBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
+            mParameterGet =
+                    std::make_shared<EffectParamReader>(createEffectParam(mGetBuffer.data(), p, v));
+            mParameterExpect = std::make_shared<EffectParamReader>(
+                    createEffectParam(mExpectBuffer.data(), p, v));
+            mValueSize = len;
+        }
+        mType = type;
     }
 
-    std::shared_ptr<EffectParamReader> parameterSet; /* setParameter */
-    std::shared_ptr<EffectParamReader> parameterGet; /* getParameter */
-    std::shared_ptr<EffectParamReader> parameterExpect; /* expected from getParameter */
-    size_t valueSize;   /* ValueSize expect to write in reply data buffer */
+    std::shared_ptr<EffectParamReader> mParameterSet;    /* setParameter */
+    std::shared_ptr<EffectParamReader> mParameterGet;    /* getParameter */
+    std::shared_ptr<EffectParamReader> mParameterExpect; /* expected from getParameter */
+    size_t mValueSize = 0ul; /* value size expected to be written into the reply data buffer */
+    ParamSetGetType mType = SET_N_GET;
+
+    std::string toString() {
+        uint32_t command = 0;
+        std::string str = "Command: ";
+        if (mType != GET_ONLY) {
+            str += (OK == mParameterSet->readFromParameter(&command) ? std::to_string(command)
+                                                                     : mParameterSet->toString());
+        } else {
+            str += (OK == mParameterGet->readFromParameter(&command) ? std::to_string(command)
+                                                                     : mParameterGet->toString());
+        }
+        str += "_";
+        str += toString(mType);
+        return str;
+    }
+
+    static std::string toString(ParamSetGetType type) {
+        switch (type) {
+            case SET_N_GET:
+                return "Type:SetAndGet";
+            case SET_ONLY:
+                return "Type:SetOnly";
+            case GET_ONLY:
+                return "Type:GetOnly";
+        }
+    }
 
   private:
-    std::vector<uint8_t> setBuffer;
-    std::vector<uint8_t> getBuffer;
-    std::vector<uint8_t> expectBuffer;
+    std::vector<uint8_t> mSetBuffer;
+    std::vector<uint8_t> mGetBuffer;
+    std::vector<uint8_t> mExpectBuffer;
 
     template <typename P, typename V>
-    EffectParamReader createEffectParam(void* buf, const P& p, const V& v) {
+    static EffectParamReader createEffectParam(void* buf, const P& p, const V& v) {
         effect_param_t* paramRet = (effect_param_t*)buf;
         paramRet->psize = sizeof(P);
         paramRet->vsize = sizeof(V);
@@ -184,48 +224,106 @@
 };
 
 template <typename P, typename V>
-std::shared_ptr<EffectParamCombination> createEffectParamCombination(const P& p, const V& v,
-                                                                     size_t len) {
+std::shared_ptr<EffectParamCombination> createEffectParamCombination(
+        const P& p, const V& v, size_t len, ParamSetGetType type = SET_N_GET) {
     auto comb = std::make_shared<EffectParamCombination>();
-    comb->init(p, v, len);
+    comb->init(p, v, len, type);
     return comb;
 }
 
-enum ParamName { TUPLE_UUID, TUPLE_PARAM_COMBINATION };
-using EffectParamTestTuple =
-        std::tuple<const effect_uuid_t* /* type UUID */, std::shared_ptr<EffectParamCombination>>;
-
+enum ParamName { TUPLE_UUID, TUPLE_IS_INPUT, TUPLE_PARAM_COMBINATION };
+using EffectParamTestTuple = std::tuple<const effect_uuid_t* /* type UUID */, bool /* isInput */,
+                                        std::vector<std::shared_ptr<EffectParamCombination>>>;
 static const effect_uuid_t EXTEND_EFFECT_TYPE_UUID = {
         0xfa81dbde, 0x588b, 0x11ed, 0x9b6a, {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
 constexpr std::array<uint8_t, 10> kVendorExtensionData({0xff, 0x5, 0x50, 0xab, 0xcd, 0x00, 0xbd,
                                                         0xdb, 0xee, 0xff});
-std::vector<EffectParamTestTuple> testPairs = {
-        std::make_tuple(FX_IID_AEC,
-                        createEffectParamCombination(AEC_PARAM_ECHO_DELAY, 0xff /* echoDelayMs */,
-                                                     sizeof(int32_t) /* returnValueSize */)),
-        std::make_tuple(FX_IID_AGC,
-                        createEffectParamCombination(AGC_PARAM_TARGET_LEVEL, 20 /* targetLevel */,
-                                                     sizeof(int16_t) /* returnValueSize */)),
-        std::make_tuple(SL_IID_BASSBOOST,
-                        createEffectParamCombination(BASSBOOST_PARAM_STRENGTH, 20 /* strength */,
-                                                     sizeof(int16_t) /* returnValueSize */)),
-        std::make_tuple(EFFECT_UIID_DOWNMIX,
-                        createEffectParamCombination(DOWNMIX_PARAM_TYPE, DOWNMIX_TYPE_FOLD,
-                                                     sizeof(int16_t) /* returnValueSize */)),
-        std::make_tuple(SL_IID_DYNAMICSPROCESSING,
-                        createEffectParamCombination(
-                                std::array<uint32_t, 2>({DP_PARAM_INPUT_GAIN, 0 /* channel */}),
-                                30 /* gainDb */, sizeof(int32_t) /* returnValueSize */)),
+static std::vector<EffectParamTestTuple> testPairs = {
         std::make_tuple(
-                FX_IID_LOUDNESS_ENHANCER,
-                createEffectParamCombination(LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB, 5 /* gain */,
-                                             sizeof(int32_t) /* returnValueSize */)),
-        std::make_tuple(FX_IID_NS,
-                        createEffectParamCombination(NS_PARAM_LEVEL, 1 /* level */,
-                                                     sizeof(int32_t) /* returnValueSize */)),
-        std::make_tuple(&EXTEND_EFFECT_TYPE_UUID,
-                        createEffectParamCombination(8, kVendorExtensionData,
-                                                     sizeof(kVendorExtensionData)))};
+                FX_IID_AEC, true /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{
+                        createEffectParamCombination(AEC_PARAM_ECHO_DELAY, 0xff /* echoDelayMs */,
+                                                     sizeof(int32_t) /* returnValueSize */)}),
+        std::make_tuple(
+                FX_IID_AGC, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{
+                        createEffectParamCombination(AGC_PARAM_TARGET_LEVEL, 20 /* targetLevel */,
+                                                     sizeof(int16_t) /* returnValueSize */)}),
+        std::make_tuple(
+                SL_IID_BASSBOOST, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{
+                        createEffectParamCombination(BASSBOOST_PARAM_STRENGTH, 20 /* strength */,
+                                                     sizeof(int16_t) /* returnValueSize */)}),
+        std::make_tuple(
+                EFFECT_UIID_DOWNMIX, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{
+                        createEffectParamCombination(DOWNMIX_PARAM_TYPE, DOWNMIX_TYPE_FOLD,
+                                                     sizeof(int16_t) /* returnValueSize */)}),
+        std::make_tuple(
+                SL_IID_DYNAMICSPROCESSING, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+                        std::array<uint32_t, 2>({DP_PARAM_INPUT_GAIN, 0 /* channel */}),
+                        30 /* gainDb */, sizeof(int32_t) /* returnValueSize */)}),
+        std::make_tuple(
+                FX_IID_LOUDNESS_ENHANCER, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+                        LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB, 5 /* gain */,
+                        sizeof(int32_t) /* returnValueSize */)}),
+        std::make_tuple(
+                FX_IID_NS, true /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+                        NS_PARAM_LEVEL, 1 /* level */, sizeof(int32_t) /* returnValueSize */)}),
+        std::make_tuple(
+                FX_IID_SPATIALIZER, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{
+                        createEffectParamCombination(SPATIALIZER_PARAM_LEVEL,
+                                                     SPATIALIZATION_LEVEL_MULTICHANNEL,
+                                                     sizeof(uint8_t), SET_N_GET),
+                        createEffectParamCombination(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+                                                     HeadTracking::Mode::RELATIVE_WORLD,
+                                                     sizeof(uint8_t), SET_N_GET),
+                        createEffectParamCombination(
+                                SPATIALIZER_PARAM_HEAD_TO_STAGE,
+                                std::array<float, 6>{.55f, 0.2f, 1.f, .999f, .43f, 19.f},
+                                sizeof(std::array<float, 6>), SET_ONLY),
+                        createEffectParamCombination(
+                                SPATIALIZER_PARAM_HEADTRACKING_CONNECTION,
+                                std::array<uint32_t, 2>{
+                                        static_cast<uint32_t>(HeadTracking::ConnectionMode::
+                                                                      DIRECT_TO_SENSOR_TUNNEL),
+                                        0x5e /* sensorId */},
+                                sizeof(std::array<uint32_t, 2>), SET_N_GET),
+                        createEffectParamCombination(
+                                SPATIALIZER_PARAM_SUPPORTED_LEVELS,
+                                std::array<Spatialization::Level, 3>{
+                                        Spatialization::Level::NONE,
+                                        Spatialization::Level::MULTICHANNEL,
+                                        Spatialization::Level::BED_PLUS_OBJECTS},
+                                sizeof(std::array<uint8_t, 3>), GET_ONLY),
+                        createEffectParamCombination(SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED, true,
+                                                     sizeof(bool), GET_ONLY),
+                        createEffectParamCombination(SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS,
+                                                     AUDIO_CHANNEL_OUT_5POINT1, sizeof(uint8_t),
+                                                     GET_ONLY),
+                        createEffectParamCombination(
+                                SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES,
+                                std::array<Spatialization::Mode, 2>{
+                                        Spatialization::Mode::BINAURAL,
+                                        Spatialization::Mode::TRANSAURAL},
+                                sizeof(std::array<uint8_t, 2>), GET_ONLY),
+                        createEffectParamCombination(
+                                SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION,
+                                std::array<HeadTracking::ConnectionMode, 3>{
+                                        HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED,
+                                        HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW,
+                                        HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL},
+                                sizeof(std::array<uint8_t, 3>), GET_ONLY),
+                }),
+        std::make_tuple(
+                &EXTEND_EFFECT_TYPE_UUID, false /* isInput */,
+                std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+                        uint32_t{8}, kVendorExtensionData, sizeof(kVendorExtensionData))}),
+};
 
 class libAudioHalEffectParamTest : public ::testing::TestWithParam<EffectParamTestTuple> {
   public:
@@ -233,13 +331,8 @@
         : mParamTuple(GetParam()),
           mFactory(EffectsFactoryHalInterface::create()),
           mTypeUuid(std::get<TUPLE_UUID>(mParamTuple)),
-          mCombination(std::get<TUPLE_PARAM_COMBINATION>(mParamTuple)),
-          mExpectedValue([&]() {
-              std::vector<uint8_t> expectData(mCombination->valueSize);
-              mCombination->parameterExpect->readFromValue(expectData.data(),
-                                                           mCombination->valueSize);
-              return expectData;
-          }()),
+          mCombinations(std::get<TUPLE_PARAM_COMBINATION>(mParamTuple)),
+          mIsInput(std::get<TUPLE_IS_INPUT>(mParamTuple)),
           mDescs([&]() {
               std::vector<effect_descriptor_t> descs;
               if (mFactory && mTypeUuid && OK == mFactory->getDescriptors(mTypeUuid, &descs)) {
@@ -263,7 +356,8 @@
         uint32_t reply = 0;
         uint32_t replySize = sizeof(reply);
         ASSERT_EQ(OK, interface->command(EFFECT_CMD_INIT, 0, nullptr, &replySize, &reply));
-        ASSERT_EQ(OK, interface->command(EFFECT_CMD_SET_CONFIG, sizeof(mEffectConfig),
+
+        ASSERT_EQ(OK, interface->command(EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
                                          &mEffectConfig, &replySize, &reply));
     }
 
@@ -284,60 +378,85 @@
     }
 
     void setAndGetParameter(const sp<EffectHalInterface>& interface) {
-        uint32_t replySize = sizeof(uint32_t);
-        uint8_t reply[replySize];
-        auto parameterSet = mCombination->parameterSet;
-        ASSERT_EQ(OK,
-                  interface->command(EFFECT_CMD_SET_PARAM, (uint32_t)parameterSet->getTotalSize(),
-                                     const_cast<effect_param_t*>(&parameterSet->getEffectParam()),
-                                     &replySize, &reply))
-                << parameterSet->toString();
-        ASSERT_EQ(replySize, sizeof(uint32_t));
+        for (const auto& combination : mCombinations) {
+            uint32_t replySize = kSetParamReplySize;
+            uint8_t reply[replySize];
+            const auto type = combination->mType;
+            if (type != GET_ONLY) {
+                const auto& set = combination->mParameterSet;
+                ASSERT_EQ(OK,
+                          interface->command(EFFECT_CMD_SET_PARAM, (uint32_t)set->getTotalSize(),
+                                             const_cast<effect_param_t*>(&set->getEffectParam()),
+                                             &replySize, &reply))
+                        << set->toString();
+                ASSERT_EQ(replySize, kSetParamReplySize);
+            }
 
-        effect_param_t* getParam =
-                const_cast<effect_param_t*>(&mCombination->parameterGet->getEffectParam());
-        size_t maxReplySize = mCombination->valueSize + sizeof(effect_param_t) +
-                              sizeof(parameterSet->getPaddedParameterSize());
-        replySize = maxReplySize;
-        EXPECT_EQ(OK,
-                  interface->command(EFFECT_CMD_GET_PARAM, (uint32_t)parameterSet->getTotalSize(),
-                                     const_cast<effect_param_t*>(&parameterSet->getEffectParam()),
-                                     &replySize, getParam));
-        EffectParamReader parameterGet(*getParam);
-        EXPECT_EQ(replySize, parameterGet.getTotalSize()) << parameterGet.toString();
-        if (mCombination->valueSize) {
-            std::vector<uint8_t> response(mCombination->valueSize);
-            EXPECT_EQ(OK, parameterGet.readFromValue(response.data(), mCombination->valueSize))
-                    << " try get valueSize " << mCombination->valueSize << " from "
-                    << parameterGet.toString() << " set " << parameterSet->toString();
-            EXPECT_EQ(response, mExpectedValue);
+            if (type != SET_ONLY) {
+                auto get = combination->mParameterGet;
+                auto expect = combination->mParameterExpect;
+                effect_param_t* getParam = const_cast<effect_param_t*>(&get->getEffectParam());
+                size_t maxReplySize = combination->mValueSize + sizeof(effect_param_t) +
+                                      sizeof(expect->getPaddedParameterSize());
+                replySize = maxReplySize;
+                EXPECT_EQ(OK,
+                          interface->command(EFFECT_CMD_GET_PARAM, (uint32_t)expect->getTotalSize(),
+                                             const_cast<effect_param_t*>(&expect->getEffectParam()),
+                                             &replySize, getParam));
+
+                EffectParamReader getReader(*getParam);
+                EXPECT_EQ(replySize, getReader.getTotalSize()) << getReader.toString();
+                if (combination->mValueSize) {
+                    std::vector<uint8_t> expectedData(combination->mValueSize);
+                    EXPECT_EQ(OK, expect->readFromValue(expectedData.data(), expectedData.size()))
+                            << combination->toString();
+                    std::vector<uint8_t> response(combination->mValueSize);
+                    EXPECT_EQ(OK, getReader.readFromValue(response.data(), combination->mValueSize))
+                            << " try get valueSize " << combination->mValueSize << " from:\n"
+                            << getReader.toString() << "\nexpect:\n"
+                            << expect->toString();
+                    EXPECT_EQ(expectedData, response) << combination->toString();
+                }
+            }
         }
     }
 
+    static constexpr size_t kSetParamReplySize = sizeof(uint32_t);
     const EffectParamTestTuple mParamTuple;
     const sp<EffectsFactoryHalInterface> mFactory;
     const effect_uuid_t* mTypeUuid;
-    std::shared_ptr<EffectParamCombination> mCombination;
-    const std::vector<uint8_t> mExpectedValue;
+    std::vector<std::shared_ptr<EffectParamCombination>> mCombinations{};
+    const bool mIsInput;
     const std::vector<effect_descriptor_t> mDescs;
-    std::vector<sp<EffectHalInterface>> mHalInterfaces;
-    effect_config_t mEffectConfig = {.inputCfg = {.accessMode = EFFECT_BUFFER_ACCESS_READ,
-                                                  .format = AUDIO_FORMAT_PCM_FLOAT,
-                                                  .bufferProvider.getBuffer = nullptr,
-                                                  .bufferProvider.releaseBuffer = nullptr,
-                                                  .bufferProvider.cookie = nullptr,
-                                                  .mask = EFFECT_CONFIG_ALL,
-                                                  .samplingRate = 48000,
-                                                  .channels = AUDIO_CHANNEL_IN_STEREO},
-
-                                     .outputCfg = {.accessMode = EFFECT_BUFFER_ACCESS_WRITE,
-                                                   .format = AUDIO_FORMAT_PCM_FLOAT,
-                                                   .bufferProvider.getBuffer = nullptr,
-                                                   .bufferProvider.releaseBuffer = nullptr,
-                                                   .bufferProvider.cookie = nullptr,
-                                                   .mask = EFFECT_CONFIG_ALL,
-                                                   .samplingRate = 48000,
-                                                   .channels = AUDIO_CHANNEL_OUT_STEREO}};
+    std::vector<sp<EffectHalInterface>> mHalInterfaces{};
+    effect_config_t mEffectConfig = {
+            .inputCfg =
+                    {
+                            .buffer = {.frameCount = 0x100},
+                            .samplingRate = 48000,
+                            .channels = mIsInput ? AUDIO_CHANNEL_IN_VOICE_CALL_MONO
+                                                 : AUDIO_CHANNEL_IN_STEREO,
+                            .bufferProvider = {.getBuffer = nullptr,
+                                               .releaseBuffer = nullptr,
+                                               .cookie = nullptr},
+                            .format = AUDIO_FORMAT_PCM_FLOAT,
+                            .accessMode = EFFECT_BUFFER_ACCESS_READ,
+                            .mask = EFFECT_CONFIG_ALL,
+                    },
+            .outputCfg =
+                    {
+                            .buffer = {.frameCount = 0x100},
+                            .samplingRate = 48000,
+                            .channels = mIsInput ? AUDIO_CHANNEL_IN_VOICE_CALL_MONO
+                                                 : AUDIO_CHANNEL_OUT_STEREO,
+                            .bufferProvider = {.getBuffer = nullptr,
+                                               .releaseBuffer = nullptr,
+                                               .cookie = nullptr},
+                            .format = AUDIO_FORMAT_PCM_FLOAT,
+                            .accessMode = EFFECT_BUFFER_ACCESS_WRITE,
+                            .mask = EFFECT_CONFIG_ALL,
+                    },
+    };
 };
 
 TEST_P(libAudioHalEffectParamTest, setAndGetParam) {
@@ -392,7 +511,8 @@
             AudioUuid uuid = ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(
                                      *std::get<TUPLE_UUID>(info.param))
                                      .value();
-            std::string name = "UUID_" + toString(uuid);
+            std::string name = "UUID_" + toString(uuid) + "_";
+            name += std::get<TUPLE_IS_INPUT>(info.param) ? "input" : "output";
             std::replace_if(
                     name.begin(), name.end(), [](const char c) { return !std::isalnum(c); }, '_');
             return name;
@@ -404,6 +524,4 @@
     return RUN_ALL_TESTS();
 }
 
-// TODO: b/263986405 Add multi-thread testing
-
 } // namespace android
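
Every combination above ultimately serializes into an effect_param_t blob whose header carries the parameter and value sizes, which is what the test's createEffectParam() fills in. A minimal standalone sketch of that layout, assuming the standard effect_param_t from <system/audio_effect.h>; the helper name packEffectParam is illustrative and not part of the test:

    #include <cstring>
    #include <system/audio_effect.h>  // effect_param_t

    // Illustrative sketch: pack one parameter/value pair the way the test's
    // createEffectParam() does: header first, then the parameter, then the
    // value starting at the next 32-bit boundary. Returns the total blob size.
    template <typename P, typename V>
    size_t packEffectParam(void* buf, const P& p, const V& v) {
        auto* param = static_cast<effect_param_t*>(buf);
        param->psize = sizeof(P);
        param->vsize = sizeof(V);
        std::memcpy(param->data, &p, sizeof(P));
        const size_t paddedPsize = ((sizeof(P) + 3) / 4) * 4;  // pad to a multiple of 4
        std::memcpy(param->data + paddedPsize, &v, sizeof(V));
        return sizeof(effect_param_t) + paddedPsize + sizeof(V);
    }
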
diff --git a/media/libaudioprocessing/Android.bp b/media/libaudioprocessing/Android.bp
index 6160d7d..c84796e 100644
--- a/media/libaudioprocessing/Android.bp
+++ b/media/libaudioprocessing/Android.bp
@@ -22,10 +22,11 @@
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
 
         // uncomment to disable NEON on architectures that actually do support NEON, for benchmarking
+
+        "-Werror",
         // "-DUSE_NEON=false",
     ],
 
@@ -62,7 +63,7 @@
     header_libs: [
         "libaudiohal_headers",
         "libbase_headers",
-        "libmedia_headers"
+        "libmedia_headers",
     ],
 
     shared_libs: [
@@ -87,8 +88,8 @@
         "AudioMixerBase.cpp",
         "AudioResampler.cpp",
         "AudioResamplerCubic.cpp",
-        "AudioResamplerSinc.cpp",
         "AudioResamplerDyn.cpp",
+        "AudioResamplerSinc.cpp",
     ],
 
     arch: {
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 57b860d..7ef9ff2 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -441,10 +441,10 @@
                 track->prepareForAdjustChannels(mFrameCount);
             }
             } break;
-        case HAPTIC_INTENSITY: {
-            const os::HapticScale hapticIntensity = static_cast<os::HapticScale>(valueInt);
-            if (track->mHapticIntensity != hapticIntensity) {
-                track->mHapticIntensity = hapticIntensity;
+        case HAPTIC_SCALE: {
+            const os::HapticScale hapticScale = *reinterpret_cast<os::HapticScale*>(value);
+            if (track->mHapticScale != hapticScale) {
+                track->mHapticScale = hapticScale;
             }
             } break;
         case HAPTIC_MAX_AMPLITUDE: {
@@ -585,7 +585,7 @@
     t->mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
     // haptic
     t->mHapticPlaybackEnabled = false;
-    t->mHapticIntensity = os::HapticScale::NONE;
+    t->mHapticScale = os::HapticScale::none();
     t->mHapticMaxAmplitude = NAN;
     t->mMixerHapticChannelMask = AUDIO_CHANNEL_NONE;
     t->mMixerHapticChannelCount = 0;
@@ -636,7 +636,7 @@
                 switch (t->mMixerFormat) {
                 // Mixer format should be AUDIO_FORMAT_PCM_FLOAT.
                 case AUDIO_FORMAT_PCM_FLOAT: {
-                    os::scaleHapticData((float*) buffer, sampleCount, t->mHapticIntensity,
+                    os::scaleHapticData((float*) buffer, sampleCount, t->mHapticScale,
                                         t->mHapticMaxAmplitude);
                 } break;
                 default:
diff --git a/media/libaudioprocessing/AudioMixerBase.cpp b/media/libaudioprocessing/AudioMixerBase.cpp
index 3d11d92..7e362f7 100644
--- a/media/libaudioprocessing/AudioMixerBase.cpp
+++ b/media/libaudioprocessing/AudioMixerBase.cpp
@@ -1122,7 +1122,7 @@
                     aux = t->auxBuffer + numFrames;
                 }
                 for (int outFrames = frameCount; outFrames > 0; ) {
-                    // t->in == nullptr can happen if the track was flushed just after having
+                    // t->mIn == nullptr can happen if the track was flushed just after having
                     // been enabled for mixing.
                     if (t->mIn == nullptr) {
                         break;
diff --git a/media/libaudioprocessing/AudioResamplerSinc.cpp b/media/libaudioprocessing/AudioResamplerSinc.cpp
index f2c386d..1a08a03 100644
--- a/media/libaudioprocessing/AudioResamplerSinc.cpp
+++ b/media/libaudioprocessing/AudioResamplerSinc.cpp
@@ -17,7 +17,6 @@
 #define LOG_TAG "AudioResamplerSinc"
 //#define LOG_NDEBUG 0
 
-#define __STDC_CONSTANT_MACROS
 #include <malloc.h>
 #include <pthread.h>
 #include <string.h>
diff --git a/media/libaudioprocessing/audio-resampler/Android.bp b/media/libaudioprocessing/audio-resampler/Android.bp
index 4ea75e7..791ae37 100644
--- a/media/libaudioprocessing/audio-resampler/Android.bp
+++ b/media/libaudioprocessing/audio-resampler/Android.bp
@@ -13,12 +13,12 @@
     srcs: ["AudioResamplerCoefficients.cpp"],
 
     shared_libs: [
-        "libutils",
         "liblog",
+        "libutils",
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 }
diff --git a/media/libaudioprocessing/include/media/AudioMixer.h b/media/libaudioprocessing/include/media/AudioMixer.h
index b39fb92..f558fd5 100644
--- a/media/libaudioprocessing/include/media/AudioMixer.h
+++ b/media/libaudioprocessing/include/media/AudioMixer.h
@@ -49,7 +49,7 @@
         DOWNMIX_TYPE    = 0x4004,
         // for haptic
         HAPTIC_ENABLED  = 0x4007, // Set haptic data from this track should be played or not.
-        HAPTIC_INTENSITY = 0x4008, // Set the intensity to play haptic data.
+        HAPTIC_SCALE = 0x4008, // Set the scale to play haptic data.
         HAPTIC_MAX_AMPLITUDE = 0x4009, // Set the max amplitude allowed for haptic data.
         // for target TIMESTRETCH
         PLAYBACK_RATE   = 0x4300, // Configure timestretch on this track name;
@@ -141,7 +141,7 @@
 
         // Haptic
         bool                 mHapticPlaybackEnabled;
-        os::HapticScale      mHapticIntensity;
+        os::HapticScale      mHapticScale;
         float                mHapticMaxAmplitude;
         audio_channel_mask_t mHapticChannelMask;
         uint32_t             mHapticChannelCount;
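
With HAPTIC_SCALE, the mixer dereferences the value pointer as an os::HapticScale object instead of interpreting the pointer bits as an intensity enum. A hedged caller-side sketch; `mixer` and `trackId` are placeholders, and os::HapticScale::none() is the default value shown earlier in this diff:

    // Illustrative only: hand a full HapticScale object to the track.
    os::HapticScale hapticScale = os::HapticScale::none();
    mixer->setParameter(trackId, AudioMixer::TRACK, AudioMixer::HAPTIC_SCALE,
                        /* value */ &hapticScale);
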
diff --git a/media/libaudioprocessing/tests/Android.bp b/media/libaudioprocessing/tests/Android.bp
index ad402db..ba9b165 100644
--- a/media/libaudioprocessing/tests/Android.bp
+++ b/media/libaudioprocessing/tests/Android.bp
@@ -1,6 +1,7 @@
 // Build the unit tests for libaudioprocessing
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -28,8 +29,8 @@
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 }
 
diff --git a/media/libaudioprocessing/tests/fuzzer/Android.bp b/media/libaudioprocessing/tests/fuzzer/Android.bp
index 8fb6fff..b96ec6b 100644
--- a/media/libaudioprocessing/tests/fuzzer/Android.bp
+++ b/media/libaudioprocessing/tests/fuzzer/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -8,23 +9,23 @@
 }
 
 cc_fuzz {
-  name: "libaudioprocessing_resampler_fuzzer",
-  srcs: [
-    "libaudioprocessing_resampler_fuzzer.cpp",
-  ],
-  defaults: ["libaudioprocessing_test_defaults"],
-  static_libs: [
-    "libsndfile",
-  ],
+    name: "libaudioprocessing_resampler_fuzzer",
+    srcs: [
+        "libaudioprocessing_resampler_fuzzer.cpp",
+    ],
+    defaults: ["libaudioprocessing_test_defaults"],
+    static_libs: [
+        "libsndfile",
+    ],
 }
 
 cc_fuzz {
-  name: "libaudioprocessing_record_buffer_converter_fuzzer",
-  srcs: [
-    "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
-  ],
-  defaults: ["libaudioprocessing_test_defaults"],
-  static_libs: [
-    "libsndfile",
-  ],
+    name: "libaudioprocessing_record_buffer_converter_fuzzer",
+    srcs: [
+        "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
+    ],
+    defaults: ["libaudioprocessing_test_defaults"],
+    static_libs: [
+        "libsndfile",
+    ],
 }
diff --git a/media/libcpustats/Android.bp b/media/libcpustats/Android.bp
index 1ab1de0..2b134a7 100644
--- a/media/libcpustats/Android.bp
+++ b/media/libcpustats/Android.bp
@@ -24,8 +24,8 @@
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     host_supported: true,
diff --git a/media/libeffects/config/Android.bp b/media/libeffects/config/Android.bp
index 293a9c2..1672797 100644
--- a/media/libeffects/config/Android.bp
+++ b/media/libeffects/config/Android.bp
@@ -20,11 +20,11 @@
     ],
 
     shared_libs: [
+        "libcutils",
         "liblog",
+        "libmedia_helper",
         "libtinyxml2",
         "libutils",
-        "libmedia_helper",
-        "libcutils",
     ],
 
     header_libs: [
diff --git a/media/libeffects/data/Android.bp b/media/libeffects/data/Android.bp
new file mode 100644
index 0000000..2acf229
--- /dev/null
+++ b/media/libeffects/data/Android.bp
@@ -0,0 +1,19 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+prebuilt_etc {
+    name: "framework-audio_effects.xml",
+    src: "audio_effects.xml",
+    filename: "audio_effects.xml",
+}
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index b56872c..19b8082 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -1,5 +1,6 @@
 // Multichannel downmix effect library
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_downmix_license",
     ],
@@ -37,9 +38,9 @@
     relative_install_path: "soundfx",
 
     cflags: [
-        "-fvisibility=hidden",
         "-Wall",
         "-Werror",
+        "-fvisibility=hidden",
     ],
 
     header_libs: [
@@ -51,16 +52,16 @@
 cc_library_shared {
     name: "libdownmixaidl",
     srcs: [
-        "aidl/EffectDownmix.cpp",
-        "aidl/DownmixContext.cpp",
         ":effectCommonFile",
+        "aidl/DownmixContext.cpp",
+        "aidl/EffectDownmix.cpp",
     ],
     defaults: [
         "aidlaudioeffectservice_defaults",
     ],
     header_libs: [
         "libaudioeffects",
-        "libhardware_headers"
+        "libhardware_headers",
     ],
     shared_libs: [
         "libaudioutils",
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 5fb44b5..3a55361 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -76,18 +76,15 @@
 
 DownmixContext::DownmixContext(int statusDepth, const Parameter::Common& common)
     : EffectContext(statusDepth, common) {
-    LOG(DEBUG) << __func__;
     mState = DOWNMIX_STATE_UNINITIALIZED;
     init_params(common);
 }
 
 DownmixContext::~DownmixContext() {
-    LOG(DEBUG) << __func__;
     mState = DOWNMIX_STATE_UNINITIALIZED;
 }
 
 RetCode DownmixContext::enable() {
-    LOG(DEBUG) << __func__;
     if (mState != DOWNMIX_STATE_INITIALIZED) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
@@ -96,7 +93,6 @@
 }
 
 RetCode DownmixContext::disable() {
-    LOG(DEBUG) << __func__;
     if (mState != DOWNMIX_STATE_ACTIVE) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
@@ -104,12 +100,6 @@
     return RetCode::SUCCESS;
 }
 
-void DownmixContext::reset() {
-    LOG(DEBUG) << __func__;
-    disable();
-    resetBuffer();
-}
-
 IEffect::Status DownmixContext::downmixProcess(float* in, float* out, int samples) {
     IEffect::Status status = {EX_ILLEGAL_ARGUMENT, 0, 0};
 
@@ -127,7 +117,6 @@
         return status;
     }
 
-    LOG(DEBUG) << __func__ << " start processing";
     bool accumulate = false;
     int frames = samples * sizeof(float) / getInputFrameSize();
     if (mType == Downmix::Type::STRIP) {
@@ -152,9 +141,6 @@
         }
     }
     int producedSamples = (samples / mInputChannelCount) << 1;
-    LOG(DEBUG) << __func__ << " done processing " << samples << " samples, generated "
-               << producedSamples << " frameSize: " << getInputFrameSize() << " - "
-               << getOutputFrameSize();
     return {STATUS_OK, samples, producedSamples};
 }
 
diff --git a/media/libeffects/downmix/aidl/DownmixContext.h b/media/libeffects/downmix/aidl/DownmixContext.h
index a381d7f..1be1508 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.h
+++ b/media/libeffects/downmix/aidl/DownmixContext.h
@@ -32,9 +32,8 @@
   public:
     DownmixContext(int statusDepth, const Parameter::Common& common);
     ~DownmixContext();
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
 
     RetCode setDmType(Downmix::Type type) {
         mType = type;
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index c82c23b..10c7c4f 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -14,10 +14,12 @@
  * limitations under the License.
  */
 
+#define ATRACE_TAG ATRACE_TAG_AUDIO
 #define LOG_TAG "AHAL_DownmixImpl"
 
 #include <android-base/logging.h>
 #include <system/audio_effects/effect_uuid.h>
+#include <utils/Trace.h>
 
 #include "EffectDownmix.h"
 
@@ -36,7 +38,6 @@
     }
     if (instanceSpp) {
         *instanceSpp = ndk::SharedRefBase::make<DownmixImpl>();
-        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
         return EX_NONE;
     } else {
         LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -66,31 +67,10 @@
 
 ndk::ScopedAStatus DownmixImpl::getDescriptor(Descriptor* _aidl_return) {
     RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
-    LOG(DEBUG) << __func__ << kDescriptor.toString();
     *_aidl_return = kDescriptor;
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus DownmixImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->reset();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus DownmixImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::downmix != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
@@ -171,12 +151,16 @@
 }
 
 void DownmixImpl::process() {
+    ATRACE_NAME("Downmix::process");
     /**
      * wait for the EventFlag without lock, it's ok because the mEfGroup pointer will not change
      * in the life cycle of workerThread (threadLoop).
      */
     uint32_t efState = 0;
-    if (!mEventFlag || ::android::OK != mEventFlag->wait(kEventFlagNotEmpty, &efState)) {
+    if (!mEventFlag ||
+        ::android::OK != mEventFlag->wait(mDataMqNotEmptyEf, &efState, 0 /* no timeout */,
+                                          true /* retry */) ||
+        !(efState & mDataMqNotEmptyEf)) {
         LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag invalid";
     }
 
@@ -195,14 +179,14 @@
         const auto availableToWrite = outputMQ->availableToWrite() *
                                       mImplContext->getInputFrameSize() /
                                       mImplContext->getOutputFrameSize();
+        assert(mImplContext->getWorkBufferSize() >=
+               std::max(availableToRead, availableToWrite));
         auto processSamples = std::min(availableToRead, availableToWrite);
         if (processSamples) {
             inputMQ->read(buffer, processSamples);
             IEffect::Status status = effectProcessImpl(buffer, buffer, processSamples);
             outputMQ->write(buffer, status.fmqProduced);
             statusMQ->writeBlocking(&status, 1);
-            LOG(VERBOSE) << getEffectName() << __func__ << ": done processing, effect consumed "
-                        << status.fmqConsumed << " produced " << status.fmqProduced;
         }
     }
 }
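
The wait above keys off mDataMqNotEmptyEf, which the effect's open() selects from the negotiated AIDL version (see the DynamicsProcessing hunk later in this diff); older clients keep signalling the generic kEventFlagNotEmpty bit. A condensed sketch of the pattern, with `version` and `eventFlag` as stand-ins for the members used here:

    // Select the wake-up bit once at open() time...
    const uint32_t dataMqNotEmptyEf =
            version >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;
    // ...then block on exactly that bit in the processing loop, retrying on
    // spurious wake-ups, and bail out if the flag state does not match.
    uint32_t efState = 0;
    if (!eventFlag ||
        ::android::OK != eventFlag->wait(dataMqNotEmptyEf, &efState, 0 /* no timeout */,
                                         true /* retry */) ||
        !(efState & dataMqNotEmptyEf)) {
        return;  // nothing to process this round
    }
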
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.h b/media/libeffects/downmix/aidl/EffectDownmix.h
index 54557dc..cea6d1b 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.h
+++ b/media/libeffects/downmix/aidl/EffectDownmix.h
@@ -28,13 +28,9 @@
   public:
     static const std::string kEffectName;
     static const Descriptor kDescriptor;
-    DownmixImpl() { LOG(DEBUG) << __func__; }
-    ~DownmixImpl() {
-        cleanUp();
-        LOG(DEBUG) << __func__;
-    }
+    DownmixImpl() = default;
+    ~DownmixImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
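
Removing commandImpl() here (and in DynamicsProcessing below) relies on the shared effect implementation forwarding the generic commands to the context's virtual enable()/disable()/reset(), which the contexts now override. A hedged sketch of what such a default dispatch looks like; the actual base-class code is not part of this diff:

    ndk::ScopedAStatus EffectImpl::commandImpl(CommandId command) {
        RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
        switch (command) {
            case CommandId::START:
                mImplContext->enable();
                break;
            case CommandId::STOP:
                mImplContext->disable();
                break;
            case CommandId::RESET:
                mImplContext->disable();
                mImplContext->reset();
                break;
            default:
                // Vendor-extended command ids still need effect-specific handling.
                return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                        "commandIdNotSupported");
        }
        return ndk::ScopedAStatus::ok();
    }
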
diff --git a/media/libeffects/downmix/benchmark/Android.bp b/media/libeffects/downmix/benchmark/Android.bp
index 10f14e2..5b62a0c 100644
--- a/media/libeffects/downmix/benchmark/Android.bp
+++ b/media/libeffects/downmix/benchmark/Android.bp
@@ -1,5 +1,6 @@
 // Build testbench for downmix module.
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_downmix_license"
diff --git a/media/libeffects/downmix/tests/Android.bp b/media/libeffects/downmix/tests/Android.bp
index 392a6fa..77d8f83 100644
--- a/media/libeffects/downmix/tests/Android.bp
+++ b/media/libeffects/downmix/tests/Android.bp
@@ -1,5 +1,6 @@
 // Build testbench for downmix module.
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_downmix_license"
@@ -14,7 +15,7 @@
 //
 // Use "atest downmix_tests" to run.
 cc_test {
-    name:"downmix_tests",
+    name: "downmix_tests",
     gtest: true,
     host_supported: true,
     vendor: true,
@@ -45,7 +46,7 @@
 // test application and outputs then compares files in a local directory
 // on device (/data/local/tmp/downmixtest/).
 cc_test {
-    name:"downmixtest",
+    name: "downmixtest",
     host_supported: false,
     proprietary: true,
 
diff --git a/media/libeffects/dynamicsproc/Android.bp b/media/libeffects/dynamicsproc/Android.bp
index e93a4e6..12477a4 100644
--- a/media/libeffects/dynamicsproc/Android.bp
+++ b/media/libeffects/dynamicsproc/Android.bp
@@ -33,7 +33,7 @@
 }
 
 cc_defaults {
-    name : "dynamicsprocessingdefaults",
+    name: "dynamicsprocessingdefaults",
     srcs: [
         "dsp/DPBase.cpp",
         "dsp/DPFrequency.cpp",
@@ -50,9 +50,9 @@
         "libeigen",
     ],
     cflags: [
-        "-Wthread-safety",
         "-Wall",
         "-Werror",
+        "-Wthread-safety",
     ],
     relative_install_path: "soundfx",
 }
@@ -80,9 +80,9 @@
     name: "libdynamicsprocessingaidl",
 
     srcs: [
+        ":effectCommonFile",
         "aidl/DynamicsProcessing.cpp",
         "aidl/DynamicsProcessingContext.cpp",
-        ":effectCommonFile",
     ],
 
     defaults: [
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index 1fedea4..8324473 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -41,7 +41,6 @@
     }
     if (instanceSpp) {
         *instanceSpp = ndk::SharedRefBase::make<DynamicsProcessingImpl>();
-        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
         return EX_NONE;
     } else {
         LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -206,7 +205,6 @@
 ndk::ScopedAStatus DynamicsProcessingImpl::open(const Parameter::Common& common,
                                                 const std::optional<Parameter::Specific>& specific,
                                                 OpenEffectReturn* ret) {
-    LOG(DEBUG) << __func__;
     // effect only support 32bits float
     RETURN_IF(common.input.base.format.pcm != common.output.base.format.pcm ||
                       common.input.base.format.pcm != PcmType::FLOAT_32_BIT,
@@ -215,7 +213,12 @@
     RETURN_OK_IF(mState != State::INIT);
     mImplContext = createContext(common);
     RETURN_IF(!mContext || !mImplContext, EX_NULL_POINTER, "createContextFailed");
+    RETURN_IF(!getInterfaceVersion(&mVersion).isOk(), EX_UNSUPPORTED_OPERATION,
+              "FailedToGetInterfaceVersion");
+    mImplContext->setVersion(mVersion);
     mEventFlag = mImplContext->getStatusEventFlag();
+    mDataMqNotEmptyEf =
+            mVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;
 
     if (specific.has_value()) {
         RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
@@ -229,39 +232,18 @@
 
     mState = State::IDLE;
     mContext->dupeFmq(ret);
-    RETURN_IF(createThread(getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
-              "FailedToCreateWorker");
+    RETURN_IF(createThread(getEffectNameWithVersion()) != RetCode::SUCCESS,
+              EX_UNSUPPORTED_OPERATION, "FailedToCreateWorker");
+    LOG(INFO) << getEffectNameWithVersion() << __func__;
     return ndk::ScopedAStatus::ok();
 }
 
 ndk::ScopedAStatus DynamicsProcessingImpl::getDescriptor(Descriptor* _aidl_return) {
     RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
-    LOG(DEBUG) << __func__ << kDescriptor.toString();
     *_aidl_return = kDescriptor;
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus DynamicsProcessingImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            return ndk::ScopedAStatus::ok();
-        case CommandId::STOP:
-            mContext->disable();
-            return ndk::ScopedAStatus::ok();
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            return ndk::ScopedAStatus::ok();
-        default:
-            // Need this default handling for vendor extendable CommandId::VENDOR_COMMAND_*
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-}
-
 bool DynamicsProcessingImpl::isParamInRange(const Parameter::Specific& specific) {
     auto& dp = specific.get<Parameter::Specific::dynamicsProcessing>();
     return DynamicsProcessingRanges::isParamInRange(dp, kRanges);
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
index 4897888..b34cdcf 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -30,16 +30,12 @@
     static const Descriptor kDescriptor;
     static const Capability kCapability;
 
-    DynamicsProcessingImpl() { LOG(DEBUG) << __func__; }
-    ~DynamicsProcessingImpl() {
-        cleanUp();
-        LOG(DEBUG) << __func__;
-    }
+    DynamicsProcessingImpl() = default;
+    ~DynamicsProcessingImpl() { cleanUp(); }
 
     ndk::ScopedAStatus open(const Parameter::Common& common,
                             const std::optional<Parameter::Specific>& specific,
                             OpenEffectReturn* ret) override;
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 042b063..fd4e615 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -29,16 +29,10 @@
 DynamicsProcessingContext::DynamicsProcessingContext(int statusDepth,
                                                      const Parameter::Common& common)
     : EffectContext(statusDepth, common) {
-    LOG(DEBUG) << __func__;
     init();
 }
 
-DynamicsProcessingContext::~DynamicsProcessingContext() {
-    LOG(DEBUG) << __func__;
-}
-
 RetCode DynamicsProcessingContext::enable() {
-    std::lock_guard lg(mMutex);
     if (mState != DYNAMICS_PROCESSING_STATE_INITIALIZED) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
@@ -47,7 +41,6 @@
 }
 
 RetCode DynamicsProcessingContext::disable() {
-    std::lock_guard lg(mMutex);
     if (mState != DYNAMICS_PROCESSING_STATE_ACTIVE) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
@@ -55,11 +48,11 @@
     return RetCode::SUCCESS;
 }
 
-void DynamicsProcessingContext::reset() {
-    std::lock_guard lg(mMutex);
+RetCode DynamicsProcessingContext::reset() {
     if (mDpFreq != nullptr) {
-        mDpFreq.reset();
+        mDpFreq->reset();
     }
+    return RetCode::SUCCESS;
 }
 
 RetCode DynamicsProcessingContext::setCommon(const Parameter::Common& common) {
@@ -68,12 +61,10 @@
     }
     mCommon = common;
     init();
-    LOG(INFO) << __func__ << common.toString();
     return RetCode::SUCCESS;
 }
 
 RetCode DynamicsProcessingContext::setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) {
-    std::lock_guard lg(mMutex);
     dp_fx::DPChannel* leftChannel = mDpFreq->getChannel(0);
     dp_fx::DPChannel* rightChannel = mDpFreq->getChannel(1);
     if (leftChannel != nullptr) {
@@ -99,8 +90,8 @@
     int32_t sampleRate = mCommon.input.base.sampleRate;
     int32_t minBlockSize = (int32_t)dp_fx::DPFrequency::getMinBockSize();
     int32_t block = engine.preferredProcessingDurationMs * sampleRate / 1000.0f;
-    LOG(INFO) << __func__ << " sampleRate " << sampleRate << " block length "
-              << engine.preferredProcessingDurationMs << " ms (" << block << "samples)";
+    LOG(VERBOSE) << __func__ << " sampleRate " << sampleRate << " block length "
+                 << engine.preferredProcessingDurationMs << " ms (" << block << " samples)";
     if (block < minBlockSize) {
         block = minBlockSize;
     } else if (!powerof2(block)) {
@@ -112,7 +103,6 @@
 
 RetCode DynamicsProcessingContext::setEngineArchitecture(
         const DynamicsProcessing::EngineArchitecture& engineArchitecture) {
-    std::lock_guard lg(mMutex);
     if (!mEngineInited || mEngineArchitecture != engineArchitecture) {
         if (engineArchitecture.resolutionPreference ==
             DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION) {
@@ -124,36 +114,26 @@
         mEngineInited = true;
         mEngineArchitecture = engineArchitecture;
     }
-    LOG(INFO) << __func__ << engineArchitecture.toString();
     return RetCode::SUCCESS;
 }
 
 RetCode DynamicsProcessingContext::setPreEq(
         const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
-    std::lock_guard lg(mMutex);
-    return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.preEqStage.inUse,
-                                        StageType::PREEQ);
+    return setDpChannels_l<dp_fx::DPEq>(channels, StageType::PREEQ);
 }
 
 RetCode DynamicsProcessingContext::setPostEq(
         const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
-    std::lock_guard lg(mMutex);
-    return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.postEqStage.inUse,
-                                        StageType::POSTEQ);
+    return setDpChannels_l<dp_fx::DPEq>(channels, StageType::POSTEQ);
 }
 
 RetCode DynamicsProcessingContext::setMbc(
         const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
-    std::lock_guard lg(mMutex);
-    return setDpChannels_l<dp_fx::DPMbc>(channels, mEngineArchitecture.mbcStage.inUse,
-                                         StageType::MBC);
+    return setDpChannels_l<dp_fx::DPMbc>(channels, StageType::MBC);
 }
 
 RetCode DynamicsProcessingContext::setPreEqBand(
         const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
-    std::lock_guard lg(mMutex);
-    RETURN_VALUE_IF(!mEngineArchitecture.preEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "preEqNotInUse");
     RETURN_VALUE_IF(
             !validateBandConfig(bands, mChannelCount, mEngineArchitecture.preEqStage.bandCount),
             RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -162,9 +142,6 @@
 
 RetCode DynamicsProcessingContext::setPostEqBand(
         const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
-    std::lock_guard lg(mMutex);
-    RETURN_VALUE_IF(!mEngineArchitecture.postEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "postEqNotInUse");
     RETURN_VALUE_IF(
             !validateBandConfig(bands, mChannelCount, mEngineArchitecture.postEqStage.bandCount),
             RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -173,9 +150,6 @@
 
 RetCode DynamicsProcessingContext::setMbcBand(
         const std::vector<DynamicsProcessing::MbcBandConfig>& bands) {
-    std::lock_guard lg(mMutex);
-    RETURN_VALUE_IF(!mEngineArchitecture.mbcStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "mbcNotInUse");
     RETURN_VALUE_IF(
             !validateBandConfig(bands, mChannelCount, mEngineArchitecture.mbcStage.bandCount),
             RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -184,9 +158,6 @@
 
 RetCode DynamicsProcessingContext::setLimiter(
         const std::vector<DynamicsProcessing::LimiterConfig>& limiters) {
-    std::lock_guard lg(mMutex);
-    RETURN_VALUE_IF(!mEngineArchitecture.limiterInUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "limiterNotInUse");
     RETURN_VALUE_IF(!validateLimiterConfig(limiters, mChannelCount),
                     RetCode::ERROR_ILLEGAL_PARAMETER, "limiterConfigNotValid");
     return setBands_l<DynamicsProcessing::LimiterConfig>(limiters, StageType::LIMITER);
@@ -194,15 +165,12 @@
 
 RetCode DynamicsProcessingContext::setInputGain(
         const std::vector<DynamicsProcessing::InputGain>& inputGains) {
-    std::lock_guard lg(mMutex);
     RETURN_VALUE_IF(!validateInputGainConfig(inputGains, mChannelCount),
                     RetCode::ERROR_ILLEGAL_PARAMETER, "inputGainNotValid");
     return setBands_l<DynamicsProcessing::InputGain>(inputGains, StageType::INPUTGAIN);
 }
 
 DynamicsProcessing::EngineArchitecture DynamicsProcessingContext::getEngineArchitecture() {
-    std::lock_guard lg(mMutex);
-    LOG(INFO) << __func__ << mEngineArchitecture.toString();
     return mEngineArchitecture;
 }
 
@@ -228,8 +196,6 @@
 
 std::vector<DynamicsProcessing::MbcBandConfig> DynamicsProcessingContext::getMbcBand() {
     std::vector<DynamicsProcessing::MbcBandConfig> bands;
-
-    std::lock_guard lg(mMutex);
     auto maxBand = mEngineArchitecture.mbcStage.bandCount;
     for (int32_t ch = 0; ch < mChannelCount; ch++) {
         auto mbc = getMbc_l(ch);
@@ -261,8 +227,6 @@
 
 std::vector<DynamicsProcessing::LimiterConfig> DynamicsProcessingContext::getLimiter() {
     std::vector<DynamicsProcessing::LimiterConfig> ret;
-
-    std::lock_guard lg(mMutex);
     for (int32_t ch = 0; ch < mChannelCount; ch++) {
         auto limiter = getLimiter_l(ch);
         if (!limiter) {
@@ -282,8 +246,6 @@
 
 std::vector<DynamicsProcessing::InputGain> DynamicsProcessingContext::getInputGain() {
     std::vector<DynamicsProcessing::InputGain> ret;
-
-    std::lock_guard lg(mMutex);
     for (int32_t ch = 0; ch < mChannelCount; ch++) {
         auto channel = getChannel_l(ch);
         if (!channel) {
@@ -295,26 +257,20 @@
 }
 
 IEffect::Status DynamicsProcessingContext::dpeProcess(float* in, float* out, int samples) {
-    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
 
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!in, status, "nullInput");
     RETURN_VALUE_IF(!out, status, "nullOutput");
     status = {EX_ILLEGAL_STATE, 0, 0};
 
-    LOG(DEBUG) << __func__ << " start processing";
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(mState != DynamicsProcessingState::DYNAMICS_PROCESSING_STATE_ACTIVE, status,
-                        "notInActiveState");
-        RETURN_VALUE_IF(!mDpFreq, status, "engineNotInited");
-        mDpFreq->processSamples(in, out, samples);
-    }
+    RETURN_VALUE_IF(mState != DynamicsProcessingState::DYNAMICS_PROCESSING_STATE_ACTIVE, status,
+                    "notInActiveState");
+    RETURN_VALUE_IF(!mDpFreq, status, "engineNotInited");
+    mDpFreq->processSamples(in, out, samples);
     return {STATUS_OK, samples, samples};
 }
 
 void DynamicsProcessingContext::init() {
-    std::lock_guard lg(mMutex);
     if (mState == DYNAMICS_PROCESSING_STATE_UNINITIALIZED) {
         mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
     }
@@ -399,7 +355,6 @@
         StageType type) {
     std::vector<DynamicsProcessing::ChannelConfig> ret;
 
-    std::lock_guard lg(mMutex);
     for (int32_t ch = 0; ch < mChannelCount; ch++) {
         auto stage = getStageWithType_l(type, ch);
         if (!stage) {
@@ -414,7 +369,6 @@
         StageType type) {
     std::vector<DynamicsProcessing::EqBandConfig> eqBands;
 
-    std::lock_guard lg(mMutex);
     auto maxBand = mEngineArchitecture.preEqStage.bandCount;
     for (int32_t ch = 0; ch < mChannelCount; ch++) {
         auto eq = getEqWithType_l(type, ch);
@@ -455,9 +409,7 @@
         }
         freqs[band.band] = band.cutoffFrequencyHz;
     }
-    return std::is_sorted(freqs.begin(), freqs.end(), [](const auto& a, const auto& b) {
-        return a.second <= b.second; //index is already sorted as map key
-    });
+    return true;
 }
 
 bool DynamicsProcessingContext::validateLimiterConfig(
@@ -478,17 +430,10 @@
 
 template <typename D>
 RetCode DynamicsProcessingContext::setDpChannels_l(
-        const std::vector<DynamicsProcessing::ChannelConfig>& channels, bool stageInUse,
-        StageType type) {
+        const std::vector<DynamicsProcessing::ChannelConfig>& channels, StageType type) {
     RetCode ret = RetCode::SUCCESS;
     std::unordered_set<int> channelSet;
 
-    if (!stageInUse) {
-        LOG(WARNING) << __func__ << " not in use " << ::android::internal::ToString(channels);
-        return RetCode::SUCCESS;
-    }
-
-    RETURN_VALUE_IF(!stageInUse, RetCode::ERROR_ILLEGAL_PARAMETER, "stageNotInUse");
     for (auto& it : channels) {
         if (0 != channelSet.count(it.channel)) {
             LOG(WARNING) << __func__ << " duplicated channel " << it.channel;
@@ -509,7 +454,6 @@
             continue;
         }
         if (dp->isEnabled() != it.enable) {
-            LOG(INFO) << __func__ << it.toString();
             dp->setEnabled(it.enable);
         }
     }
@@ -590,7 +534,6 @@
             ret = RetCode::ERROR_ILLEGAL_PARAMETER;
             continue;
         }
-        LOG(INFO) << __func__ << it.toString();
     }
     return ret;
 }
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index 839c6dd..15c6811 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -16,7 +16,6 @@
 
 #pragma once
 
-#include <android-base/thread_annotations.h>
 #include <audio_effects/effect_dynamicsprocessing.h>
 
 #include "effect-impl/EffectContext.h"
@@ -37,11 +36,10 @@
 class DynamicsProcessingContext final : public EffectContext {
   public:
     DynamicsProcessingContext(int statusDepth, const Parameter::Common& common);
-    ~DynamicsProcessingContext();
-
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    ~DynamicsProcessingContext() = default;
+    RetCode enable() override;
+    RetCode disable() override;
+    RetCode reset() override;
 
     // override EffectContext::setCommon to update mChannelCount
     RetCode setCommon(const Parameter::Common& common) override;
@@ -73,12 +71,11 @@
   private:
     static constexpr float kPreferredProcessingDurationMs = 10.0f;
     static constexpr int kBandCount = 5;
-    std::mutex mMutex;
-    int mChannelCount GUARDED_BY(mMutex) = 0;
-    DynamicsProcessingState mState GUARDED_BY(mMutex) = DYNAMICS_PROCESSING_STATE_UNINITIALIZED;
-    std::unique_ptr<dp_fx::DPFrequency> mDpFreq GUARDED_BY(mMutex) = nullptr;
-    bool mEngineInited GUARDED_BY(mMutex) = false;
-    DynamicsProcessing::EngineArchitecture mEngineArchitecture GUARDED_BY(mMutex) = {
+    int mChannelCount = 0;
+    DynamicsProcessingState mState = DYNAMICS_PROCESSING_STATE_UNINITIALIZED;
+    std::unique_ptr<dp_fx::DPFrequency> mDpFreq = nullptr;
+    bool mEngineInited = false;
+    DynamicsProcessing::EngineArchitecture mEngineArchitecture = {
             .resolutionPreference =
                     DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION,
             .preferredProcessingDurationMs = kPreferredProcessingDurationMs,
@@ -92,22 +89,21 @@
 
     void init();
 
-    void dpSetFreqDomainVariant_l(const DynamicsProcessing::EngineArchitecture& engine)
-            REQUIRES(mMutex);
-    dp_fx::DPChannel* getChannel_l(int ch) REQUIRES(mMutex);
-    dp_fx::DPEq* getPreEq_l(int ch) REQUIRES(mMutex);
-    dp_fx::DPEq* getPostEq_l(int ch) REQUIRES(mMutex);
-    dp_fx::DPMbc* getMbc_l(int ch) REQUIRES(mMutex);
-    dp_fx::DPLimiter* getLimiter_l(int ch) REQUIRES(mMutex);
-    dp_fx::DPBandStage* getStageWithType_l(StageType type, int ch) REQUIRES(mMutex);
-    dp_fx::DPEq* getEqWithType_l(StageType type, int ch) REQUIRES(mMutex);
+    void dpSetFreqDomainVariant_l(const DynamicsProcessing::EngineArchitecture& engine);
+    dp_fx::DPChannel* getChannel_l(int ch);
+    dp_fx::DPEq* getPreEq_l(int ch);
+    dp_fx::DPEq* getPostEq_l(int ch);
+    dp_fx::DPMbc* getMbc_l(int ch);
+    dp_fx::DPLimiter* getLimiter_l(int ch);
+    dp_fx::DPBandStage* getStageWithType_l(StageType type, int ch);
+    dp_fx::DPEq* getEqWithType_l(StageType type, int ch);
     template <typename D>
     RetCode setDpChannels_l(const std::vector<DynamicsProcessing::ChannelConfig>& channels,
-                            bool stageInUse, StageType type) REQUIRES(mMutex);
+                            StageType type);
     template <typename T /* BandConfig */>
-    RetCode setBands_l(const std::vector<T>& bands, StageType type) REQUIRES(mMutex);
+    RetCode setBands_l(const std::vector<T>& bands, StageType type);
     RetCode setDpChannelBand_l(const std::any& anyConfig, StageType type,
-                               std::set<std::pair<int, int>>& chBandSet) REQUIRES(mMutex);
+                               std::set<std::pair<int, int>>& chBandSet);
 
     std::vector<DynamicsProcessing::EqBandConfig> getEqBandConfigs(StageType type);
     std::vector<DynamicsProcessing::ChannelConfig> getChannelConfig(StageType type);
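
Dropping mMutex and the GUARDED_BY/REQUIRES annotations shifts serialization to the owning effect instance: every call into the context is expected to run with the effect's mImplMutex already held, so the context itself stays lock-free. A minimal sketch of the assumed calling pattern (only mImplMutex and mContext are taken from this diff; the surrounding call is illustrative):

    // Hedged sketch: the AIDL effect wrapper serializes access with its own
    // mImplMutex, so DynamicsProcessingContext no longer takes a lock of its own.
    {
        std::lock_guard lg(mImplMutex);
        mContext->setEngineArchitecture(engineArchitecture);  // safe: caller holds mImplMutex
    }
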
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index d94093e..9be45a5 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -20,21 +21,21 @@
     name: "libeffects",
     vendor: true,
     srcs: [
-         "EffectsFactory.c",
-         "EffectsConfigLoader.c",
-         "EffectsFactoryState.c",
-         "EffectsXmlConfigLoader.cpp",
+        "EffectsConfigLoader.c",
+        "EffectsFactory.c",
+        "EffectsFactoryState.c",
+        "EffectsXmlConfigLoader.cpp",
     ],
 
     shared_libs: [
         "libcutils",
-        "liblog",
         "libdl",
         "libeffectsconfig",
+        "liblog",
     ],
     cflags: ["-fvisibility=hidden"],
 
-    local_include_dirs:["include/media"],
+    local_include_dirs: ["include/media"],
 
     header_libs: [
         "libaudioeffects",
@@ -53,13 +54,16 @@
 
     cflags: [
         "-Wall",
-        "-Wextra",
         "-Werror",
+        "-Wextra",
     ],
 
     shared_libs: [
-        "libeffectsconfig",
         "libeffects",
+        "libeffectsconfig",
     ],
-    local_include_dirs:[".", "include"],
+    local_include_dirs: [
+        ".",
+        "include",
+    ],
 }
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index 7d96b53..e4ac38e 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -23,7 +23,7 @@
 }
 
 cc_defaults {
-    name : "hapticgeneratordefaults",
+    name: "hapticgeneratordefaults",
     srcs: [
         "Processors.cpp",
     ],
@@ -37,6 +37,14 @@
     header_libs: [
         "libaudioeffects",
     ],
+    cflags: [
+        // This is needed for the non-zero coefficients optimization for
+        // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
+        // with/without `-ffast-math` for more context.
+        "-ffast-math",
+        "-fhonor-infinities",
+        "-fhonor-nans",
+    ],
     relative_install_path: "soundfx",
 }
 
@@ -54,13 +62,11 @@
     ],
 
     cflags: [
-        "-O2", // Turning on the optimization in order to reduce effect processing time.
-               // The latency is around 1/5 less than without the optimization.
+        // Turning on the optimization in order to reduce effect processing time.
+        // The latency is around 1/5 less than without the optimization.
+        "-O2",
         "-Wall",
         "-Werror",
-        "-ffast-math", // This is needed for the non-zero coefficients optimization for
-                       // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
-                       // with/without `-ffast-math` for more context.
         "-fvisibility=hidden",
     ],
 }
@@ -69,9 +75,9 @@
     name: "libhapticgeneratoraidl",
 
     srcs: [
+        ":effectCommonFile",
         "aidl/EffectHapticGenerator.cpp",
         "aidl/HapticGeneratorContext.cpp",
-        ":effectCommonFile",
     ],
 
     defaults: [
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index e89e501..0c7ea7f 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -35,12 +35,15 @@
 #include <audio_utils/format.h>
 #include <audio_utils/safe_math.h>
 #include <system/audio.h>
+#include <system/audio_effects/audio_effects_utils.h>
 
 static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
 static constexpr float DEFAULT_BSF_ZERO_Q = 8.0f;
 static constexpr float DEFAULT_BSF_POLE_Q = 4.0f;
 static constexpr float DEFAULT_DISTORTION_OUTPUT_GAIN = 1.5f;
 
+using android::effect::utils::EffectParamReader;
+
 // This is the only symbol that needs to be exported
 __attribute__ ((visibility ("default")))
 audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
@@ -114,10 +117,11 @@
     std::stringstream ss;
     ss << "\t\tHaptic setting:\n";
     ss << "\t\t- tracks intensity map:\n";
-    for (const auto&[id, intensity] : param.id2Intensity) {
-        ss << "\t\t\t- id=" << id << ", intensity=" << (int) intensity;
+    for (const auto&[id, hapticScale] : param.id2HapticScale) {
+        ss << "\t\t\t- id=" << id << ", hapticLevel=" << (int) hapticScale.getLevel()
+           << ", adaptiveScaleFactor=" << hapticScale.getAdaptiveScaleFactor();
     }
-    ss << "\t\t- max intensity: " << (int) param.maxHapticIntensity << '\n';
+    ss << "\t\t- max scale level: " << (int) param.maxHapticScale.getLevel() << '\n';
     ss << "\t\t- max haptic amplitude: " << param.maxHapticAmplitude << '\n';
     return ss.str();
 }
@@ -145,7 +149,7 @@
     memset(context->param.hapticChannelSource, 0, sizeof(context->param.hapticChannelSource));
     context->param.hapticChannelCount = 0;
     context->param.audioChannelCount = 0;
-    context->param.maxHapticIntensity = os::HapticScale::MUTE;
+    context->param.maxHapticScale = os::HapticScale::mute();
 
     context->param.resonantFrequency = DEFAULT_RESONANT_FREQUENCY;
     context->param.bpfQ = 1.0f;
@@ -306,37 +310,53 @@
     return 0;
 }
 
-int HapticGenerator_SetParameter(struct HapticGeneratorContext *context,
-                                 int32_t param,
-                                 uint32_t size,
-                                 void *value) {
-    switch (param) {
+int HapticGenerator_SetParameter(struct HapticGeneratorContext *context, effect_param_t* param) {
+    if (param == nullptr) {
+        ALOGE("%s invalid effect_param_t is nullptr", __func__);
+        return -EINVAL;
+    }
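+    // effect_param_t packs the parameter field(s) followed by the value payload; read the
+    // parameter type first so we can dispatch on it below.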
+    int32_t paramType;
+    EffectParamReader reader(*param);
+    reader.readFromParameter(&paramType);
+
+    switch (paramType) {
     case HG_PARAM_HAPTIC_INTENSITY: {
-        if (value == nullptr || size != (uint32_t) (2 * sizeof(int))) {
+        if (param->vsize != (sizeof(int32_t) + sizeof(os::HapticScale))) {
+            ALOGE("%s invalid haptic intensity param size %s", __func__, reader.toString().c_str());
             return -EINVAL;
         }
-        int id = *(int *) value;
-        os::HapticScale hapticIntensity = static_cast<os::HapticScale>(*((int *) value + 1));
-        ALOGD("Setting haptic intensity as %d", hapticIntensity);
-        if (hapticIntensity == os::HapticScale::MUTE) {
-            context->param.id2Intensity.erase(id);
-        } else {
-            context->param.id2Intensity.emplace(id, hapticIntensity);
+        int32_t paramId;
+        os::HapticScale hapticScale;
+        if (reader.readFromValue(&paramId) != OK || reader.readFromValue(&hapticScale) != OK) {
+            ALOGE("%s error reading haptic intensity %s", __func__, reader.toString().c_str());
+            return -EINVAL;
         }
-        context->param.maxHapticIntensity = hapticIntensity;
-        for (const auto&[id, intensity] : context->param.id2Intensity) {
-            context->param.maxHapticIntensity = std::max(
-                    context->param.maxHapticIntensity, intensity);
+        ALOGD("Updating haptic scale, %s", hapticScale.toString().c_str());
+        if (hapticScale.isScaleMute()) {
+            context->param.id2HapticScale.erase(paramId);
+        } else {
+            context->param.id2HapticScale.emplace(paramId, hapticScale);
+        }
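+        // Recompute the effective max scale: start from the newly received scale and keep
+        // the highest level found across the tracks remaining in the map.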
+        context->param.maxHapticScale = hapticScale;
+        for (const auto&[id, scale] : context->param.id2HapticScale) {
+            if (scale.getLevel() > context->param.maxHapticScale.getLevel()) {
+                context->param.maxHapticScale = scale;
+            }
         }
         break;
     }
     case HG_PARAM_VIBRATOR_INFO: {
-        if (value == nullptr || size != 3 * sizeof(float)) {
+        if (param->vsize != (3 * sizeof(float))) {
+            ALOGE("%s invalid vibrator info param size %s", __func__, reader.toString().c_str());
             return -EINVAL;
         }
-        const float resonantFrequency = *(float*) value;
-        const float qFactor = *((float *) value + 1);
-        const float maxAmplitude = *((float *) value + 2);
+        float resonantFrequency, qFactor, maxAmplitude;
+        if (reader.readFromValue(&resonantFrequency) != OK ||
+            reader.readFromValue(&qFactor) != OK ||
+            reader.readFromValue(&maxAmplitude) != OK) {
+            ALOGE("%s error reading vibrator info %s", __func__, reader.toString().c_str());
+            return -EINVAL;
+        }
         context->param.resonantFrequency =
                 audio_utils::safe_isnan(resonantFrequency) ? DEFAULT_RESONANT_FREQUENCY
                                                            : resonantFrequency;
@@ -364,7 +384,7 @@
         HapticGenerator_Reset(context);
     } break;
     default:
-        ALOGW("Unknown param: %d", param);
+        ALOGW("Unknown param: %d", paramType);
         return -EINVAL;
     }
 
@@ -478,7 +498,7 @@
         return -ENODATA;
     }
 
-    if (context->param.maxHapticIntensity == os::HapticScale::MUTE) {
+    if (context->param.maxHapticScale.isScaleMute()) {
         // Haptic channels are muted, no need to generate haptic data.
         return 0;
     }
@@ -504,8 +524,9 @@
     float* hapticOutBuffer = HapticGenerator_runProcessingChain(
             context->processingChain, context->inputBuffer.data(),
             context->outputBuffer.data(), inBuffer->frameCount);
-    os::scaleHapticData(hapticOutBuffer, hapticSampleCount, context->param.maxHapticIntensity,
-                        context->param.maxHapticAmplitude);
+    os::scaleHapticData(hapticOutBuffer, hapticSampleCount,
+                        context->param.maxHapticScale,
+                        context->param.maxHapticAmplitude);
 
     // For haptic data, the haptic playback thread will copy the data from effect input buffer,
     // which contains haptic data at the end of the buffer, directly to sink buffer.
@@ -567,8 +588,7 @@
                 return -EINVAL;
             }
             effect_param_t *cmd = (effect_param_t *) cmdData;
-            *(int *) replyData = HapticGenerator_SetParameter(
-                    context, *(int32_t *) cmd->data, cmd->vsize, cmd->data + sizeof(int32_t));
+            *(int *) replyData = HapticGenerator_SetParameter(context, cmd);
         }
             break;
 
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
index 85e961f..dbfc5ea 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -48,9 +48,9 @@
     uint32_t audioChannelCount;
     uint32_t hapticChannelCount;
 
-    // A map from track id to haptic intensity.
-    std::map<int, os::HapticScale> id2Intensity;
-    os::HapticScale maxHapticIntensity; // max intensity will be used to scale haptic data.
+    // A map from track id to haptic scale.
+    std::map<int, os::HapticScale> id2HapticScale;
+    os::HapticScale maxHapticScale; // max haptic scale will be used to scale haptic data.
     float maxHapticAmplitude; // max amplitude will be used to limit haptic data absolute values.
 
     float resonantFrequency;
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
index 031477f..55e07fb 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
@@ -37,7 +37,6 @@
     }
     if (instanceSpp) {
         *instanceSpp = ndk::SharedRefBase::make<HapticGeneratorImpl>();
-        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
         return EX_NONE;
     } else {
         LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -67,31 +66,10 @@
 
 ndk::ScopedAStatus HapticGeneratorImpl::getDescriptor(Descriptor* _aidl_return) {
     RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
-    LOG(DEBUG) << __func__ << kDescriptor.toString();
     *_aidl_return = kDescriptor;
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus HapticGeneratorImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->reset();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus HapticGeneratorImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::hapticGenerator != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
@@ -185,7 +163,7 @@
 IEffect::Status HapticGeneratorImpl::effectProcessImpl(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, samples);
+    return mContext->process(in, out, samples);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
index 53dcd49..8bae024 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
@@ -27,13 +27,9 @@
   public:
     static const std::string kEffectName;
     static const Descriptor kDescriptor;
-    HapticGeneratorImpl() { LOG(DEBUG) << __func__; }
-    ~HapticGeneratorImpl() {
-        cleanUp();
-        LOG(DEBUG) << __func__;
-    }
+    HapticGeneratorImpl() = default;
+    ~HapticGeneratorImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index 354ee00..6e9e216 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -14,32 +14,47 @@
  * limitations under the License.
  */
 
-#include <cstddef>
 #define LOG_TAG "AHAL_HapticGeneratorContext"
 
-#include <Utils.h>
+#include "HapticGeneratorContext.h"
 #include <android-base/logging.h>
 #include <android-base/parsedouble.h>
 #include <android-base/properties.h>
+#include <audio_utils/primitives.h>
+#include <audio_utils/safe_math.h>
+#include <Utils.h>
 
-#include "HapticGeneratorContext.h"
+#include <cstddef>
+
+using aidl::android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::common::getPcmSampleSizeInBytes;
+using aidl::android::media::audio::common::AudioChannelLayout;
 
 namespace aidl::android::hardware::audio::effect {
 
 HapticGeneratorContext::HapticGeneratorContext(int statusDepth, const Parameter::Common& common)
     : EffectContext(statusDepth, common) {
-    LOG(DEBUG) << __func__;
     mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
-    mSampleRate = common.input.base.sampleRate;
-    mFrameCount = common.input.frameCount;
-    init_params(common.input.base.channelMask, common.output.base.channelMask);
+
+    mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
+    mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
+    mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+    mParams.mVibratorInfo.maxAmplitude = 0.f;
+
+    init_params(common);
+    mState = HAPTIC_GENERATOR_STATE_INITIALIZED;
 }
 
 HapticGeneratorContext::~HapticGeneratorContext() {
-    LOG(DEBUG) << __func__;
     mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
 }
 
+// Override setCommon for HapticGenerator because init_params() must be re-run whenever the
+// common parameters change.
+RetCode HapticGeneratorContext::setCommon(const Parameter::Common& common) {
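+    // Re-derive sample rate, frame count and channel layout from the new common parameters
+    // before handing them to the base class.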
+    init_params(common);
+    return EffectContext::setCommon(common);
+}
+
 RetCode HapticGeneratorContext::enable() {
     if (mState != HAPTIC_GENERATOR_STATE_INITIALIZED) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
@@ -56,7 +71,7 @@
     return RetCode::SUCCESS;
 }
 
-void HapticGeneratorContext::reset() {
+RetCode HapticGeneratorContext::reset() {
     for (auto& filter : mProcessorsRecord.filters) {
         filter->clear();
     }
@@ -66,11 +81,11 @@
     for (auto& distortion : mProcessorsRecord.distortions) {
         distortion->clear();
     }
+    return RetCode::SUCCESS;
 }
 
 RetCode HapticGeneratorContext::setHgHapticScales(
         const std::vector<HapticGenerator::HapticScale>& hapticScales) {
-    std::lock_guard lg(mMutex);
     for (auto hapticScale : hapticScales) {
         mParams.mHapticScales.insert_or_assign(hapticScale.id, hapticScale.scale);
     }
@@ -78,17 +93,16 @@
     for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
         mParams.mMaxVibratorScale = std::max(mParams.mMaxVibratorScale, vibratorScale);
     }
+    LOG(INFO) << " HapticGenerator VibratorScale set to " << toString(mParams.mMaxVibratorScale);
     return RetCode::SUCCESS;
 }
 
-HapticGenerator::VibratorInformation HapticGeneratorContext::getHgVibratorInformation() {
-    std::lock_guard lg(mMutex);
+HapticGenerator::VibratorInformation HapticGeneratorContext::getHgVibratorInformation() const {
     return mParams.mVibratorInfo;
 }
 
-std::vector<HapticGenerator::HapticScale> HapticGeneratorContext::getHgHapticScales() {
+std::vector<HapticGenerator::HapticScale> HapticGeneratorContext::getHgHapticScales() const {
     std::vector<HapticGenerator::HapticScale> result;
-    std::lock_guard lg(mMutex);
     for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
         result.push_back({id, vibratorScale});
     }
@@ -97,30 +111,32 @@
 
 RetCode HapticGeneratorContext::setHgVibratorInformation(
         const HapticGenerator::VibratorInformation& vibratorInfo) {
-    {
-        std::lock_guard lg(mMutex);
-        mParams.mVibratorInfo = vibratorInfo;
-
-        if (mProcessorsRecord.bpf != nullptr) {
-            mProcessorsRecord.bpf->setCoefficients(
-                    ::android::audio_effect::haptic_generator::bpfCoefs(
-                            mParams.mVibratorInfo.resonantFrequencyHz, DEFAULT_BPF_Q, mSampleRate));
-        }
-        if (mProcessorsRecord.bsf != nullptr) {
-            mProcessorsRecord.bsf->setCoefficients(
-                    ::android::audio_effect::haptic_generator::bsfCoefs(
-                            mParams.mVibratorInfo.resonantFrequencyHz,
-                            mParams.mVibratorInfo.qFactor, mParams.mVibratorInfo.qFactor / 2.0f,
-                            mSampleRate));
-        }
+    mParams.mVibratorInfo = vibratorInfo;
+    if (::android::audio_utils::safe_isnan(mParams.mVibratorInfo.resonantFrequencyHz)) {
+        LOG(WARNING) << __func__ << " resonantFrequencyHz reset from nan to "
+                     << DEFAULT_RESONANT_FREQUENCY;
+        mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
     }
+    if (::android::audio_utils::safe_isnan(mParams.mVibratorInfo.qFactor)) {
+        LOG(WARNING) << __func__ << " qFactor reset from nan to " << DEFAULT_BSF_ZERO_Q;
+        mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+    }
+
+    if (mProcessorsRecord.bpf != nullptr) {
+        mProcessorsRecord.bpf->setCoefficients(::android::audio_effect::haptic_generator::bpfCoefs(
+                mParams.mVibratorInfo.resonantFrequencyHz, DEFAULT_BPF_Q, mSampleRate));
+    }
+    if (mProcessorsRecord.bsf != nullptr) {
+        mProcessorsRecord.bsf->setCoefficients(::android::audio_effect::haptic_generator::bsfCoefs(
+                mParams.mVibratorInfo.resonantFrequencyHz, mParams.mVibratorInfo.qFactor,
+                mParams.mVibratorInfo.qFactor / 2.0f, mSampleRate));
+    }
+
     configure();
     return RetCode::SUCCESS;
 }
 
-IEffect::Status HapticGeneratorContext::lvmProcess(float* in, float* out, int samples) {
-    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
-
+IEffect::Status HapticGeneratorContext::process(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!in, status, "nullInput");
     RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -129,32 +145,19 @@
     auto frameSize = getInputFrameSize();
     RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
 
-    LOG(DEBUG) << __func__ << " start processing";
-    // The audio data must not be modified but just written to
-    // output buffer according the access mode.
-    bool accumulate = false;
-    if (in != out) {
-        for (int i = 0; i < samples; i++) {
-            if (accumulate) {
-                out[i] += in[i];
-            } else {
-                out[i] = in[i];
-            }
-        }
-    }
-
     if (mState != HAPTIC_GENERATOR_STATE_ACTIVE) {
+        LOG(WARNING) << " HapticGenerator in wrong state " << mState;
         return status;
     }
 
-    std::lock_guard lg(mMutex);
     if (mParams.mMaxVibratorScale == HapticGenerator::VibratorScale::MUTE) {
         // Haptic channels are muted, no need to generate haptic data.
         return {STATUS_OK, samples, samples};
     }
 
     // Resize buffer if the haptic sample count is greater than buffer size.
-    size_t hapticSampleCount = mFrameCount * mParams.mHapticChannelCount;
+    const size_t hapticSampleCount = mFrameCount * mParams.mHapticChannelCount;
+    const size_t audioSampleCount = mFrameCount * mParams.mAudioChannelCount;
     if (hapticSampleCount > mInputBuffer.size()) {
         // The inputBuffer and outputBuffer must have the same size, which must be at least
         // the haptic sample count.
@@ -174,47 +177,47 @@
             runProcessingChain(mInputBuffer.data(), mOutputBuffer.data(), mFrameCount);
     ::android::os::scaleHapticData(
             hapticOutBuffer, hapticSampleCount,
-            static_cast<::android::os::HapticScale>(mParams.mMaxVibratorScale),
-            mParams.mVibratorInfo.qFactor);
+            // TODO(b/356406686): add the new HapticScale fields to the AIDL interface.
+            ::android::os::HapticScale(
+                    static_cast<::android::os::HapticLevel>(mParams.mMaxVibratorScale)),
+            mParams.mVibratorInfo.maxAmplitude /* limit */);
 
     // For haptic data, the haptic playback thread will copy the data from effect input
     // buffer, which contains haptic data at the end of the buffer, directly to sink buffer.
-    // In that case, copy haptic data to input buffer instead of output buffer.
-    // Note: this may not work with rpc/binder calls
-    for (size_t i = 0; i < hapticSampleCount; ++i) {
-        in[samples + i] = hapticOutBuffer[i];
-    }
-    return {STATUS_OK, samples, static_cast<int32_t>(samples + hapticSampleCount)};
+    // In AIDL, only the output buffer is sent back to the audio framework via FMQ. Here the
+    // effect copies the generated haptic data to the target position of the output buffer;
+    // the framework then appends it to the same position of the input buffer.
+    memcpy_to_float_from_float_with_clamping(out + audioSampleCount, hapticOutBuffer,
+                                             hapticSampleCount, 2.f /* absMax */);
+    return {STATUS_OK, samples, samples};
 }
 
-void HapticGeneratorContext::init_params(media::audio::common::AudioChannelLayout inputChMask,
-                                         media::audio::common::AudioChannelLayout outputChMask) {
-    std::lock_guard lg(mMutex);
-    mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
-    mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
-    mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+void HapticGeneratorContext::init_params(const Parameter::Common& common) {
+    mSampleRate = common.input.base.sampleRate;
+    mFrameCount = common.input.frameCount;
 
     mParams.mAudioChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
-            inputChMask, ~media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
+            common.input.base.channelMask,
+            ~media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
     mParams.mHapticChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
-            outputChMask, media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
+            common.output.base.channelMask,
+            media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
     LOG_ALWAYS_FATAL_IF(mParams.mHapticChannelCount > 2, "haptic channel count is too large");
     for (int i = 0; i < mParams.mHapticChannelCount; ++i) {
         // By default, use the first audio channel to generate haptic channels.
         mParams.mHapticChannelSource[i] = 0;
     }
-
-    mState = HAPTIC_GENERATOR_STATE_INITIALIZED;
+    configure();
+    LOG(DEBUG) << " HapticGenerator init context:\n" << contextToString();
 }
 
-float HapticGeneratorContext::getDistortionOutputGain() {
+float HapticGeneratorContext::getDistortionOutputGain() const {
     float distortionOutputGain = getFloatProperty(
             "vendor.audio.hapticgenerator.distortion.output.gain", DEFAULT_DISTORTION_OUTPUT_GAIN);
-    LOG(DEBUG) << "Using distortion output gain as " << distortionOutputGain;
     return distortionOutputGain;
 }
 
-float HapticGeneratorContext::getFloatProperty(const std::string& key, float defaultValue) {
+float HapticGeneratorContext::getFloatProperty(const std::string& key, float defaultValue) const {
     float result;
     std::string value = ::android::base::GetProperty(key, "");
     if (!value.empty() && ::android::base::ParseFloat(value, &result)) {
@@ -237,7 +240,6 @@
  * Build haptic generator processing chain.
  */
 void HapticGeneratorContext::buildProcessingChain() {
-    std::lock_guard lg(mMutex);
     const size_t channelCount = mParams.mHapticChannelCount;
     float highPassCornerFrequency = 50.0f;
     auto hpf = ::android::audio_effect::haptic_generator::createHPF2(highPassCornerFrequency,
@@ -344,4 +346,34 @@
     return in;
 }
 
+std::string HapticGeneratorContext::paramToString(const struct HapticGeneratorParam& param) const {
+    std::stringstream ss;
+    ss << "\t\ttHapticGenerator Parameters:\n";
+    ss << "\t\t- mHapticChannelCount: " << param.mHapticChannelCount << '\n';
+    ss << "\t\t- mAudioChannelCount: " << param.mAudioChannelCount << '\n';
+    ss << "\t\t- mHapticChannelSource: " << param.mHapticChannelSource[0] << ", "
+       << param.mHapticChannelSource[1] << '\n';
+    ss << "\t\t- mMaxVibratorScale: " << ::android::internal::ToString(param.mMaxVibratorScale)
+       << '\n';
+    ss << "\t\t- mVibratorInfo: " << param.mVibratorInfo.toString() << '\n';
+    for (const auto& it : param.mHapticScales)
+        ss << "\t\t\t" << it.first << ": " << toString(it.second) << '\n';
+
+    return ss.str();
+}
+
+std::string HapticGeneratorContext::contextToString() const {
+    std::stringstream ss;
+    ss << "\t\tHapticGenerator Context:\n";
+    ss << "\t\t- state: " << mState << '\n';
+    ss << "\t\t- bpf Q: " << DEFAULT_BPF_Q << '\n';
+    ss << "\t\t- slow env normalization power: " << DEFAULT_SLOW_ENV_NORMALIZATION_POWER << '\n';
+    ss << "\t\t- distortion corner frequency: " << DEFAULT_DISTORTION_CORNER_FREQUENCY << '\n';
+    ss << "\t\t- distortion input gain: " << DEFAULT_DISTORTION_INPUT_GAIN << '\n';
+    ss << "\t\t- distortion cube threshold: " << DEFAULT_DISTORTION_CUBE_THRESHOLD << '\n';
+    ss << "\t\t- distortion output gain: " << getDistortionOutputGain() << '\n';
+    ss << "\t\tHapticGenerator Parameters:\n" << paramToString(mParams) << "\n";
+    return ss.str();
+}
+
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index 26e69e4..cf38e47 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -16,12 +16,13 @@
 
 #pragma once
 
-#include <android-base/thread_annotations.h>
-#include <vibrator/ExternalVibrationUtils.h>
-#include <map>
-
-#include "Processors.h"
 #include "effect-impl/EffectContext.h"
+#include "Processors.h"
+
+#include <vibrator/ExternalVibrationUtils.h>
+
+#include <cstddef>
+#include <map>
 
 namespace aidl::android::hardware::audio::effect {
 
@@ -40,7 +41,6 @@
     int mHapticChannelCount;
     int mAudioChannelCount;
 
-    HapticGenerator::HapticScale mHapticScale;
     std::map<int, HapticGenerator::VibratorScale> mHapticScales;
     // max intensity will be used to scale haptic data.
     HapticGenerator::VibratorScale mMaxVibratorScale;
@@ -65,17 +65,19 @@
   public:
     HapticGeneratorContext(int statusDepth, const Parameter::Common& common);
     ~HapticGeneratorContext();
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
+    RetCode reset() override;
 
     RetCode setHgHapticScales(const std::vector<HapticGenerator::HapticScale>& hapticScales);
-    std::vector<HapticGenerator::HapticScale> getHgHapticScales();
+    std::vector<HapticGenerator::HapticScale> getHgHapticScales() const;
 
     RetCode setHgVibratorInformation(const HapticGenerator::VibratorInformation& vibratorInfo);
-    HapticGenerator::VibratorInformation getHgVibratorInformation();
+    HapticGenerator::VibratorInformation getHgVibratorInformation() const;
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status process(float* in, float* out, int samples);
+
+    RetCode setCommon(const Parameter::Common& common) override;
 
   private:
     static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
@@ -88,9 +90,8 @@
     static constexpr float DEFAULT_DISTORTION_INPUT_GAIN = 0.3f;
     static constexpr float DEFAULT_DISTORTION_CUBE_THRESHOLD = 0.1f;
 
-    std::mutex mMutex;
     HapticGeneratorState mState;
-    HapticGeneratorParam mParams GUARDED_BY(mMutex);
+    HapticGeneratorParam mParams;
     int mSampleRate;
     int64_t mFrameCount = 0;
 
@@ -110,15 +111,17 @@
     // intermediate buffer in the generating algorithm.
     std::vector<float> mOutputBuffer;
 
-    void init_params(media::audio::common::AudioChannelLayout inputChMask,
-                     media::audio::common::AudioChannelLayout outputChMask);
+    void init_params(const Parameter::Common& common);
     void configure();
 
-    float getDistortionOutputGain();
-    float getFloatProperty(const std::string& key, float defaultValue);
+    float getDistortionOutputGain() const;
+    float getFloatProperty(const std::string& key, float defaultValue) const;
     void addBiquadFilter(std::shared_ptr<HapticBiquadFilter> filter);
     void buildProcessingChain();
     float* runProcessingChain(float* buf1, float* buf2, size_t frameCount);
+
+    std::string paramToString(const struct HapticGeneratorParam& param) const;
+    std::string contextToString() const;
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/loudness/Android.bp b/media/libeffects/loudness/Android.bp
index 46e4669..4f04ffb 100644
--- a/media/libeffects/loudness/Android.bp
+++ b/media/libeffects/loudness/Android.bp
@@ -48,10 +48,10 @@
 cc_library_shared {
     name: "libloudnessenhanceraidl",
     srcs: [
+        ":effectCommonFile",
         "aidl/EffectLoudnessEnhancer.cpp",
         "aidl/LoudnessEnhancerContext.cpp",
         "dsp/core/dynamic_range_compression.cpp",
-        ":effectCommonFile",
     ],
     defaults: [
         "aidlaudioeffectservice_defaults",
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
index a7d9282..592fd60 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
@@ -37,7 +37,6 @@
     }
     if (instanceSpp) {
         *instanceSpp = ndk::SharedRefBase::make<LoudnessEnhancerImpl>();
-        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
         return EX_NONE;
     } else {
         LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -67,32 +66,10 @@
 
 ndk::ScopedAStatus LoudnessEnhancerImpl::getDescriptor(Descriptor* _aidl_return) {
     RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
-    LOG(DEBUG) << __func__ << kDescriptor.toString();
     *_aidl_return = kDescriptor;
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus LoudnessEnhancerImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus LoudnessEnhancerImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::loudnessEnhancer != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
@@ -178,7 +155,7 @@
 IEffect::Status LoudnessEnhancerImpl::effectProcessImpl(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, samples);
+    return mContext->process(in, out, samples);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
index e2e716c..1e050f3 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
@@ -27,13 +27,9 @@
   public:
     static const std::string kEffectName;
     static const Descriptor kDescriptor;
-    LoudnessEnhancerImpl() { LOG(DEBUG) << __func__; }
-    ~LoudnessEnhancerImpl() {
-        cleanUp();
-        LOG(DEBUG) << __func__;
-    }
+    LoudnessEnhancerImpl() = default;
+    ~LoudnessEnhancerImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
index bc3fa45..ac8b14a 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
@@ -24,16 +24,10 @@
 
 LoudnessEnhancerContext::LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common)
     : EffectContext(statusDepth, common) {
-    LOG(DEBUG) << __func__;
     init_params();
 }
 
-LoudnessEnhancerContext::~LoudnessEnhancerContext() {
-    LOG(DEBUG) << __func__;
-}
-
 RetCode LoudnessEnhancerContext::enable() {
-    std::lock_guard lg(mMutex);
     if (mState != LOUDNESS_ENHANCER_STATE_INITIALIZED) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
@@ -42,7 +36,6 @@
 }
 
 RetCode LoudnessEnhancerContext::disable() {
-    std::lock_guard lg(mMutex);
     if (mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
@@ -50,24 +43,17 @@
     return RetCode::SUCCESS;
 }
 
-void LoudnessEnhancerContext::reset() {
-    float targetAmp = pow(10, mGain / 2000.0f);  // mB to linear amplification
-    std::lock_guard lg(mMutex);
+RetCode LoudnessEnhancerContext::setLeGain(int gainMb) {
+    float targetAmp = pow(10, gainMb / 2000.0f);  // mB to linear amplification
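+    // For example, a gain of 600 mB maps to 10^(600/2000), roughly a 2x linear amplification.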
     if (mCompressor != nullptr) {
         // Get samplingRate from input
         mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
     }
-}
-
-RetCode LoudnessEnhancerContext::setLeGain(int gainMb) {
     mGain = gainMb;
-    reset();  // apply parameter update
     return RetCode::SUCCESS;
 }
 
-IEffect::Status LoudnessEnhancerContext::lvmProcess(float* in, float* out, int samples) {
-    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
-
+IEffect::Status LoudnessEnhancerContext::process(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!in, status, "nullInput");
     RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -76,11 +62,9 @@
     auto frameSize = getInputFrameSize();
     RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
 
-    std::lock_guard lg(mMutex);
     status = {STATUS_INVALID_OPERATION, 0, 0};
     RETURN_VALUE_IF(mState != LOUDNESS_ENHANCER_STATE_ACTIVE, status, "stateNotActive");
 
-    LOG(DEBUG) << __func__ << " start processing";
     // PcmType is always expected to be Float 32 bit.
     constexpr float scale = 1 << 15;  // power of 2 is lossless conversion to int16_t range
     constexpr float inverseScale = 1.f / scale;
@@ -124,9 +108,8 @@
 
     mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
     float targetAmp = pow(10, mGain / 2000.0f);  // mB to linear amplification
-    LOG(DEBUG) << __func__ << "Target gain = " << mGain << "mB <=> factor = " << targetAmp;
+    LOG(VERBOSE) << __func__ << " Target gain = " << mGain << "mB <=> factor = " << targetAmp;
 
-    std::lock_guard lg(mMutex);
     mCompressor = std::make_unique<le_fx::AdaptiveDynamicRangeCompression>();
     mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
     mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
index 9a1ec4c..67ccd24 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
@@ -16,7 +16,6 @@
 
 #pragma once
 
-#include <android-base/thread_annotations.h>
 #include <audio_effects/effect_loudnessenhancer.h>
 
 #include "dsp/core/dynamic_range_compression.h"
@@ -33,24 +32,22 @@
 class LoudnessEnhancerContext final : public EffectContext {
   public:
     LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common);
-    ~LoudnessEnhancerContext();
+    ~LoudnessEnhancerContext() = default;
 
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
 
     RetCode setLeGain(int gainMb);
     int getLeGain() const { return mGain; }
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status process(float* in, float* out, int samples);
 
   private:
-    std::mutex mMutex;
-    LoudnessEnhancerState mState GUARDED_BY(mMutex) = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
+    LoudnessEnhancerState mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
     int mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
     // In this implementation, there is no coupling between the compression on the left and right
     // channels
-    std::unique_ptr<le_fx::AdaptiveDynamicRangeCompression> mCompressor GUARDED_BY(mMutex);
+    std::unique_ptr<le_fx::AdaptiveDynamicRangeCompression> mCompressor;
 
     void init_params();
 };
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
index c21c5f2..8036983 100644
--- a/media/libeffects/lvm/benchmarks/Android.bp
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 7998879..02b918b 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -1,5 +1,6 @@
 // Music bundle
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_lvm_lib_license",
     ],
@@ -30,6 +31,60 @@
     vendor: true,
     host_supported: true,
     srcs: [
+        "Bass/src/LVDBE_Control.cpp",
+        "Bass/src/LVDBE_Init.cpp",
+        "Bass/src/LVDBE_Process.cpp",
+        "Bass/src/LVDBE_Tables.cpp",
+        "Bundle/src/LVM_API_Specials.cpp",
+        "Bundle/src/LVM_Buffers.cpp",
+        "Bundle/src/LVM_Control.cpp",
+        "Bundle/src/LVM_Init.cpp",
+        "Bundle/src/LVM_Process.cpp",
+        "Bundle/src/LVM_Tables.cpp",
+        "Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp",
+        "Common/src/Add2_Sat_32x32.cpp",
+        "Common/src/Copy_16.cpp",
+        "Common/src/DC_2I_D16_TRC_WRA_01.cpp",
+        "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
+        "Common/src/DelayMix_16x16.cpp",
+        "Common/src/From2iToMS_16x16.cpp",
+        "Common/src/From2iToMono_32.cpp",
+        "Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp",
+        "Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp",
+        "Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp",
+        "Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp",
+        "Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp",
+        "Common/src/LVC_MixInSoft_D16C31_SAT.cpp",
+        "Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp",
+        "Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp",
+        "Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp",
+        "Common/src/LVC_Mixer_GetCurrent.cpp",
+        "Common/src/LVC_Mixer_GetTarget.cpp",
+        "Common/src/LVC_Mixer_Init.cpp",
+        "Common/src/LVC_Mixer_SetTarget.cpp",
+        "Common/src/LVC_Mixer_SetTimeConstant.cpp",
+        "Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp",
+        "Common/src/LVM_Timer.cpp",
+        "Common/src/LVM_Timer_Init.cpp",
+        "Common/src/MSTo2i_Sat_16x16.cpp",
+        "Common/src/Mac3s_Sat_32x16.cpp",
+        "Common/src/MonoTo2I_32.cpp",
+        "Common/src/Mult3s_32x16.cpp",
+        "Common/src/NonLinComp_D16.cpp",
+        "Common/src/Shift_Sat_v16xv16.cpp",
+        "Common/src/Shift_Sat_v32xv32.cpp",
+        "Common/src/dB_to_Lin32.cpp",
+        "Eq/src/LVEQNB_CalcCoef.cpp",
+        "Eq/src/LVEQNB_Control.cpp",
+        "Eq/src/LVEQNB_Init.cpp",
+        "Eq/src/LVEQNB_Process.cpp",
+        "Eq/src/LVEQNB_Tables.cpp",
+        "SpectrumAnalyzer/src/LVPSA_Control.cpp",
+        "SpectrumAnalyzer/src/LVPSA_Init.cpp",
+        "SpectrumAnalyzer/src/LVPSA_Process.cpp",
+        "SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp",
+        "SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp",
+        "SpectrumAnalyzer/src/LVPSA_Tables.cpp",
         "StereoWidening/src/LVCS_BypassMix.cpp",
         "StereoWidening/src/LVCS_Control.cpp",
         "StereoWidening/src/LVCS_Equaliser.cpp",
@@ -38,77 +93,23 @@
         "StereoWidening/src/LVCS_ReverbGenerator.cpp",
         "StereoWidening/src/LVCS_StereoEnhancer.cpp",
         "StereoWidening/src/LVCS_Tables.cpp",
-        "Bass/src/LVDBE_Control.cpp",
-        "Bass/src/LVDBE_Init.cpp",
-        "Bass/src/LVDBE_Process.cpp",
-        "Bass/src/LVDBE_Tables.cpp",
-        "Bundle/src/LVM_API_Specials.cpp",
-        "Bundle/src/LVM_Buffers.cpp",
-        "Bundle/src/LVM_Init.cpp",
-        "Bundle/src/LVM_Process.cpp",
-        "Bundle/src/LVM_Tables.cpp",
-        "Bundle/src/LVM_Control.cpp",
-        "SpectrumAnalyzer/src/LVPSA_Control.cpp",
-        "SpectrumAnalyzer/src/LVPSA_Init.cpp",
-        "SpectrumAnalyzer/src/LVPSA_Process.cpp",
-        "SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp",
-        "SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp",
-        "SpectrumAnalyzer/src/LVPSA_Tables.cpp",
-        "Eq/src/LVEQNB_CalcCoef.cpp",
-        "Eq/src/LVEQNB_Control.cpp",
-        "Eq/src/LVEQNB_Init.cpp",
-        "Eq/src/LVEQNB_Process.cpp",
-        "Eq/src/LVEQNB_Tables.cpp",
-        "Common/src/DC_2I_D16_TRC_WRA_01.cpp",
-        "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
-        "Common/src/Copy_16.cpp",
-        "Common/src/MonoTo2I_32.cpp",
-        "Common/src/dB_to_Lin32.cpp",
-        "Common/src/Shift_Sat_v16xv16.cpp",
-        "Common/src/Shift_Sat_v32xv32.cpp",
-        "Common/src/From2iToMono_32.cpp",
-        "Common/src/Mult3s_32x16.cpp",
-        "Common/src/NonLinComp_D16.cpp",
-        "Common/src/DelayMix_16x16.cpp",
-        "Common/src/MSTo2i_Sat_16x16.cpp",
-        "Common/src/From2iToMS_16x16.cpp",
-        "Common/src/Mac3s_Sat_32x16.cpp",
-        "Common/src/Add2_Sat_32x32.cpp",
-        "Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp",
-        "Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp",
-        "Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp",
-        "Common/src/LVC_Mixer_SetTimeConstant.cpp",
-        "Common/src/LVC_Mixer_SetTarget.cpp",
-        "Common/src/LVC_Mixer_GetTarget.cpp",
-        "Common/src/LVC_Mixer_Init.cpp",
-        "Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp",
-        "Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp",
-        "Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp",
-        "Common/src/LVC_Mixer_GetCurrent.cpp",
-        "Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp",
-        "Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp",
-        "Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp",
-        "Common/src/LVC_MixInSoft_D16C31_SAT.cpp",
-        "Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp",
-        "Common/src/LVM_Timer.cpp",
-        "Common/src/LVM_Timer_Init.cpp",
     ],
 
     local_include_dirs: [
-        "Eq/lib",
-        "Eq/src",
         "Bass/lib",
         "Bass/src",
-        "Common/src",
         "Bundle/src",
+        "Common/src",
+        "Eq/lib",
+        "Eq/src",
         "SpectrumAnalyzer/lib",
         "SpectrumAnalyzer/src",
-        "StereoWidening/src",
         "StereoWidening/lib",
+        "StereoWidening/src",
     ],
     export_include_dirs: [
-        "Common/lib",
         "Bundle/lib",
+        "Common/lib",
     ],
     shared_libs: [
         "liblog",
@@ -120,9 +121,9 @@
         "libhardware_headers",
     ],
     cppflags: [
-        "-fvisibility=hidden",
         "-Wall",
         "-Werror",
+        "-fvisibility=hidden",
     ],
 
 }
@@ -140,6 +141,26 @@
     vendor: true,
     host_supported: true,
     srcs: [
+        "Common/src/Add2_Sat_32x32.cpp",
+        "Common/src/Copy_16.cpp",
+        "Common/src/Core_MixHard_2St_D32C31_SAT.cpp",
+        "Common/src/Core_MixInSoft_D32C31_SAT.cpp",
+        "Common/src/Core_MixSoft_1St_D32C31_WRA.cpp",
+        "Common/src/From2iToMono_32.cpp",
+        "Common/src/JoinTo2i_32x32.cpp",
+        "Common/src/LVM_FO_HPF.cpp",
+        "Common/src/LVM_FO_LPF.cpp",
+        "Common/src/LVM_GetOmega.cpp",
+        "Common/src/LVM_Mixer_TimeConstant.cpp",
+        "Common/src/LVM_Polynomial.cpp",
+        "Common/src/LVM_Power10.cpp",
+        "Common/src/Mac3s_Sat_32x16.cpp",
+        "Common/src/MixInSoft_D32C31_SAT.cpp",
+        "Common/src/MixSoft_1St_D32C31_WRA.cpp",
+        "Common/src/MixSoft_2St_D32C31_SAT.cpp",
+        "Common/src/MonoTo2I_32.cpp",
+        "Common/src/Mult3s_32x16.cpp",
+        "Common/src/Shift_Sat_v32xv32.cpp",
         "Reverb/src/LVREV_ApplyNewSettings.cpp",
         "Reverb/src/LVREV_ClearAudioBuffers.cpp",
         "Reverb/src/LVREV_GetControlParameters.cpp",
@@ -147,42 +168,22 @@
         "Reverb/src/LVREV_Process.cpp",
         "Reverb/src/LVREV_SetControlParameters.cpp",
         "Reverb/src/LVREV_Tables.cpp",
-        "Common/src/From2iToMono_32.cpp",
-        "Common/src/Mult3s_32x16.cpp",
-        "Common/src/Copy_16.cpp",
-        "Common/src/Mac3s_Sat_32x16.cpp",
-        "Common/src/Shift_Sat_v32xv32.cpp",
-        "Common/src/Add2_Sat_32x32.cpp",
-        "Common/src/JoinTo2i_32x32.cpp",
-        "Common/src/MonoTo2I_32.cpp",
-        "Common/src/LVM_FO_HPF.cpp",
-        "Common/src/LVM_FO_LPF.cpp",
-        "Common/src/LVM_Polynomial.cpp",
-        "Common/src/LVM_Power10.cpp",
-        "Common/src/LVM_GetOmega.cpp",
-        "Common/src/MixSoft_2St_D32C31_SAT.cpp",
-        "Common/src/MixSoft_1St_D32C31_WRA.cpp",
-        "Common/src/MixInSoft_D32C31_SAT.cpp",
-        "Common/src/LVM_Mixer_TimeConstant.cpp",
-        "Common/src/Core_MixHard_2St_D32C31_SAT.cpp",
-        "Common/src/Core_MixSoft_1St_D32C31_WRA.cpp",
-        "Common/src/Core_MixInSoft_D32C31_SAT.cpp",
     ],
 
     local_include_dirs: [
-        "Reverb/src",
         "Common/src",
+        "Reverb/src",
     ],
     export_include_dirs: [
-        "Reverb/lib",
         "Common/lib",
+        "Reverb/lib",
     ],
     static_libs: [
         "libaudioutils",
     ],
     cppflags: [
-        "-fvisibility=hidden",
         "-Wall",
         "-Werror",
+        "-fvisibility=hidden",
     ],
 }
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 0568fbd..c32e91e 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -1,6 +1,7 @@
 // Build the unit tests for effects
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -12,7 +13,7 @@
 cc_test {
     name: "EffectReverbTest",
     defaults: [
-      "libeffects-test-defaults",
+        "libeffects-test-defaults",
     ],
     srcs: [
         "EffectReverbTest.cpp",
@@ -29,7 +30,7 @@
 cc_test {
     name: "EffectBundleTest",
     defaults: [
-      "libeffects-test-defaults",
+        "libeffects-test-defaults",
     ],
     srcs: [
         "EffectBundleTest.cpp",
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index bb7e4c6..d5e3cf7 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -36,20 +36,16 @@
 BundleContext::BundleContext(int statusDepth, const Parameter::Common& common,
               const lvm::BundleEffectType& type)
         : EffectContext(statusDepth, common), mType(type) {
-    LOG(DEBUG) << __func__ << type;
-
     int inputChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
             common.input.base.channelMask);
     mSamplesPerSecond = common.input.base.sampleRate * inputChannelCount;
 }
 
 BundleContext::~BundleContext() {
-    LOG(DEBUG) << __func__;
     deInit();
 }
 
 RetCode BundleContext::init() {
-    std::lock_guard lg(mMutex);
     // init with pre-defined preset NORMAL
     for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
         mBandGainmB[i] = lvm::kSoftPresets[0 /* normal */][i] * 100;
@@ -88,7 +84,6 @@
 }
 
 void BundleContext::deInit() {
-    std::lock_guard lg(mMutex);
     if (mInstance) {
         LVM_DelInstanceHandle(&mInstance);
         mInstance = nullptr;
@@ -102,27 +97,23 @@
     bool tempDisabled = false;
     switch (mType) {
         case lvm::BundleEffectType::EQUALIZER:
-            LOG(DEBUG) << __func__ << " enable bundle EQ";
             if (mSamplesToExitCountEq <= 0) mNumberEffectsEnabled++;
             mSamplesToExitCountEq = (mSamplesPerSecond * 0.1);
             mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
             break;
         case lvm::BundleEffectType::BASS_BOOST:
-            LOG(DEBUG) << __func__ << " enable bundle BB";
             if (mSamplesToExitCountBb <= 0) mNumberEffectsEnabled++;
             mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
             mSamplesToExitCountBb = (mSamplesPerSecond * 0.1);
             tempDisabled = mBassTempDisabled;
             break;
         case lvm::BundleEffectType::VIRTUALIZER:
-            LOG(DEBUG) << __func__ << " enable bundle VR";
             if (mSamplesToExitCountVirt <= 0) mNumberEffectsEnabled++;
             mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
             mSamplesToExitCountVirt = (mSamplesPerSecond * 0.1);
             tempDisabled = mVirtualizerTempDisabled;
             break;
         case lvm::BundleEffectType::VOLUME:
-            LOG(DEBUG) << __func__ << " enable bundle VOL";
             if ((mEffectInDrain & (1 << int(lvm::BundleEffectType::VOLUME))) == 0)
                 mNumberEffectsEnabled++;
             mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
@@ -134,30 +125,24 @@
 
 RetCode BundleContext::enableOperatingMode() {
     LVM_ControlParams_t params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
-        switch (mType) {
-            case lvm::BundleEffectType::EQUALIZER:
-                LOG(DEBUG) << __func__ << " enable bundle EQ";
-                params.EQNB_OperatingMode = LVM_EQNB_ON;
-                break;
-            case lvm::BundleEffectType::BASS_BOOST:
-                LOG(DEBUG) << __func__ << " enable bundle BB";
-                params.BE_OperatingMode = LVM_BE_ON;
-                break;
-            case lvm::BundleEffectType::VIRTUALIZER:
-                LOG(DEBUG) << __func__ << " enable bundle VR";
-                params.VirtualizerOperatingMode = LVM_MODE_ON;
-                break;
-            case lvm::BundleEffectType::VOLUME:
-                LOG(DEBUG) << __func__ << " enable bundle VOL";
-                break;
-        }
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+    switch (mType) {
+        case lvm::BundleEffectType::EQUALIZER:
+            params.EQNB_OperatingMode = LVM_EQNB_ON;
+            break;
+        case lvm::BundleEffectType::BASS_BOOST:
+            params.BE_OperatingMode = LVM_BE_ON;
+            break;
+        case lvm::BundleEffectType::VIRTUALIZER:
+            params.VirtualizerOperatingMode = LVM_MODE_ON;
+            break;
+        case lvm::BundleEffectType::VOLUME:
+            break;
     }
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+
     return limitLevel();
 }
 
@@ -165,19 +150,15 @@
     if (!mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
     switch (mType) {
         case lvm::BundleEffectType::EQUALIZER:
-            LOG(DEBUG) << __func__ << " disable bundle EQ";
             mEffectInDrain |= 1 << int(lvm::BundleEffectType::EQUALIZER);
             break;
         case lvm::BundleEffectType::BASS_BOOST:
-            LOG(DEBUG) << __func__ << " disable bundle BB";
             mEffectInDrain |= 1 << int(lvm::BundleEffectType::BASS_BOOST);
             break;
         case lvm::BundleEffectType::VIRTUALIZER:
-            LOG(DEBUG) << __func__ << " disable bundle VR";
             mEffectInDrain |= 1 << int(lvm::BundleEffectType::VIRTUALIZER);
             break;
         case lvm::BundleEffectType::VOLUME:
-            LOG(DEBUG) << __func__ << " disable bundle VOL";
             mEffectInDrain |= 1 << int(lvm::BundleEffectType::VOLUME);
             break;
     }
@@ -187,31 +168,23 @@
 
 RetCode BundleContext::disableOperatingMode() {
     LVM_ControlParams_t params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
-        switch (mType) {
-            case lvm::BundleEffectType::EQUALIZER:
-                LOG(DEBUG) << __func__ << " disable bundle EQ";
-                params.EQNB_OperatingMode = LVM_EQNB_OFF;
-                break;
-            case lvm::BundleEffectType::BASS_BOOST:
-                LOG(DEBUG) << __func__ << " disable bundle BB";
-                params.BE_OperatingMode = LVM_BE_OFF;
-                break;
-            case lvm::BundleEffectType::VIRTUALIZER:
-                LOG(DEBUG) << __func__ << " disable bundle VR";
-                params.VirtualizerOperatingMode = LVM_MODE_OFF;
-                break;
-            case lvm::BundleEffectType::VOLUME:
-                LOG(DEBUG) << __func__ << " disable bundle VOL";
-                break;
-        }
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+    switch (mType) {
+        case lvm::BundleEffectType::EQUALIZER:
+            params.EQNB_OperatingMode = LVM_EQNB_OFF;
+            break;
+        case lvm::BundleEffectType::BASS_BOOST:
+            params.BE_OperatingMode = LVM_BE_OFF;
+            break;
+        case lvm::BundleEffectType::VIRTUALIZER:
+            params.VirtualizerOperatingMode = LVM_MODE_OFF;
+            break;
+        case lvm::BundleEffectType::VOLUME:
+            break;
     }
-    mEnabled = false;
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
     return limitLevel();
 }
 
@@ -223,89 +196,80 @@
     float energyBassBoost = 0;
     float crossCorrection = 0;
     LVM_ControlParams_t params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        bool eqEnabled = params.EQNB_OperatingMode == LVM_EQNB_ON;
-        bool bbEnabled = params.BE_OperatingMode == LVM_BE_ON;
-        bool viEnabled = params.VirtualizerOperatingMode == LVM_MODE_ON;
+    bool eqEnabled = params.EQNB_OperatingMode == LVM_EQNB_ON;
+    bool bbEnabled = params.BE_OperatingMode == LVM_BE_ON;
+    bool viEnabled = params.VirtualizerOperatingMode == LVM_MODE_ON;
+
+    if (eqEnabled) {
+        for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+            float bandFactor = mBandGainmB[i] / 1500.0;
+            float bandCoefficient = lvm::kBandEnergyCoefficient[i];
+            float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
+            if (bandEnergy > 0) energyContribution += bandEnergy;
+        }
+
+        // cross EQ coefficients
+        float bandFactorSum = 0;
+        for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
+            float bandFactor1 = mBandGainmB[i] / 1500.0;
+            float bandFactor2 = mBandGainmB[i + 1] / 1500.0;
+
+            if (bandFactor1 > 0 && bandFactor2 > 0) {
+                float crossEnergy =
+                        bandFactor1 * bandFactor2 * lvm::kBandEnergyCrossCoefficient[i];
+                bandFactorSum += bandFactor1 * bandFactor2;
+
+                if (crossEnergy > 0) energyCross += crossEnergy;
+            }
+        }
+        bandFactorSum -= 1.0;
+        if (bandFactorSum > 0) crossCorrection = bandFactorSum * 0.7;
+    }
+    // BassBoost contribution
+    if (bbEnabled) {
+        float boostFactor = mBassStrengthSaved / 1000.0;
+        float boostCoefficient = lvm::kBassBoostEnergyCoefficient;
+
+        energyContribution += boostFactor * boostCoefficient * boostCoefficient;
 
         if (eqEnabled) {
             for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
                 float bandFactor = mBandGainmB[i] / 1500.0;
-                float bandCoefficient = lvm::kBandEnergyCoefficient[i];
-                float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
-                if (bandEnergy > 0) energyContribution += bandEnergy;
-            }
-
-            // cross EQ coefficients
-            float bandFactorSum = 0;
-            for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
-                float bandFactor1 = mBandGainmB[i] / 1500.0;
-                float bandFactor2 = mBandGainmB[i + 1] / 1500.0;
-
-                if (bandFactor1 > 0 && bandFactor2 > 0) {
-                    float crossEnergy =
-                            bandFactor1 * bandFactor2 * lvm::kBandEnergyCrossCoefficient[i];
-                    bandFactorSum += bandFactor1 * bandFactor2;
-
-                    if (crossEnergy > 0) energyCross += crossEnergy;
-                }
-            }
-            bandFactorSum -= 1.0;
-            if (bandFactorSum > 0) crossCorrection = bandFactorSum * 0.7;
-        }
-        // BassBoost contribution
-        if (bbEnabled) {
-            float boostFactor = mBassStrengthSaved / 1000.0;
-            float boostCoefficient = lvm::kBassBoostEnergyCoefficient;
-
-            energyContribution += boostFactor * boostCoefficient * boostCoefficient;
-
-            if (eqEnabled) {
-                for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-                    float bandFactor = mBandGainmB[i] / 1500.0;
-                    float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
-                    float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
-                    if (bandEnergy > 0) energyBassBoost += bandEnergy;
-                }
+                float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
+                float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
+                if (bandEnergy > 0) energyBassBoost += bandEnergy;
             }
         }
-        // Virtualizer contribution
-        if (viEnabled) {
-            energyContribution += lvm::kVirtualizerContribution * lvm::kVirtualizerContribution;
-        }
+    }
+    // Virtualizer contribution
+    if (viEnabled) {
+        energyContribution += lvm::kVirtualizerContribution * lvm::kVirtualizerContribution;
+    }
 
-        double totalEnergyEstimation =
-                sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection;
-        LOG(INFO) << " TOTAL energy estimation: " << totalEnergyEstimation << " dB";
+    double totalEnergyEstimation =
+            sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection;
 
-        // roundoff
-        int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
-        if (maxLevelRound + mVolume > 0) {
-            gainCorrection = maxLevelRound + mVolume;
-        }
+    // roundoff
+    int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
+    if (maxLevelRound + mVolumedB > 0) {
+        gainCorrection = maxLevelRound + mVolumedB;
+    }
 
-        params.VC_EffectLevel = mVolume - gainCorrection;
-        if (params.VC_EffectLevel < -96) {
-            params.VC_EffectLevel = -96;
-        }
-        LOG(INFO) << "\tVol: " << mVolume << ", GainCorrection: " << gainCorrection
-                  << ", Actual vol: " << params.VC_EffectLevel;
+    params.VC_EffectLevel = mVolumedB - gainCorrection;
+    if (params.VC_EffectLevel < -96) {
+        params.VC_EffectLevel = -96;
+    }
+    /* Activate the initial settings */
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
 
-        /* Activate the initial settings */
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-
-        if (mFirstVolume) {
-            RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetVolumeNoSmoothing(mInstance, &params),
-                            RetCode::ERROR_EFFECT_LIB_ERROR, " setVolumeNoSmoothingFailed");
-            LOG(INFO) << "\tLVM_VOLUME: Disabling Smoothing for first volume change to remove "
-                         "spikes/clicks";
-            mFirstVolume = false;
-        }
+    if (mFirstVolume) {
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetVolumeNoSmoothing(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setVolumeNoSmoothingFailed");
+        mFirstVolume = false;
     }
 
     return RetCode::SUCCESS;
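For reference, the headroom logic in limitLevel() above reduces to the following arithmetic (restated from the code; all quantities are in dB):

    totalEnergyEstimation = sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection
    maxLevelRound  = (int)(totalEnergyEstimation + 0.99)          // round up toward the next integer
    gainCorrection = maxLevelRound + mVolumedB, when that sum is positive, otherwise 0
    VC_EffectLevel = max(mVolumedB - gainCorrection, -96)

For example, with mVolumedB = -2 and an estimated boost of 4.2 dB, maxLevelRound = 5, gainCorrection = 5 + (-2) = 3, and the volume stage is driven at -2 - 3 = -5 dB; with mVolumedB = -10 the sum is negative, no correction is applied, and the level stays at -10 dB.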
@@ -439,17 +403,13 @@
     float maxdB = std::max(leftdB, rightdB);
     float pandB = rightdB - leftdB;
     setVolumeLevel(maxdB);
-    LOG(DEBUG) << __func__ << " pandB: " << pandB << " maxdB " << maxdB;
 
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, "");
-        params.VC_Balance = pandB;
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, "");
+    params.VC_Balance = pandB;
 
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, "");
-    }
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, "");
     mVolumeStereo = volume;
     return RetCode::SUCCESS;
 }
@@ -469,7 +429,6 @@
     RetCode ret = updateControlParameter(bandLevels);
     if (RetCode::SUCCESS == ret) {
         mCurPresetIdx = presetIdx;
-        LOG(INFO) << __func__ << " success with " << presetIdx;
     } else {
         LOG(ERROR) << __func__ << " failed to setPreset " << presetIdx;
     }
@@ -483,7 +442,6 @@
     RetCode ret = updateControlParameter(bandLevels);
     if (RetCode::SUCCESS == ret) {
         mCurPresetIdx = lvm::PRESET_CUSTOM;
-        LOG(INFO) << __func__ << " succeed with " << ::android::internal::ToString(bandLevels);
     } else {
         LOG(ERROR) << __func__ << " failed with " << ::android::internal::ToString(bandLevels);
     }
@@ -502,14 +460,11 @@
 std::vector<int32_t> BundleContext::getEqualizerCenterFreqs() {
     std::vector<int32_t> freqs;
     LVM_ControlParams_t params;
-    {
-        std::lock_guard lg(mMutex);
-        /* Get the current settings */
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params), freqs,
-                        " getControlParamFailed");
-        for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-            freqs.push_back((int32_t)params.pEQNB_BandDefinition[i].Frequency * 1000);
-        }
+    /* Get the current settings */
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params), freqs,
+                    " getControlParamFailed");
+    for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+        freqs.push_back((int32_t)params.pEQNB_BandDefinition[i].Frequency * 1000);
     }
 
     return freqs;
@@ -533,68 +488,59 @@
     }
 
     LVM_ControlParams_t params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-            params.pEQNB_BandDefinition[i].Frequency = lvm::kPresetsFrequencies[i];
-            params.pEQNB_BandDefinition[i].QFactor = lvm::kPresetsQFactors[i];
-            params.pEQNB_BandDefinition[i].Gain =
-                    tempLevel[i] > 0 ? (tempLevel[i] + 50) / 100 : (tempLevel[i] - 50) / 100;
-        }
-
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+        params.pEQNB_BandDefinition[i].Frequency = lvm::kPresetsFrequencies[i];
+        params.pEQNB_BandDefinition[i].QFactor = lvm::kPresetsQFactors[i];
+        params.pEQNB_BandDefinition[i].Gain =
+                tempLevel[i] > 0 ? (tempLevel[i] + 50) / 100 : (tempLevel[i] - 50) / 100;
     }
-    mBandGainmB = tempLevel;
-    LOG(DEBUG) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGainmB)
-               << "mdB";
 
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
+    mBandGainmB = tempLevel;
     return RetCode::SUCCESS;
 }
 
 RetCode BundleContext::setBassBoostStrength(int strength) {
     // Update Control Parameter
     LVM_ControlParams_t params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        params.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);
-        params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+    params.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);
+    params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
 
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-    }
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
     mBassStrengthSaved = strength;
-    LOG(INFO) << __func__ << " success with strength " << strength;
     return limitLevel();
 }
 
 RetCode BundleContext::setVolumeLevel(float level) {
     if (mMuteEnabled) {
-        mLevelSaved = level;
+        mLevelSaveddB = level;
     } else {
-        mVolume = level;
+        mVolumedB = level;
     }
-    LOG(INFO) << __func__ << " success with level " << level;
     return limitLevel();
 }
 
 float BundleContext::getVolumeLevel() const {
-    return (mMuteEnabled ? mLevelSaved : mVolume);
+    return (mMuteEnabled ? mLevelSaveddB : mVolumedB);
 }
 
 RetCode BundleContext::setVolumeMute(bool mute) {
     mMuteEnabled = mute;
     if (mMuteEnabled) {
-        mLevelSaved = mVolume;
-        mVolume = -96;
+        mLevelSaveddB = mVolumedB;
+        mVolumedB = -96;
     } else {
-        mVolume = mLevelSaved;
+        mVolumedB = mLevelSaveddB;
     }
     return limitLevel();
 }
@@ -602,29 +548,55 @@
 RetCode BundleContext::setVirtualizerStrength(int strength) {
     // Update Control Parameter
     LVM_ControlParams_t params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        params.CS_EffectLevel = ((strength * 32767) / 1000);
+    params.CS_EffectLevel = ((strength * 32767) / 1000);
 
-        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-    }
+    RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
 
     mVirtStrengthSaved = strength;
-    LOG(INFO) << __func__ << " success with strength " << strength;
     return limitLevel();
 }
 
 
 RetCode BundleContext::setForcedDevice(
         const ::aidl::android::media::audio::common::AudioDeviceDescription& device) {
-    RETURN_VALUE_IF(true != isDeviceSupportedVirtualizer({device}), RetCode::ERROR_EFFECT_LIB_ERROR,
-                    " deviceNotSupportVirtualizer");
-    mForceDevice = device;
-    return RetCode::SUCCESS;
+    RetCode ret = RetCode::SUCCESS;
+    bool enableVirtualizer = mType == lvm::BundleEffectType::VIRTUALIZER && mEnabled;
+
+    if (isDeviceSupportedVirtualizer({device})) {
+        mVirtualizerForcedDevice = device;
+    } else {
+        // disabling forced virtualization mode
+        AudioDeviceDescription noneDevice;
+        if (device != noneDevice) {
+            // Unsupported device: behave like a reset of the forced mode, but report an error.
+            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
+        }
+        // verify whether the virtualization should be enabled or disabled
+        if (!isDeviceSupportedVirtualizer(mOutputDevice)) {
+            enableVirtualizer = false;
+        }
+        mVirtualizerForcedDevice = noneDevice;
+    }
+
+    if (enableVirtualizer) {
+        if (mVirtualizerTempDisabled) {
+            LOG(VERBOSE) << __func__ << " re-enable virtualizer";
+            enableOperatingMode();
+            mVirtualizerTempDisabled = false;
+        }
+    } else {
+        if (!mVirtualizerTempDisabled) {
+            LOG(VERBOSE) << __func__ << " disable virtualizer";
+            disableOperatingMode();
+            mVirtualizerTempDisabled = true;
+        }
+    }
+
+    return ret;
 }
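A minimal usage sketch of the cases handled by setForcedDevice() above (illustrative only; `ctx` is a hypothetical BundleContext for a VIRTUALIZER instance, and `headsetDevice` is assumed to be a device for which isDeviceSupportedVirtualizer() returns true):

    ctx.setForcedDevice(headsetDevice);             // supported: stored as forced device, returns SUCCESS
    ctx.setForcedDevice(AudioDeviceDescription{});  // "none": clears forced mode, returns SUCCESS
    ctx.setForcedDevice(unsupportedDevice);         // clears forced mode but returns ERROR_ILLEGAL_PARAMETER

In every case the virtualizer stage itself is then switched on or off, based on whether the effect is enabled and whether either the forced device or the current output device supports virtualization.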
 
 RetCode BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
@@ -747,7 +719,7 @@
     return angles;
 }
 
-IEffect::Status BundleContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status BundleContext::process(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!in, status, "nullInput");
     RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -760,29 +732,24 @@
     auto frameSize = getInputFrameSize();
     RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
 
-    LOG(DEBUG) << __func__ << " start processing";
     if ((mEffectProcessCalled & 1 << int(mType)) != 0) {
         const int undrainedEffects = mEffectInDrain & ~mEffectProcessCalled;
         if ((undrainedEffects & 1 << int(lvm::BundleEffectType::EQUALIZER)) != 0) {
-            LOG(DEBUG) << "Draining EQUALIZER";
             mSamplesToExitCountEq = 0;
             --mNumberEffectsEnabled;
             mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
         }
         if ((undrainedEffects & 1 << int(lvm::BundleEffectType::BASS_BOOST)) != 0) {
-            LOG(DEBUG) << "Draining BASS_BOOST";
             mSamplesToExitCountBb = 0;
             --mNumberEffectsEnabled;
             mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
         }
         if ((undrainedEffects & 1 << int(lvm::BundleEffectType::VIRTUALIZER)) != 0) {
-            LOG(DEBUG) << "Draining VIRTUALIZER";
             mSamplesToExitCountVirt = 0;
             --mNumberEffectsEnabled;
             mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
         }
         if ((undrainedEffects & 1 << int(lvm::BundleEffectType::VOLUME)) != 0) {
-            LOG(DEBUG) << "Draining VOLUME";
             --mNumberEffectsEnabled;
             mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
         }
@@ -800,7 +767,6 @@
                         mNumberEffectsEnabled--;
                         mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
                     }
-                    LOG(DEBUG) << "Effect_process() this is the last frame for EQUALIZER";
                 }
                 break;
             case lvm::BundleEffectType::BASS_BOOST:
@@ -813,7 +779,6 @@
                         mNumberEffectsEnabled--;
                         mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
                     }
-                    LOG(DEBUG) << "Effect_process() this is the last frame for BASS_BOOST";
                 }
                 break;
             case lvm::BundleEffectType::VIRTUALIZER:
@@ -826,7 +791,6 @@
                         mNumberEffectsEnabled--;
                         mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
                     }
-                    LOG(DEBUG) << "Effect_process() this is the last frame for VIRTUALIZER";
                 }
                 break;
             case lvm::BundleEffectType::VOLUME:
@@ -835,14 +799,13 @@
                     mNumberEffectsEnabled--;
                     mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
                 }
-                LOG(DEBUG) << "Effect_process() LVM_VOLUME Effect is not enabled";
                 break;
         }
     }
     if (isDataAvailable) {
         mNumberEffectsCalled++;
     }
-    bool accumulate = false;
+
     if (mNumberEffectsCalled >= mNumberEffectsEnabled) {
         // We expect the # effects called to be equal to # effects enabled in sequence (including
         // draining effects).  Warn if this is not the case due to inconsistent calls.
@@ -850,37 +813,33 @@
                  "%s Number of effects called %d is greater than number of effects enabled %d",
                  __func__, mNumberEffectsCalled, mNumberEffectsEnabled);
         mEffectProcessCalled = 0;  // reset our consistency check.
-        if (!isDataAvailable) {
-            LOG(DEBUG) << "Effect_process() processing last frame";
-        }
         mNumberEffectsCalled = 0;
-        float* outTmp = (accumulate ? getWorkBuffer() : out);
-        /* Process the samples */
-        LVM_ReturnStatus_en lvmStatus;
-        {
-            std::lock_guard lg(mMutex);
-
-            lvmStatus = LVM_Process(mInstance, in, outTmp, inputFrameCount, 0);
+        int frames = samples * sizeof(float) / frameSize;
+        int bufferIndex = 0;
+        // The LVM library processes at most int16_t max frames per call, and the block
+        // size must be a multiple of kBlockSizeMultiple.
+        constexpr int kBlockSizeMultiple = 4;
+        constexpr int kMaxBlockFrames =
+                (std::numeric_limits<int16_t>::max() / kBlockSizeMultiple) * kBlockSizeMultiple;
+        while (frames > 0) {
+            /* Process the samples */
+            LVM_ReturnStatus_en lvmStatus;
+            int processFrames = std::min(frames, kMaxBlockFrames);
+            lvmStatus = LVM_Process(mInstance, in + bufferIndex, out + bufferIndex,
+                                    processFrames, 0);
             if (lvmStatus != LVM_SUCCESS) {
-                LOG(ERROR) << __func__ << lvmStatus;
+                LOG(ERROR) << "LVM_Process failed with error: " << lvmStatus;
                 return {EX_UNSUPPORTED_OPERATION, 0, 0};
             }
-            if (accumulate) {
-                for (int i = 0; i < samples; i++) {
-                    out[i] += outTmp[i];
-                }
-            }
+            frames -= processFrames;
+            int processedSize = processFrames * frameSize / sizeof(float);
+            bufferIndex += processedSize;
         }
     } else {
         for (int i = 0; i < samples; i++) {
-            if (accumulate) {
-                out[i] += in[i];
-            } else {
-                out[i] = in[i];
-            }
+            out[i] = in[i];
         }
     }
-    LOG(DEBUG) << __func__ << " done processing";
     return {STATUS_OK, samples, samples};
 }
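The block splitting above bounds every LVM_Process() call to a size the library accepts. A standalone sketch of the same chunking arithmetic, assuming interleaved float frames (the helper name and callback are hypothetical):

    #include <algorithm>
    #include <cstdint>
    #include <limits>

    // Largest per-call frame count: at most int16_t max, rounded down to a
    // multiple of 4, i.e. (32767 / 4) * 4 = 32764 frames.
    constexpr int kBlockSizeMultiple = 4;
    constexpr int kMaxBlockFrames =
            (std::numeric_limits<int16_t>::max() / kBlockSizeMultiple) * kBlockSizeMultiple;

    // Process `frames` frames of interleaved float audio in bounded chunks.
    // processBlock stands in for LVM_Process(); for float PCM the per-frame
    // advance in samples equals the channel count (frameSize / sizeof(float)).
    template <typename ProcessBlock>
    bool processInChunks(const float* in, float* out, int frames, int channels,
                         ProcessBlock processBlock) {
        int offset = 0;  // running offset, in float samples
        while (frames > 0) {
            const int blockFrames = std::min(frames, kMaxBlockFrames);
            if (!processBlock(in + offset, out + offset, blockFrames)) return false;
            frames -= blockFrames;
            offset += blockFrames * channels;
        }
        return true;
    }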
 
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index 809f402..e5ab40d 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -17,7 +17,6 @@
 #pragma once
 
 #include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
 #include <array>
 #include <cstddef>
 
@@ -36,9 +35,9 @@
     void deInit();
     lvm::BundleEffectType getBundleType() const { return mType; }
 
-    RetCode enable();
+    RetCode enable() override;
     RetCode enableOperatingMode();
-    RetCode disable();
+    RetCode disable() override;
     RetCode disableOperatingMode();
 
     bool isDeviceSupportedBassBoost(
@@ -77,7 +76,7 @@
     RetCode setForcedDevice(
             const ::aidl::android::media::audio::common::AudioDeviceDescription& device);
     aidl::android::media::audio::common::AudioDeviceDescription getForcedDevice() const {
-        return mForceDevice;
+        return mVirtualizerForcedDevice;
     }
     std::vector<Virtualizer::ChannelAngle> getSpeakerAngles(
             const Virtualizer::SpeakerAnglesPayload payload);
@@ -85,17 +84,14 @@
     RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
     Parameter::VolumeStereo getVolumeStereo() override { return {1.0f, 1.0f}; }
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status process(float* in, float* out, int samples);
 
     IEffect::Status processEffect(float* in, float* out, int sampleToProcess);
 
   private:
-    std::mutex mMutex;
     const lvm::BundleEffectType mType;
     bool mEnabled = false;
-    LVM_Handle_t mInstance GUARDED_BY(mMutex);
-
-    aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
+    LVM_Handle_t mInstance;
 
     int mSamplesPerSecond = 0;
     int mSamplesToExitCountEq = 0;
@@ -122,10 +118,10 @@
     // Virtualizer
     int mVirtStrengthSaved = 0; /* Conversion between Get/Set */
     bool mVirtualizerTempDisabled = false;
-    ::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice;
+    ::aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
     // Volume
-    float mLevelSaved = 0; /* for when mute is set, level must be saved */
-    float mVolume = 0;
+    float mLevelSaveddB = 0; /* for when mute is set, level must be saved */
+    float mVolumedB = 0;
     bool mMuteEnabled = false; /* Must store as mute = -96dB level */
 
     RetCode initControlParameter(LVM_ControlParams_t& params) const;
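With the per-context mutex removed, mInstance no longer carries a GUARDED_BY annotation; serialization presumably falls to the owning effect's mImplMutex (the REQUIRES(mImplMutex) annotation on effectProcessImpl in EffectBundleAidl.h below remains). As a generic reminder of what the dropped annotations enforce under -Wthread-safety (an illustrative example, not code from this tree):

    #include <mutex>
    #include <android-base/thread_annotations.h>

    class Counter {
      public:
        void increment() {
            std::lock_guard lg(mMutex);
            ++mValue;  // OK: the analysis sees mMutex held here.
        }
        // Callers must already hold mMutex; checked at compile time.
        int valueLocked() const REQUIRES(mMutex) { return mValue; }

      private:
        mutable std::mutex mMutex;
        int mValue GUARDED_BY(mMutex) = 0;  // unguarded access triggers a -Wthread-safety warning
    };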
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
index 143329d..e5373f3 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -65,9 +65,9 @@
          {5, 3, -1, 3, 5}}}; /* Rock Preset */
 
 static const std::vector<Equalizer::Preset> kEqPresets = {
-        {0, "Normal"},      {1, "Classical"}, {2, "Dance"}, {3, "Flat"}, {4, "Folk"},
-        {5, "Heavy Metal"}, {6, "Hip Hop"},   {7, "Jazz"},  {8, "Pop"},  {9, "Rock"}};
-
+        {-1, "Custom"}, {0, "Normal"}, {1, "Classical"},   {2, "Dance"},
+        {3, "Flat"},    {4, "Folk"},   {5, "Heavy Metal"}, {6, "Hip Hop"},
+        {7, "Jazz"},    {8, "Pop"},    {9, "Rock"}};
 
 const std::vector<Range::EqualizerRange> kEqRanges = {
         MAKE_RANGE(Equalizer, preset, 0, MAX_NUM_PRESETS - 1),
@@ -129,8 +129,7 @@
                    .implementor = "NXP Software Ltd."},
         .capability = kVirtualizerCap};
 
-static const std::vector<Range::VolumeRange> kVolumeRanges = {
-        MAKE_RANGE(Volume, levelDb, -9600, 0)};
+static const std::vector<Range::VolumeRange> kVolumeRanges = {MAKE_RANGE(Volume, levelDb, -96, 0)};
 static const Capability kVolumeCap = {.range = kVolumeRanges};
 static const std::string kVolumeEffectName = "Volume";
 static const Descriptor kVolumeDesc = {
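The Volume range change above appears to be a unit fix rather than a narrowing: Volume::levelDb is expressed in dB, while the previous bounds look like millibels, and -9600 mB / 100 = -96 dB covers the same span. This is consistent with the mVolumedB / mLevelSaveddB renames in BundleContext above.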
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index 257e972..2a81673 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -55,7 +55,6 @@
     }
     if (instanceSpp) {
         *instanceSpp = ndk::SharedRefBase::make<EffectBundleAidl>(*uuid);
-        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
         return EX_NONE;
     } else {
         LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -83,7 +82,6 @@
 namespace aidl::android::hardware::audio::effect {
 
 EffectBundleAidl::EffectBundleAidl(const AudioUuid& uuid) {
-    LOG(DEBUG) << __func__ << uuid.toString();
     if (uuid == getEffectImplUuidEqualizerBundle()) {
         mType = lvm::BundleEffectType::EQUALIZER;
         mDescriptor = &lvm::kEqualizerDesc;
@@ -107,12 +105,10 @@
 
 EffectBundleAidl::~EffectBundleAidl() {
     cleanUp();
-    LOG(DEBUG) << __func__;
 }
 
 ndk::ScopedAStatus EffectBundleAidl::getDescriptor(Descriptor* _aidl_return) {
     RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
-    LOG(DEBUG) << _aidl_return->toString();
     *_aidl_return = *mDescriptor;
     return ndk::ScopedAStatus::ok();
 }
@@ -154,7 +150,6 @@
 }
 
 ndk::ScopedAStatus EffectBundleAidl::setParameterSpecific(const Parameter::Specific& specific) {
-    LOG(DEBUG) << __func__ << " specific " << specific.toString();
     RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
 
     auto tag = specific.getTag();
@@ -433,32 +428,11 @@
     return RetCode::SUCCESS;
 }
 
-ndk::ScopedAStatus EffectBundleAidl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 // Processing method running in EffectWorker thread.
 IEffect::Status EffectBundleAidl::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, sampleToProcess);
+    return mContext->process(in, out, sampleToProcess);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
index 429e941..479579b 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
@@ -49,8 +49,6 @@
     IEffect::Status effectProcessImpl(float* in, float* out, int samples)
             REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
-
     std::string getEffectName() override { return *mEffectName; }
 
   private:
diff --git a/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h b/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
index d31763b..ea1a8fe 100644
--- a/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
+++ b/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
@@ -21,7 +21,6 @@
 #include <unordered_map>
 
 #include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
 
 #include "BundleContext.h"
 #include "BundleTypes.h"
@@ -41,11 +40,6 @@
         return instance;
     }
 
-    bool isSessionIdExist(int sessionId) {
-        std::lock_guard lg(mMutex);
-        return mSessionMap.count(sessionId);
-    }
-
     static bool findBundleTypeInList(std::vector<std::shared_ptr<BundleContext>>& list,
                                      const lvm::BundleEffectType& type, bool remove = false) {
         auto itor = std::find_if(list.begin(), list.end(),
@@ -69,8 +63,7 @@
     std::shared_ptr<BundleContext> createSession(const lvm::BundleEffectType& type, int statusDepth,
                                                  const Parameter::Common& common) {
         int sessionId = common.session;
-        LOG(DEBUG) << __func__ << type << " with sessionId " << sessionId;
-        std::lock_guard lg(mMutex);
+        LOG(VERBOSE) << __func__ << type << " with sessionId " << sessionId;
         if (mSessionMap.count(sessionId) == 0 && mSessionMap.size() >= MAX_BUNDLE_SESSIONS) {
             LOG(ERROR) << __func__ << " exceed max bundle session";
             return nullptr;
@@ -97,8 +90,7 @@
     }
 
     void releaseSession(const lvm::BundleEffectType& type, int sessionId) {
-        LOG(DEBUG) << __func__ << type << " sessionId " << sessionId;
-        std::lock_guard lg(mMutex);
+        LOG(VERBOSE) << __func__ << type << " sessionId " << sessionId;
         if (mSessionMap.count(sessionId)) {
             auto& list = mSessionMap[sessionId];
             if (!findBundleTypeInList(list, type, true /* remove */)) {
@@ -112,11 +104,9 @@
     }
 
   private:
-    // Lock for mSessionMap access.
-    std::mutex mMutex;
     // Max session number supported.
     static constexpr int MAX_BUNDLE_SESSIONS = 32;
     std::unordered_map<int /* session ID */, std::vector<std::shared_ptr<BundleContext>>>
-            mSessionMap GUARDED_BY(mMutex);
+            mSessionMap;
 };
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index 62837b9..5b48045 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -1,5 +1,6 @@
 // music bundle wrapper
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_lvm_wrapper_license",
     ],
@@ -51,8 +52,8 @@
     local_include_dirs: ["Bundle"],
 
     header_libs: [
-        "libhardware_headers",
         "libaudioeffects",
+        "libhardware_headers",
     ],
 }
 
@@ -92,8 +93,8 @@
     export_include_dirs: ["Reverb"],
 
     header_libs: [
-        "libhardware_headers",
         "libaudioeffects",
+        "libhardware_headers",
     ],
 
     sanitize: {
@@ -104,9 +105,9 @@
 cc_library_shared {
     name: "libbundleaidl",
     srcs: [
+        ":effectCommonFile",
         "Aidl/BundleContext.cpp",
         "Aidl/EffectBundleAidl.cpp",
-        ":effectCommonFile",
     ],
     static_libs: ["libmusicbundle"],
     defaults: [
@@ -124,8 +125,8 @@
         "libstagefright_foundation",
     ],
     cflags: [
-        "-Wthread-safety",
         "-DBACKEND_NDK",
+        "-Wthread-safety",
     ],
     relative_install_path: "soundfx",
     visibility: [
@@ -136,9 +137,9 @@
 cc_library_shared {
     name: "libreverbaidl",
     srcs: [
-        "Reverb/aidl/ReverbContext.cpp",
-        "Reverb/aidl/EffectReverb.cpp",
         ":effectCommonFile",
+        "Reverb/aidl/EffectReverb.cpp",
+        "Reverb/aidl/ReverbContext.cpp",
     ],
     static_libs: ["libreverb"],
     defaults: [
@@ -150,8 +151,8 @@
         "libhardware_headers",
     ],
     shared_libs: [
-        "libbase",
         "libaudioutils",
+        "libbase",
         "libcutils",
         "liblog",
     ],
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
index f9afe69..201c659 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -55,7 +55,6 @@
     }
     if (instanceSpp) {
         *instanceSpp = ndk::SharedRefBase::make<EffectReverb>(*uuid);
-        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
         return EX_NONE;
     } else {
         LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -82,7 +81,6 @@
 namespace aidl::android::hardware::audio::effect {
 
 EffectReverb::EffectReverb(const AudioUuid& uuid) {
-    LOG(DEBUG) << __func__ << uuid.toString();
     if (uuid == getEffectImplUuidAuxEnvReverb()) {
         mType = lvm::ReverbEffectType::AUX_ENV;
         mDescriptor = &lvm::kAuxEnvReverbDesc;
@@ -106,18 +104,16 @@
 
 EffectReverb::~EffectReverb() {
     cleanUp();
-    LOG(DEBUG) << __func__;
 }
 
 ndk::ScopedAStatus EffectReverb::getDescriptor(Descriptor* _aidl_return) {
     RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
-    LOG(DEBUG) << _aidl_return->toString();
     *_aidl_return = *mDescriptor;
     return ndk::ScopedAStatus::ok();
 }
 
 ndk::ScopedAStatus EffectReverb::setParameterSpecific(const Parameter::Specific& specific) {
-    LOG(DEBUG) << __func__ << " specific " << specific.toString();
+    LOG(VERBOSE) << __func__ << " specific " << specific.toString();
     RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
 
     auto tag = specific.getTag();
@@ -365,32 +361,11 @@
     return RetCode::SUCCESS;
 }
 
-ndk::ScopedAStatus EffectReverb::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 // Processing method running in EffectWorker thread.
 IEffect::Status EffectReverb::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, sampleToProcess);
+    return mContext->process(in, out, sampleToProcess);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
index e0771a1..4acac1d 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
@@ -42,8 +42,6 @@
     IEffect::Status effectProcessImpl(float* in, float* out, int samples)
             REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
-
     std::string getEffectName() override { return *mEffectName; }
 
   private:
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 468b268..3ae3edc 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -19,6 +19,7 @@
 #define LOG_TAG "ReverbContext"
 #include <android-base/logging.h>
 #include <Utils.h>
+#include <audio_utils/primitives.h>
 
 #include "ReverbContext.h"
 #include "VectorArithmetic.h"
@@ -68,24 +69,21 @@
 
     // allocate lvm reverb instance
     LVREV_ReturnStatus_en status = LVREV_SUCCESS;
-    {
-        std::lock_guard lg(mMutex);
-        LVREV_InstanceParams_st params = {
-                .MaxBlockSize = lvm::kMaxCallSize,
-                // Max format, could be mono during process
-                .SourceFormat = LVM_STEREO,
-                .NumDelays = LVREV_DELAYLINES_4,
-        };
-        /* Init sets the instance handle */
-        status = LVREV_GetInstanceHandle(&mInstance, &params);
-        GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_GetInstanceHandleFailed");
+    LVREV_InstanceParams_st params = {
+            .MaxBlockSize = lvm::kMaxCallSize,
+            // Max format, could be mono during process
+            .SourceFormat = LVM_STEREO,
+            .NumDelays = LVREV_DELAYLINES_4,
+    };
+    /* Init sets the instance handle */
+    status = LVREV_GetInstanceHandle(&mInstance, &params);
+    GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_GetInstanceHandleFailed");
 
-        // set control
-        LVREV_ControlParams_st controlParams;
-        initControlParameter(controlParams);
-        status = LVREV_SetControlParameters(mInstance, &controlParams);
-        GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_SetControlParametersFailed");
-    }
+    // set control
+    LVREV_ControlParams_st controlParams;
+    initControlParameter(controlParams);
+    status = LVREV_SetControlParameters(mInstance, &controlParams);
+    GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_SetControlParametersFailed");
 
     return RetCode::SUCCESS;
 
@@ -95,7 +93,6 @@
 }
 
 void ReverbContext::deInit() {
-    std::lock_guard lg(mMutex);
     if (mInstance) {
         LVREV_FreeInstance(mInstance);
         mInstance = nullptr;
@@ -143,19 +140,16 @@
 RetCode ReverbContext::setEnvironmentalReverbRoomLevel(int roomLevel) {
     // Update Control Parameter
     LVREV_ControlParams_st params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        // Sum of room and reverb level controls
-        // needs to subtract max levels for both room level and reverb level
-        int combinedLevel = (roomLevel + mLevel) - lvm::kMaxReverbLevel;
-        params.Level = convertLevel(combinedLevel);
+    // Sum of room and reverb level controls
+    // needs to subtract max levels for both room level and reverb level
+    int combinedLevel = (roomLevel + mLevel) - lvm::kMaxReverbLevel;
+    params.Level = convertLevel(combinedLevel);
 
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-    }
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
     mRoomLevel = roomLevel;
     return RetCode::SUCCESS;
 }
@@ -163,16 +157,13 @@
 RetCode ReverbContext::setEnvironmentalReverbRoomHfLevel(int roomHfLevel) {
     // Update Control Parameter
     LVREV_ControlParams_st params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        params.LPF = convertHfLevel(roomHfLevel);
+    params.LPF = convertHfLevel(roomHfLevel);
 
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-    }
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
     mRoomHfLevel = roomHfLevel;
     return RetCode::SUCCESS;
 }
@@ -185,17 +176,15 @@
 
     // Update Control Parameter
     LVREV_ControlParams_st params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        params.T60 = (LVM_UINT16)time;
-        mSamplesToExitCount = (params.T60 * mCommon.input.base.sampleRate) / 1000;
+    params.T60 = (LVM_UINT16)time;
+    mSamplesToExitCount = (params.T60 * mCommon.input.base.sampleRate) / 1000;
 
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-    }
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
     mDecayTime = time;
     return RetCode::SUCCESS;
 }
@@ -203,16 +192,13 @@
 RetCode ReverbContext::setEnvironmentalReverbDecayHfRatio(int decayHfRatio) {
     // Update Control Parameter
     LVREV_ControlParams_st params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        params.Damping = (LVM_INT16)(decayHfRatio / 20);
+    params.Damping = (LVM_INT16)(decayHfRatio / 20);
 
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-    }
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
     mDecayHfRatio = decayHfRatio;
     return RetCode::SUCCESS;
 }
@@ -220,19 +206,17 @@
 RetCode ReverbContext::setEnvironmentalReverbLevel(int level) {
     // Update Control Parameter
     LVREV_ControlParams_st params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        // Sum of room and reverb level controls
-        // needs to subtract max levels for both room level and level
-        int combinedLevel = (level + mRoomLevel) - lvm::kMaxReverbLevel;
-        params.Level = convertLevel(combinedLevel);
+    // Sum of room and reverb level controls
+    // needs to subtract max levels for both room level and level
+    int combinedLevel = (level + mRoomLevel) - lvm::kMaxReverbLevel;
+    params.Level = convertLevel(combinedLevel);
 
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-    }
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
     mLevel = level;
     return RetCode::SUCCESS;
 }
@@ -245,16 +229,14 @@
 RetCode ReverbContext::setEnvironmentalReverbDiffusion(int diffusion) {
     // Update Control Parameter
     LVREV_ControlParams_st params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        params.Density = (LVM_INT16)(diffusion / 10);
+    params.Density = (LVM_INT16)(diffusion / 10);
 
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-    }
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
     mDiffusion = diffusion;
     return RetCode::SUCCESS;
 }
@@ -262,16 +244,14 @@
 RetCode ReverbContext::setEnvironmentalReverbDensity(int density) {
     // Update Control Parameter
     LVREV_ControlParams_st params;
-    {
-        std::lock_guard lg(mMutex);
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
 
-        params.RoomSize = (LVM_INT16)(((density * 99) / 1000) + 1);
+    params.RoomSize = (LVM_INT16)(((density * 99) / 1000) + 1);
 
-        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
-                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-    }
+    RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                    RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
     mDensity = density;
     return RetCode::SUCCESS;
 }
@@ -352,7 +332,7 @@
     return kDefaultLPF;
 }
 
-IEffect::Status ReverbContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status ReverbContext::process(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!in, status, "nullInput");
     RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -362,61 +342,83 @@
     RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
     RETURN_VALUE_IF(0 == getInputFrameSize(), status, "zeroFrameSize");
 
-    LOG(DEBUG) << __func__ << " start processing";
-    std::lock_guard lg(mMutex);
-
     int channels = ::aidl::android::hardware::audio::common::getChannelCount(
             mCommon.input.base.channelMask);
     int outChannels = ::aidl::android::hardware::audio::common::getChannelCount(
             mCommon.output.base.channelMask);
     int frameCount = mCommon.input.frameCount;
 
+    if (mBypass) {
+        if (isAuxiliary()) {
+            memset(out, 0, getOutputFrameSize() * frameCount);
+        } else {
+            memcpy_to_float_from_float_with_clamping(out, in, samples, 1);
+        }
+        return {STATUS_OK, samples, outChannels * frameCount};
+    }
+
     // Reverb only effects the stereo channels in multichannel source.
     if (channels < 1 || channels > LVM_MAX_CHANNELS) {
         LOG(ERROR) << __func__ << " process invalid PCM channels " << channels;
         return status;
     }
 
-    std::vector<float> inFrames(samples);
-    std::vector<float> outFrames(frameCount * FCC_2);
+    std::vector<float> inputSamples;
+    std::vector<float> outputSamples(frameCount * FCC_2);
 
     if (isPreset() && mNextPreset != mPreset) {
         loadPreset();
     }
 
     if (isAuxiliary()) {
-        inFrames.assign(in, in + samples);
+        inputSamples.resize(samples);
+        inputSamples.assign(in, in + samples);
     } else {
-        // mono input is duplicated
+        // Resizing to stereo is required to duplicate mono input
+        inputSamples.resize(frameCount * FCC_2);
         if (channels >= FCC_2) {
             for (int i = 0; i < frameCount; i++) {
-                inFrames[FCC_2 * i] = in[channels * i] * kSendLevel;
-                inFrames[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
+                inputSamples[FCC_2 * i] = in[channels * i] * kSendLevel;
+                inputSamples[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
             }
         } else {
             for (int i = 0; i < frameCount; i++) {
-                inFrames[FCC_2 * i] = inFrames[FCC_2 * i + 1] = in[i] * kSendLevel;
+                inputSamples[FCC_2 * i] = inputSamples[FCC_2 * i + 1] = in[i] * kSendLevel;
             }
         }
     }
 
     if (isPreset() && mPreset == PresetReverb::Presets::NONE) {
-        std::fill(outFrames.begin(), outFrames.end(), 0);  // always stereo here
+        std::fill(outputSamples.begin(), outputSamples.end(), 0);  // always stereo here
     } else {
         if (!mEnabled && mSamplesToExitCount > 0) {
-            std::fill(outFrames.begin(), outFrames.end(), 0);
-            LOG(VERBOSE) << "Zeroing " << channels << " samples per frame at the end of call ";
+            std::fill(outputSamples.begin(), outputSamples.end(), 0);
         }
+        int inputBufferIndex = 0;
+        int outputBufferIndex = 0;
+
+        // The LVREV library processes at most int16_t max frames per call.
+        constexpr int kMaxBlockFrames = std::numeric_limits<int16_t>::max();
+        const auto inputFrameSize = getInputFrameSize();
+        const auto outputFrameSize = getOutputFrameSize();
 
         /* Process the samples, producing a stereo output */
-        LVREV_ReturnStatus_en lvrevStatus =
-                LVREV_Process(mInstance,        /* Instance handle */
-                              inFrames.data(),  /* Input buffer */
-                              outFrames.data(), /* Output buffer */
-                              frameCount);      /* Number of samples to read */
-        if (lvrevStatus != LVREV_SUCCESS) {
-            LOG(ERROR) << __func__ << lvrevStatus;
-            return {EX_UNSUPPORTED_OPERATION, 0, 0};
+        for (int fc = frameCount; fc > 0;) {
+            int processFrames = std::min(fc, kMaxBlockFrames);
+            LVREV_ReturnStatus_en lvrevStatus =
+                    LVREV_Process(mInstance,                            /* Instance handle */
+                                  inputSamples.data() + inputBufferIndex,   /* Input buffer */
+                                  outputSamples.data() + outputBufferIndex, /* Output buffer */
+                                  processFrames); /* Number of samples to process */
+            if (lvrevStatus != LVREV_SUCCESS) {
+                LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
+                return {EX_UNSUPPORTED_OPERATION, 0, 0};
+            }
+
+            fc -= processFrames;
+
+            inputBufferIndex += processFrames * inputFrameSize / sizeof(float);
+            outputBufferIndex += processFrames * outputFrameSize / sizeof(float);
         }
     }
     // Convert to 16 bits
@@ -426,14 +428,14 @@
         if (channels >= FCC_2) {
             for (int i = 0; i < frameCount; i++) {
                 // Mix with dry input
-                outFrames[FCC_2 * i] += in[channels * i];
-                outFrames[FCC_2 * i + 1] += in[channels * i + 1];
+                outputSamples[FCC_2 * i] += in[channels * i];
+                outputSamples[FCC_2 * i + 1] += in[channels * i + 1];
             }
         } else {
             for (int i = 0; i < frameCount; i++) {
                 // Mix with dry input
-                outFrames[FCC_2 * i] += in[i];
-                outFrames[FCC_2 * i + 1] += in[i];
+                outputSamples[FCC_2 * i] += in[i];
+                outputSamples[FCC_2 * i + 1] += in[i];
             }
         }
 
@@ -445,8 +447,8 @@
             float incr = (mVolume.right - vr) / frameCount;
 
             for (int i = 0; i < frameCount; i++) {
-                outFrames[FCC_2 * i] *= vl;
-                outFrames[FCC_2 * i + 1] *= vr;
+                outputSamples[FCC_2 * i] *= vl;
+                outputSamples[FCC_2 * i + 1] *= vr;
 
                 vl += incl;
                 vr += incr;
@@ -455,8 +457,8 @@
         } else if (volumeMode != VOLUME_OFF) {
             if (mVolume.left != kUnitVolume || mVolume.right != kUnitVolume) {
                 for (int i = 0; i < frameCount; i++) {
-                    outFrames[FCC_2 * i] *= mVolume.left;
-                    outFrames[FCC_2 * i + 1] *= mVolume.right;
+                    outputSamples[FCC_2 * i] *= mVolume.left;
+                    outputSamples[FCC_2 * i + 1] *= mVolume.right;
                 }
             }
             mPrevVolume = mVolume;
@@ -464,19 +466,10 @@
         }
     }
 
-    bool accumulate = false;
     if (outChannels > 2) {
-        // Accumulate if required
-        if (accumulate) {
-            for (int i = 0; i < frameCount; i++) {
-                out[outChannels * i] += outFrames[FCC_2 * i];
-                out[outChannels * i + 1] += outFrames[FCC_2 * i + 1];
-            }
-        } else {
-            for (int i = 0; i < frameCount; i++) {
-                out[outChannels * i] = outFrames[FCC_2 * i];
-                out[outChannels * i + 1] = outFrames[FCC_2 * i + 1];
-            }
+        for (int i = 0; i < frameCount; i++) {
+            out[outChannels * i] = outputSamples[FCC_2 * i];
+            out[outChannels * i + 1] = outputSamples[FCC_2 * i + 1];
         }
         if (!isAuxiliary()) {
             for (int i = 0; i < frameCount; i++) {
@@ -487,29 +480,15 @@
             }
         }
     } else {
-        if (accumulate) {
-            if (outChannels == FCC_1) {
-                for (int i = 0; i < frameCount; i++) {
-                    out[i] += ((outFrames[i * FCC_2] + outFrames[i * FCC_2 + 1]) * 0.5f);
-                }
-            } else {
-                for (int i = 0; i < frameCount * FCC_2; i++) {
-                    out[i] += outFrames[i];
-                }
-            }
+        if (outChannels == FCC_1) {
+            From2iToMono_Float(outputSamples.data(), out, frameCount);
         } else {
-            if (outChannels == FCC_1) {
-                From2iToMono_Float(outFrames.data(), out, frameCount);
-            } else {
-                for (int i = 0; i < frameCount * FCC_2; i++) {
-                    out[i] = outFrames[i];
-                }
+            for (int i = 0; i < frameCount * FCC_2; i++) {
+                out[i] = outputSamples[i];
             }
         }
     }
 
-    LOG(DEBUG) << __func__ << " done processing";
-
     if (!mEnabled && mSamplesToExitCount > 0) {
         // signed - unsigned will trigger integer overflow if result becomes negative.
         mSamplesToExitCount -= samples;
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
index d11a081..f55eac5 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
@@ -17,7 +17,6 @@
 #pragma once
 
 #include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
 #include <unordered_map>
 
 #include "ReverbTypes.h"
@@ -52,8 +51,8 @@
     RetCode init();
     void deInit();
 
-    RetCode enable();
-    RetCode disable();
+    RetCode enable() override;
+    RetCode disable() override;
 
     bool isAuxiliary();
     bool isPreset();
@@ -100,7 +99,7 @@
     }
     bool getReflectionsLevel() const { return mReflectionsLevelMb; }
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status process(float* in, float* out, int samples);
 
   private:
     static constexpr inline float kUnitVolume = 1;
@@ -158,10 +157,9 @@
              {-400, -600, 1800, 700, -2000, 30, -1400, 60, 1000, 1000}},
             {PresetReverb::Presets::PLATE, {-400, -200, 1300, 900, 0, 2, 0, 10, 1000, 750}}};
 
-    std::mutex mMutex;
     const lvm::ReverbEffectType mType;
     bool mEnabled = false;
-    LVREV_Handle_t mInstance GUARDED_BY(mMutex);
+    LVREV_Handle_t mInstance = LVM_NULL;
 
     int mRoomLevel = 0;
     int mRoomHfLevel = 0;
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index 994b061..44b7d97 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -1,5 +1,6 @@
 // audio preprocessing wrapper
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_preprocessing_license",
     ],
@@ -62,30 +63,30 @@
 cc_library_shared {
     name: "libpreprocessingaidl",
     srcs: [
-        "aidl/PreProcessingContext.cpp",
-        "aidl/EffectPreProcessing.cpp",
         ":effectCommonFile",
+        "aidl/EffectPreProcessing.cpp",
+        "aidl/PreProcessingContext.cpp",
     ],
     defaults: [
         "aidlaudioeffectservice_defaults",
     ],
     local_include_dirs: ["aidl"],
     shared_libs: [
+        "libaudioutils",
         "liblog",
         "libutils",
-        "libaudioutils",
     ],
     static_libs: [
         "webrtc_audio_processing",
     ],
     header_libs: [
-        "libwebrtc_absl_headers",
         "libaudioeffects",
         "libhardware_headers",
+        "libwebrtc_absl_headers",
     ],
     cflags: [
-        "-Wthread-safety",
         "-Wno-unused-parameter",
+        "-Wthread-safety",
     ],
     relative_install_path: "soundfx",
     visibility: [
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
index 7552804..4bc34e7 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
@@ -50,7 +50,6 @@
     }
     if (instanceSpp) {
         *instanceSpp = ndk::SharedRefBase::make<EffectPreProcessing>(*uuid);
-        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
         return EX_NONE;
     } else {
         LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -78,7 +77,6 @@
 namespace aidl::android::hardware::audio::effect {
 
 EffectPreProcessing::EffectPreProcessing(const AudioUuid& uuid) {
-    LOG(DEBUG) << __func__ << uuid.toString();
     if (uuid == getEffectImplUuidAcousticEchoCancelerSw()) {
         mType = PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION;
         mDescriptor = &kAcousticEchoCancelerDesc;
@@ -102,18 +100,16 @@
 
 EffectPreProcessing::~EffectPreProcessing() {
     cleanUp();
-    LOG(DEBUG) << __func__;
 }
 
 ndk::ScopedAStatus EffectPreProcessing::getDescriptor(Descriptor* _aidl_return) {
     RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
-    LOG(DEBUG) << _aidl_return->toString();
     *_aidl_return = *mDescriptor;
     return ndk::ScopedAStatus::ok();
 }
 
 ndk::ScopedAStatus EffectPreProcessing::setParameterSpecific(const Parameter::Specific& specific) {
-    LOG(DEBUG) << __func__ << " specific " << specific.toString();
+    LOG(VERBOSE) << __func__ << " specific " << specific.toString();
     RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
 
     auto tag = specific.getTag();
@@ -421,32 +417,11 @@
     return RetCode::SUCCESS;
 }
 
-ndk::ScopedAStatus EffectPreProcessing::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 // Processing method running in EffectWorker thread.
 IEffect::Status EffectPreProcessing::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, sampleToProcess);
+    return mContext->process(in, out, sampleToProcess);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
index 9ce5597..31f5737 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
@@ -43,8 +43,6 @@
     IEffect::Status effectProcessImpl(float* in, float* out, int samples)
             REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
-
     std::string getEffectName() override { return *mEffectName; }
 
   private:
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
index 2c44e5c..2d549ef 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
@@ -26,7 +26,6 @@
 using aidl::android::media::audio::common::AudioDeviceType;
 
 RetCode PreProcessingContext::init(const Parameter::Common& common) {
-    std::lock_guard lg(mMutex);
     webrtc::AudioProcessingBuilder apBuilder;
     mAudioProcessingModule = apBuilder.Create();
     if (mAudioProcessingModule == nullptr) {
@@ -64,7 +63,6 @@
 }
 
 RetCode PreProcessingContext::deInit() {
-    std::lock_guard lg(mMutex);
     mAudioProcessingModule = nullptr;
     mState = PRE_PROC_STATE_UNINITIALIZED;
     return RetCode::SUCCESS;
@@ -75,7 +73,6 @@
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
     int typeMsk = (1 << int(mType));
-    std::lock_guard lg(mMutex);
     // Check if effect is already enabled.
     if ((mEnabledMsk & typeMsk) == typeMsk) {
         return RetCode::ERROR_ILLEGAL_PARAMETER;
@@ -110,7 +107,6 @@
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
     int typeMsk = (1 << int(mType));
-    std::lock_guard lg(mMutex);
     // Check if effect is already disabled.
     if ((mEnabledMsk & typeMsk) != typeMsk) {
         return RetCode::ERROR_ILLEGAL_PARAMETER;
@@ -160,7 +156,6 @@
 
 RetCode PreProcessingContext::setAcousticEchoCancelerEchoDelay(int echoDelayUs) {
     mEchoDelayUs = echoDelayUs;
-    std::lock_guard lg(mMutex);
     mAudioProcessingModule->set_stream_delay_ms(mEchoDelayUs / 1000);
     return RetCode::SUCCESS;
 }
@@ -171,7 +166,6 @@
 
 RetCode PreProcessingContext::setAcousticEchoCancelerMobileMode(bool mobileMode) {
     mMobileMode = mobileMode;
-    std::lock_guard lg(mMutex);
     auto config = mAudioProcessingModule->GetConfig();
     config.echo_canceller.mobile_mode = mobileMode;
     mAudioProcessingModule->ApplyConfig(config);
@@ -184,7 +178,6 @@
 
 RetCode PreProcessingContext::setAutomaticGainControlV1TargetPeakLevel(int targetPeakLevel) {
     mTargetPeakLevel = targetPeakLevel;
-    std::lock_guard lg(mMutex);
     auto config = mAudioProcessingModule->GetConfig();
     config.gain_controller1.target_level_dbfs = -(mTargetPeakLevel / 100);
     mAudioProcessingModule->ApplyConfig(config);
@@ -197,7 +190,6 @@
 
 RetCode PreProcessingContext::setAutomaticGainControlV1MaxCompressionGain(int maxCompressionGain) {
     mMaxCompressionGain = maxCompressionGain;
-    std::lock_guard lg(mMutex);
     auto config = mAudioProcessingModule->GetConfig();
     config.gain_controller1.compression_gain_db = mMaxCompressionGain / 100;
     mAudioProcessingModule->ApplyConfig(config);
@@ -210,7 +202,6 @@
 
 RetCode PreProcessingContext::setAutomaticGainControlV1EnableLimiter(bool enableLimiter) {
     mEnableLimiter = enableLimiter;
-    std::lock_guard lg(mMutex);
     auto config = mAudioProcessingModule->GetConfig();
     config.gain_controller1.enable_limiter = mEnableLimiter;
     mAudioProcessingModule->ApplyConfig(config);
@@ -223,7 +214,6 @@
 
 RetCode PreProcessingContext::setAutomaticGainControlV2DigitalGain(int gain) {
     mDigitalGain = gain;
-    std::lock_guard lg(mMutex);
     auto config = mAudioProcessingModule->GetConfig();
     config.gain_controller2.fixed_digital.gain_db = mDigitalGain;
     mAudioProcessingModule->ApplyConfig(config);
@@ -256,7 +246,6 @@
 
 RetCode PreProcessingContext::setNoiseSuppressionLevel(NoiseSuppression::Level level) {
     mLevel = level;
-    std::lock_guard lg(mMutex);
     auto config = mAudioProcessingModule->GetConfig();
     config.noise_suppression.level =
             (webrtc::AudioProcessing::Config::NoiseSuppression::Level)level;
@@ -268,7 +257,7 @@
     return mLevel;
 }
 
-IEffect::Status PreProcessingContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status PreProcessingContext::process(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!in, status, "nullInput");
     RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -278,9 +267,6 @@
     RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
     RETURN_VALUE_IF(0 == getInputFrameSize(), status, "zeroFrameSize");
 
-    LOG(DEBUG) << __func__ << " start processing";
-    std::lock_guard lg(mMutex);
-
     mProcessedMsk |= (1 << int(mType));
 
     // webrtc implementation clear out was_stream_delay_set every time after ProcessStream() call
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.h b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
index 9ba1bbe..1b9b77b 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.h
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
@@ -17,7 +17,6 @@
 #pragma once
 
 #include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
 #include <audio_processing.h>
 #include <unordered_map>
 
@@ -37,18 +36,17 @@
     PreProcessingContext(int statusDepth, const Parameter::Common& common,
                          const PreProcessingEffectType& type)
         : EffectContext(statusDepth, common), mType(type) {
-        LOG(DEBUG) << __func__ << type;
         mState = PRE_PROC_STATE_UNINITIALIZED;
     }
-    ~PreProcessingContext() override { LOG(DEBUG) << __func__; }
+    ~PreProcessingContext() = default;
 
     RetCode init(const Parameter::Common& common);
     RetCode deInit();
 
     PreProcessingEffectType getPreProcessingType() const { return mType; }
 
-    RetCode enable();
-    RetCode disable();
+    RetCode enable() override;
+    RetCode disable() override;
 
     RetCode setCommon(const Parameter::Common& common) override;
     void updateConfigs(const Parameter::Common& common);
@@ -76,7 +74,7 @@
     RetCode setNoiseSuppressionLevel(NoiseSuppression::Level level);
     NoiseSuppression::Level getNoiseSuppressionLevel() const;
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status process(float* in, float* out, int samples);
 
   private:
     static constexpr inline int kAgcDefaultTargetLevel = 3;
@@ -85,20 +83,19 @@
     static constexpr inline webrtc::AudioProcessing::Config::NoiseSuppression::Level
             kNsDefaultLevel = webrtc::AudioProcessing::Config::NoiseSuppression::kModerate;
 
-    std::mutex mMutex;
     const PreProcessingEffectType mType;
     PreProcEffectState mState;  // current state
 
     // handle on webRTC audio processing module (APM)
-    rtc::scoped_refptr<webrtc::AudioProcessing> mAudioProcessingModule GUARDED_BY(mMutex);
+    rtc::scoped_refptr<webrtc::AudioProcessing> mAudioProcessingModule;
 
-    int mEnabledMsk GUARDED_BY(mMutex);       // bit field containing IDs of enabled pre processors
-    int mProcessedMsk GUARDED_BY(mMutex);     // bit field containing IDs of pre processors already
-                                              // processed in current round
-    int mRevEnabledMsk GUARDED_BY(mMutex);    // bit field containing IDs of enabled pre processors
-                                              // with reverse channel
-    int mRevProcessedMsk GUARDED_BY(mMutex);  // bit field containing IDs of pre processors with
-                                              // reverse channel already processed in current round
+    int mEnabledMsk;       // bit field containing IDs of enabled pre processors
+    int mProcessedMsk;     // bit field containing IDs of pre processors already
+                           // processed in current round
+    int mRevEnabledMsk;    // bit field containing IDs of enabled pre processors
+                           // with reverse channel
+    int mRevProcessedMsk;  // bit field containing IDs of pre processors with
+                           // reverse channel already processed in current round
 
     webrtc::StreamConfig mInputConfig;   // input stream configuration
     webrtc::StreamConfig mOutputConfig;  // output stream configuration
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingSession.h b/media/libeffects/preprocessing/aidl/PreProcessingSession.h
index 877292f..4a66e81 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingSession.h
+++ b/media/libeffects/preprocessing/aidl/PreProcessingSession.h
@@ -21,7 +21,6 @@
 #include <unordered_map>
 
 #include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
 
 #include "PreProcessingContext.h"
 #include "PreProcessingTypes.h"
@@ -67,7 +66,6 @@
                                                         const Parameter::Common& common) {
         int sessionId = common.session;
         LOG(DEBUG) << __func__ << type << " with sessionId " << sessionId;
-        std::lock_guard lg(mMutex);
         if (mSessionMap.count(sessionId) == 0 && mSessionMap.size() >= MAX_PRE_PROC_SESSIONS) {
             LOG(ERROR) << __func__ << " exceed max bundle session";
             return nullptr;
@@ -95,7 +93,6 @@
 
     void releaseSession(const PreProcessingEffectType& type, int sessionId) {
         LOG(DEBUG) << __func__ << type << " sessionId " << sessionId;
-        std::lock_guard lg(mMutex);
         if (mSessionMap.count(sessionId)) {
             auto& list = mSessionMap[sessionId];
             if (!findPreProcessingTypeInList(list, type, true /* remove */)) {
@@ -109,11 +106,9 @@
     }
 
   private:
-    // Lock for mSessionMap access.
-    std::mutex mMutex;
     // Max session number supported.
     static constexpr int MAX_PRE_PROC_SESSIONS = 8;
     std::unordered_map<int /* session ID */, std::vector<std::shared_ptr<PreProcessingContext>>>
-            mSessionMap GUARDED_BY(mMutex);
+            mSessionMap;
 };
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/benchmarks/Android.bp b/media/libeffects/preprocessing/benchmarks/Android.bp
index fbbcab4..ca99bf8 100644
--- a/media/libeffects/preprocessing/benchmarks/Android.bp
+++ b/media/libeffects/preprocessing/benchmarks/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_preprocessing_license"
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
index d80b135..ad8d84d 100644
--- a/media/libeffects/preprocessing/tests/Android.bp
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -1,5 +1,6 @@
 // audio preprocessing unit test
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_preprocessing_license"
diff --git a/media/libeffects/proxy/Android.bp b/media/libeffects/proxy/Android.bp
index 6256eda..95da4de 100644
--- a/media/libeffects/proxy/Android.bp
+++ b/media/libeffects/proxy/Android.bp
@@ -29,19 +29,19 @@
     srcs: ["EffectProxy.cpp"],
 
     cflags: [
-        "-fvisibility=hidden",
         "-Wall",
         "-Werror",
+        "-fvisibility=hidden",
     ],
 
     include_dirs: ["frameworks/av/media/libeffects/factory"],
 
     header_libs: ["libaudioeffects"],
     shared_libs: [
-        "liblog",
         "libcutils",
-        "libutils",
         "libdl",
         "libeffects",
+        "liblog",
+        "libutils",
     ],
 }
diff --git a/media/libeffects/spatializer/benchmarks/Android.bp b/media/libeffects/spatializer/benchmarks/Android.bp
index ab7e468..2d07a9b 100644
--- a/media/libeffects/spatializer/benchmarks/Android.bp
+++ b/media/libeffects/spatializer/benchmarks/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/spatializer/tests/Android.bp b/media/libeffects/spatializer/tests/Android.bp
index 704e873..ddfcff3 100644
--- a/media/libeffects/spatializer/tests/Android.bp
+++ b/media/libeffects/spatializer/tests/Android.bp
@@ -1,6 +1,7 @@
 // Build the unit tests for spatializer effect
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -12,7 +13,7 @@
 cc_test {
     name: "SpatializerTest",
     defaults: [
-      "libeffects-test-defaults",
+        "libeffects-test-defaults",
     ],
     host_supported: false,
     srcs: [
diff --git a/media/libeffects/testlibs/Android.bp b/media/libeffects/testlibs/Android.bp
index 5ba56bb..f5aad92 100644
--- a/media/libeffects/testlibs/Android.bp
+++ b/media/libeffects/testlibs/Android.bp
@@ -33,10 +33,10 @@
     relative_install_path: "soundfx",
 
     cflags: [
-        "-fvisibility=hidden",
         "-Wall",
         "-Werror",
         "-Wno-address-of-packed-member",
+        "-fvisibility=hidden",
     ],
 
     header_libs: [
@@ -66,9 +66,9 @@
     relative_install_path: "soundfx",
 
     cflags: [
-        "-fvisibility=hidden",
         "-Wall",
         "-Werror",
+        "-fvisibility=hidden",
     ],
 
     header_libs: [
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
index 66ceadf..8f1d8da 100644
--- a/media/libeffects/visualizer/Android.bp
+++ b/media/libeffects/visualizer/Android.bp
@@ -54,9 +54,9 @@
 cc_library_shared {
     name: "libvisualizeraidl",
     srcs: [
+        ":effectCommonFile",
         "aidl/Visualizer.cpp",
         "aidl/VisualizerContext.cpp",
-        ":effectCommonFile",
     ],
     defaults: [
         "aidlaudioeffectservice_defaults",
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 0303842..f4b9b25 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -37,7 +37,6 @@
     }
     if (instanceSpp) {
         *instanceSpp = ndk::SharedRefBase::make<VisualizerImpl>();
-        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
         return EX_NONE;
     } else {
         LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -59,10 +58,11 @@
 const std::string VisualizerImpl::kEffectName = "Visualizer";
 const std::vector<Range::VisualizerRange> VisualizerImpl::kRanges = {
         MAKE_RANGE(Visualizer, latencyMs, 0, VisualizerContext::kMaxLatencyMs),
-        MAKE_RANGE(Visualizer, captureSamples, 0, VisualizerContext::kMaxCaptureBufSize),
+        MAKE_RANGE(Visualizer, captureSamples, VisualizerContext::kMinCaptureBufSize,
+                   VisualizerContext::kMaxCaptureBufSize),
         /* get only parameters, set invalid range (min > max) to indicate not support set */
-        MAKE_RANGE(Visualizer, measurement, Visualizer::Measurement({.peak = 1, .rms = 1}),
-                   Visualizer::Measurement({.peak = 0, .rms = 0})),
+        MAKE_RANGE(Visualizer, measurement, Visualizer::Measurement({.rms = 1, .peak = 1}),
+                   Visualizer::Measurement({.rms = 0, .peak = 0})),
         MAKE_RANGE(Visualizer, captureSampleBuffer, std::vector<uint8_t>({1}),
                    std::vector<uint8_t>({0}))};
 const Capability VisualizerImpl::kCapability = {
@@ -72,7 +72,7 @@
                           .uuid = getEffectImplUuidVisualizer(),
                           .proxy = std::nullopt},
                    .flags = {.type = Flags::Type::INSERT,
-                             .insert = Flags::Insert::LAST,
+                             .insert = Flags::Insert::FIRST,
                              .volume = Flags::Volume::NONE},
                    .name = VisualizerImpl::kEffectName,
                    .implementor = "The Android Open Source Project"},
@@ -85,27 +85,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus VisualizerImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus VisualizerImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::visualizer != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
@@ -222,6 +201,7 @@
 RetCode VisualizerImpl::releaseContext() {
     if (mContext) {
         mContext->disable();
+        mContext->reset();
         mContext->resetBuffer();
     }
     return RetCode::SUCCESS;
diff --git a/media/libeffects/visualizer/aidl/Visualizer.h b/media/libeffects/visualizer/aidl/Visualizer.h
index b48c85e..f25b78d 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.h
+++ b/media/libeffects/visualizer/aidl/Visualizer.h
@@ -29,13 +29,9 @@
     static const std::string kEffectName;
     static const Capability kCapability;
     static const Descriptor kDescriptor;
-    VisualizerImpl() { LOG(DEBUG) << __func__; }
-    ~VisualizerImpl() {
-        cleanUp();
-        LOG(DEBUG) << __func__;
-    }
+    VisualizerImpl() = default;
+    ~VisualizerImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 5d2bb3a..a368e52 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -38,14 +38,10 @@
 }
 
 VisualizerContext::~VisualizerContext() {
-    std::lock_guard lg(mMutex);
-    LOG(DEBUG) << __func__;
     mState = State::UNINITIALIZED;
 }
 
 RetCode VisualizerContext::initParams(const Parameter::Common& common) {
-    std::lock_guard lg(mMutex);
-    LOG(DEBUG) << __func__;
     if (common.input != common.output) {
         LOG(ERROR) << __func__ << " mismatch input: " << common.input.toString()
                    << " and output: " << common.output.toString();
@@ -61,12 +57,11 @@
 #endif
     mChannelCount = channelCount;
     mCommon = common;
-    std::fill(mCaptureBuf.begin(), mCaptureBuf.end(), 0x80);
+    reset();
     return RetCode::SUCCESS;
 }
 
 RetCode VisualizerContext::enable() {
-    std::lock_guard lg(mMutex);
     if (mState != State::INITIALIZED) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
@@ -75,7 +70,6 @@
 }
 
 RetCode VisualizerContext::disable() {
-    std::lock_guard lg(mMutex);
     if (mState != State::ACTIVE) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
     }
@@ -83,49 +77,40 @@
     return RetCode::SUCCESS;
 }
 
-void VisualizerContext::reset() {
-    std::lock_guard lg(mMutex);
+RetCode VisualizerContext::reset() {
     std::fill(mCaptureBuf.begin(), mCaptureBuf.end(), 0x80);
+    return RetCode::SUCCESS;
 }
 
-RetCode VisualizerContext::setCaptureSamples(int samples) {
+RetCode VisualizerContext::setCaptureSamples(int32_t samples) {
-    std::lock_guard lg(mMutex);
     mCaptureSamples = samples;
     return RetCode::SUCCESS;
 }
-int VisualizerContext::getCaptureSamples() {
-    std::lock_guard lg(mMutex);
+int32_t VisualizerContext::getCaptureSamples() {
     return mCaptureSamples;
 }
 
 RetCode VisualizerContext::setMeasurementMode(Visualizer::MeasurementMode mode) {
-    std::lock_guard lg(mMutex);
     mMeasurementMode = mode;
     return RetCode::SUCCESS;
 }
 Visualizer::MeasurementMode VisualizerContext::getMeasurementMode() {
-    std::lock_guard lg(mMutex);
     return mMeasurementMode;
 }
 
 RetCode VisualizerContext::setScalingMode(Visualizer::ScalingMode mode) {
-    std::lock_guard lg(mMutex);
     mScalingMode = mode;
     return RetCode::SUCCESS;
 }
 Visualizer::ScalingMode VisualizerContext::getScalingMode() {
-    std::lock_guard lg(mMutex);
     return mScalingMode;
 }
 
 RetCode VisualizerContext::setDownstreamLatency(int latency) {
-    std::lock_guard lg(mMutex);
     mDownstreamLatency = latency;
     return RetCode::SUCCESS;
 }
-
 int VisualizerContext::getDownstreamLatency() {
-    std::lock_guard lg(mMutex);
     return mDownstreamLatency;
 }
 
@@ -152,7 +137,6 @@
     uint8_t nbValidMeasurements = 0;
 
     {
-        std::lock_guard lg(mMutex);
         // reset measurements if last measurement was too long ago (which implies stored
         // measurements aren't relevant anymore and shouldn't bias the new one)
         const uint32_t delayMs = getDeltaTimeMsFromUpdatedTime_l();
@@ -185,13 +169,12 @@
     // convert from I16 sample values to mB and write results
     measure.rms = (rms < 0.000016f) ? -9600 : (int32_t)(2000 * log10(rms / 32767.0f));
     measure.peak = (peakU16 == 0) ? -9600 : (int32_t)(2000 * log10(peakU16 / 32767.0f));
-    LOG(INFO) << __func__ << " peak " << peakU16 << " (" << measure.peak << "mB), rms " << rms
-              << " (" << measure.rms << "mB)";
+    LOG(VERBOSE) << __func__ << " peak " << peakU16 << " (" << measure.peak << "mB), rms " << rms
+                 << " (" << measure.rms << "mB)";
     return measure;
 }
 
 std::vector<uint8_t> VisualizerContext::capture() {
-    std::lock_guard lg(mMutex);
     uint32_t captureSamples = mCaptureSamples;
     std::vector<uint8_t> result(captureSamples, 0x80);
     // cts android.media.audio.cts.VisualizerTest expecting silence data when effect not running
@@ -205,7 +188,6 @@
     // clear the capture buffer to return silence
     if ((mLastCaptureIdx == mCaptureIdx) && (mBufferUpdateTime.tv_sec != 0) &&
         (deltaMs > kMaxStallTimeMs)) {
-        LOG(INFO) << __func__ << " capture going to idle";
         mBufferUpdateTime.tv_sec = 0;
         return result;
     }
@@ -247,10 +229,8 @@
     IEffect::Status result = {STATUS_NOT_ENOUGH_DATA, 0, 0};
     RETURN_VALUE_IF(in == nullptr || out == nullptr || samples == 0, result, "dataBufferError");
 
-    std::lock_guard lg(mMutex);
     result.status = STATUS_INVALID_OPERATION;
     RETURN_VALUE_IF(mState != State::ACTIVE, result, "stateNotActive");
-    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
     // perform measurements if needed
     if (mMeasurementMode == Visualizer::MeasurementMode::PEAK_RMS) {
         // find the peak and RMS squared for the new buffer
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
index 958035f..d4abbd3 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.h
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -16,8 +16,8 @@
 
 #pragma once
 
-#include <android-base/thread_annotations.h>
 #include <audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_visualizer.h>
 
 #include "effect-impl/EffectContext.h"
 
@@ -25,21 +25,24 @@
 
 class VisualizerContext final : public EffectContext {
   public:
-    static const uint32_t kMaxCaptureBufSize = 65536;
-    static const uint32_t kMaxLatencyMs = 3000;  // 3 seconds of latency for audio pipeline
+    // The min/max capture sizes need to be aligned with VISUALIZER_CAPTURE_SIZE_MIN and
+    // VISUALIZER_CAPTURE_SIZE_MAX because of a limitation in audio_utils fixedfft.
+    static constexpr int32_t kMinCaptureBufSize = VISUALIZER_CAPTURE_SIZE_MIN;
+    static constexpr int32_t kMaxCaptureBufSize = VISUALIZER_CAPTURE_SIZE_MAX;
+    static constexpr uint32_t kMaxLatencyMs = 3000;  // 3 seconds of latency for audio pipeline
 
     VisualizerContext(int statusDepth, const Parameter::Common& common);
     ~VisualizerContext();
 
     RetCode initParams(const Parameter::Common& common);
 
-    RetCode enable();
-    RetCode disable();
+    RetCode enable() override;
+    RetCode disable() override;
     // keep all parameters and reset buffer.
-    void reset();
+    RetCode reset() override;
 
-    RetCode setCaptureSamples(int captureSize);
-    int getCaptureSamples();
+    RetCode setCaptureSamples(int32_t captureSize);
+    int32_t getCaptureSamples();
     RetCode setMeasurementMode(Visualizer::MeasurementMode mode);
     Visualizer::MeasurementMode getMeasurementMode();
     RetCode setScalingMode(Visualizer::ScalingMode mode);
@@ -75,28 +78,26 @@
     // note: buffer index is stored in uint8_t
     static const uint32_t kMeasurementWindowMaxSizeInBuffers = 25;
 
-    // serialize process() and parameter setting
-    std::mutex mMutex;
-    Parameter::Common mCommon GUARDED_BY(mMutex);
-    State mState GUARDED_BY(mMutex) = State::UNINITIALIZED;
-    uint32_t mCaptureIdx GUARDED_BY(mMutex) = 0;
-    uint32_t mLastCaptureIdx GUARDED_BY(mMutex) = 0;
-    Visualizer::ScalingMode mScalingMode GUARDED_BY(mMutex) = Visualizer::ScalingMode::NORMALIZED;
-    struct timespec mBufferUpdateTime GUARDED_BY(mMutex);
+    Parameter::Common mCommon;
+    State mState = State::UNINITIALIZED;
+    uint32_t mCaptureIdx = 0;
+    uint32_t mLastCaptureIdx = 0;
+    Visualizer::ScalingMode mScalingMode = Visualizer::ScalingMode::NORMALIZED;
+    struct timespec mBufferUpdateTime;
     // capture buf with 8 bits mono PCM samples
-    std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf GUARDED_BY(mMutex);
-    uint32_t mDownstreamLatency GUARDED_BY(mMutex) = 0;
-    uint32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
+    std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf;
+    uint32_t mDownstreamLatency = 0;
+    int32_t mCaptureSamples = kMaxCaptureBufSize;
 
     // to avoid recomputing it every time a buffer is processed
-    uint8_t mChannelCount GUARDED_BY(mMutex) = 0;
-    Visualizer::MeasurementMode mMeasurementMode GUARDED_BY(mMutex) =
+    uint8_t mChannelCount = 0;
+    Visualizer::MeasurementMode mMeasurementMode =
             Visualizer::MeasurementMode::NONE;
     uint8_t mMeasurementWindowSizeInBuffers = kMeasurementWindowMaxSizeInBuffers;
-    uint8_t mMeasurementBufferIdx GUARDED_BY(mMutex) = 0;
+    uint8_t mMeasurementBufferIdx = 0;
     std::array<BufferStats, kMeasurementWindowMaxSizeInBuffers> mPastMeasurements;
     void init_params();
 
-    uint32_t getDeltaTimeMsFromUpdatedTime_l() REQUIRES(mMutex);
+    uint32_t getDeltaTimeMsFromUpdatedTime_l();
 };
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/liberror/include/error/BinderResult.h b/media/liberror/include/error/BinderResult.h
new file mode 100644
index 0000000..1f1211c
--- /dev/null
+++ b/media/liberror/include/error/BinderResult.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <binder/Status.h>
+#include <error/expected_utils.h>
+#include <utils/Errors.h>
+
+namespace android {
+namespace error {
+
+/**
+ * A convenience short-hand for base::expected, where the error type is a binder::Status, for use
+ * when implementing binder services.
+ * Clients need to link against libbinder, since this library is header only.
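+ *
+ * A minimal usage sketch (parsePositiveInt below is purely illustrative and not part of
+ * this header):
+ *
+ *   BinderResult<int32_t> parsePositiveInt(int32_t value) {
+ *       if (value <= 0) {
+ *           return unexpectedExceptionCode(binder::Status::EX_ILLEGAL_ARGUMENT,
+ *                                          "value must be positive");
+ *       }
+ *       return value;
+ *   }
+ *
+ * The result can then be inspected with the usual base::expected API
+ * (has_value() / value() / error()).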
+ */
+template <typename T>
+using BinderResult = base::expected<T, binder::Status>;
+
+inline base::unexpected<binder::Status> unexpectedExceptionCode(int32_t exceptionCode,
+                                                                const char* s) {
+    return base::unexpected{binder::Status::fromExceptionCode(exceptionCode, s)};
+}
+
+inline base::unexpected<binder::Status> unexpectedServiceException(int32_t serviceSpecificCode,
+                                                                   const char* s) {
+    return base::unexpected{binder::Status::fromServiceSpecificError(serviceSpecificCode, s)};
+}
+
+}  // namespace error
+}  // namespace android
+
+inline std::string errorToString(const ::android::binder::Status& status) {
+    return std::string{status.toString8().c_str()};
+}
diff --git a/media/liberror/include/error/BinderStatusMatcher.h b/media/liberror/include/error/BinderStatusMatcher.h
new file mode 100644
index 0000000..11d9e65
--- /dev/null
+++ b/media/liberror/include/error/BinderStatusMatcher.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <ostream>
+
+#include <binder/Status.h>
+
+namespace android::error {
+
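+// A usage sketch inside a GoogleTest case (callRemoteMethod() is a hypothetical function
+// returning binder::Status):
+//
+//   EXPECT_THAT(callRemoteMethod(), BinderStatusMatcher::isOk());
+//   EXPECT_THAT(callRemoteMethod(),
+//               BinderStatusMatcher::hasException(binder::Status::EX_ILLEGAL_ARGUMENT));
+//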
+class BinderStatusMatcher {
+  public:
+    using is_gtest_matcher = void;
+
+    explicit BinderStatusMatcher(binder::Status status) : status_(std::move(status)) {}
+
+    static BinderStatusMatcher hasException(binder::Status::Exception ex) {
+        return BinderStatusMatcher(binder::Status::fromExceptionCode(ex));
+    }
+
+    static BinderStatusMatcher isOk() { return BinderStatusMatcher(binder::Status::ok()); }
+
+    bool MatchAndExplain(const binder::Status& value,
+                         ::testing::MatchResultListener* listener) const {
+        if (status_.exceptionCode() == value.exceptionCode() &&
+            status_.transactionError() == value.transactionError() &&
+            status_.serviceSpecificErrorCode() == value.serviceSpecificErrorCode()) {
+            return true;
+        }
+        *listener << "received binder status: " << value;
+        return false;
+    }
+
+    void DescribeTo(std::ostream* os) const { *os << "contains binder status " << status_; }
+
+    void DescribeNegationTo(std::ostream* os) const {
+        *os << "does not contain binder status " << status_;
+    }
+
+  private:
+    const binder::Status status_;
+};
+}  // namespace android::error
diff --git a/media/liberror/include/error/ExpectedMatchers.h b/media/liberror/include/error/ExpectedMatchers.h
new file mode 100644
index 0000000..b81adbf
--- /dev/null
+++ b/media/liberror/include/error/ExpectedMatchers.h
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <ostream>
+#include <type_traits>
+
+namespace android::error {
+
+/**
+ * Example Usage:
+ * Given a function with signature
+ *       Result<T, U> foo()
+ * Matchers can be used as follows:
+ *       EXPECT_THAT(foo(), IsOkAnd(Eq(T{})));
+ *       EXPECT_THAT(foo(), IsErrorAnd(Eq(U{})));
+ */
+template <typename ExpectedT>
+class IsOkAndImpl : public ::testing::MatcherInterface<ExpectedT> {
+  public:
+    using ValueT = typename std::remove_reference_t<ExpectedT>::value_type;
+
+    template <typename InnerMatcher>
+    explicit IsOkAndImpl(InnerMatcher innerMatcher)
+        : inner_matcher_(::testing::SafeMatcherCast<const ValueT&>(
+                  std::forward<InnerMatcher>(innerMatcher))) {}
+
+    bool MatchAndExplain(ExpectedT val, ::testing::MatchResultListener* listener) const {
+        if (!val.has_value()) {
+            *listener << "which has error " << ::testing::PrintToString(val.error());
+            return false;
+        }
+        const auto res = inner_matcher_.MatchAndExplain(val.value(), listener);
+        if (!res) {
+            *listener << "which has value " << ::testing::PrintToString(val.value());
+        }
+        return res;
+    }
+
+    void DescribeTo(std::ostream* os) const {
+        *os << "contains expected value which ";
+        inner_matcher_.DescribeTo(os);
+    }
+
+    void DescribeNegationTo(std::ostream* os) const {
+        *os << "does not contain expected, or contains expected value which ";
+        inner_matcher_.DescribeNegationTo(os);
+    }
+
+  private:
+    ::testing::Matcher<const ValueT&> inner_matcher_;
+};
+
+template <typename InnerMatcher>
+class IsOkAnd {
+  public:
+    explicit IsOkAnd(InnerMatcher innerMatcher) : inner_matcher_(std::move(innerMatcher)) {}
+
+    template <typename T>
+    operator ::testing::Matcher<T>() const {
+        return ::testing::Matcher<T>{new IsOkAndImpl<const T&>(inner_matcher_)};
+    }
+
+  private:
+    InnerMatcher inner_matcher_;
+};
+
+template <typename ExpectedT>
+class IsErrorAndImpl : public ::testing::MatcherInterface<ExpectedT> {
+  public:
+    using ErrorT = typename std::remove_reference_t<ExpectedT>::error_type;
+
+    template <typename InnerMatcher>
+    explicit IsErrorAndImpl(InnerMatcher innerMatcher)
+        : inner_matcher_(::testing::SafeMatcherCast<const ErrorT&>(
+                  std::forward<InnerMatcher>(innerMatcher))) {}
+
+    bool MatchAndExplain(ExpectedT val, ::testing::MatchResultListener* listener) const {
+        if (val.has_value()) {
+            *listener << "which has value " << ::testing::PrintToString(val.value());
+            return false;
+        }
+
+        const auto res = inner_matcher_.MatchAndExplain(val.error(), listener);
+        if (!res) {
+            *listener << "which has error " << ::testing::PrintToString(val.error());
+        }
+        return res;
+    }
+
+    void DescribeTo(std::ostream* os) const {
+        *os << "contains error value which ";
+        inner_matcher_.DescribeTo(os);
+    }
+
+    void DescribeNegationTo(std::ostream* os) const {
+        *os << "does not contain error value, or contains error value which ";
+        inner_matcher_.DescribeNegationTo(os);
+    }
+
+  private:
+    ::testing::Matcher<const ErrorT&> inner_matcher_;
+};
+
+template <typename InnerMatcher>
+class IsErrorAnd {
+  public:
+    explicit IsErrorAnd(InnerMatcher innerMatcher) : inner_matcher_(std::move(innerMatcher)) {}
+
+    template <typename T>
+    operator ::testing::Matcher<T>() const {
+        return ::testing::Matcher<T>{new IsErrorAndImpl<const T&>(inner_matcher_)};
+    }
+
+  private:
+    InnerMatcher inner_matcher_;
+};
+
+}  // namespace android::error
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index 9955862..70a242d 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -11,18 +12,18 @@
     name: "libheadtracking",
     host_supported: true,
     srcs: [
-      "HeadTrackingProcessor.cpp",
-      "ModeSelector.cpp",
-      "Pose.cpp",
-      "PoseBias.cpp",
-      "PoseDriftCompensator.cpp",
-      "PosePredictor.cpp",
-      "PoseRateLimiter.cpp",
-      "QuaternionUtil.cpp",
-      "ScreenHeadFusion.cpp",
-      "StillnessDetector.cpp",
-      "Twist.cpp",
-      "VectorRecorder.cpp",
+        "HeadTrackingProcessor.cpp",
+        "ModeSelector.cpp",
+        "Pose.cpp",
+        "PoseBias.cpp",
+        "PoseDriftCompensator.cpp",
+        "PosePredictor.cpp",
+        "PoseRateLimiter.cpp",
+        "QuaternionUtil.cpp",
+        "ScreenHeadFusion.cpp",
+        "StillnessDetector.cpp",
+        "Twist.cpp",
+        "VectorRecorder.cpp",
     ],
     shared_libs: [
         "libaudioutils",
@@ -51,7 +52,7 @@
 cc_library {
     name: "libheadtracking-binding",
     srcs: [
-      "SensorPoseProvider.cpp",
+        "SensorPoseProvider.cpp",
     ],
     shared_libs: [
         "libbase",
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 085a7e4..ee4075f 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -32,6 +32,7 @@
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
 #include <utils/RefBase.h>
+#include <algorithm>
 #include <vector>
 
 HeifDecoder* createHeifDecoder() {
@@ -42,7 +43,10 @@
 
 void initFrameInfo(HeifFrameInfo *info, const VideoFrame *videoFrame) {
     info->mWidth = videoFrame->mDisplayWidth;
-    info->mHeight = videoFrame->mDisplayHeight;
+    // Number of scanlines is mDisplayHeight. Clamp it to mHeight to guard
+    // against malformed streams claiming that mDisplayHeight is greater than
+    // mHeight.
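+    // For example, a malformed frame claiming mDisplayHeight == 4320 while mHeight == 2160
+    // ends up clamped to 2160 here.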
+    info->mHeight = std::min(videoFrame->mDisplayHeight, videoFrame->mHeight);
     info->mRotationAngle = videoFrame->mRotationAngle;
     info->mBytesPerPixel = videoFrame->mBytesPerPixel;
     info->mDurationUs = videoFrame->mDurationUs;
@@ -746,7 +750,9 @@
                    (videoFrame->mRowBytes * (mCurScanline + videoFrame->mDisplayTop)) +
                    (videoFrame->mBytesPerPixel * videoFrame->mDisplayLeft);
     mCurScanline++;
-    memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mDisplayWidth);
+    // Do not try to copy more than |videoFrame->mWidth| pixels.
+    uint32_t width = std::min(videoFrame->mDisplayWidth, videoFrame->mWidth);
+    memcpy(dst, src, videoFrame->mBytesPerPixel * width);
     return true;
 }
 
diff --git a/media/libheif/OWNERS b/media/libheif/OWNERS
new file mode 100644
index 0000000..a61ad21
--- /dev/null
+++ b/media/libheif/OWNERS
@@ -0,0 +1,2 @@
+include platform/frameworks/av:/media/janitors/avic_OWNERS
+include platform/frameworks/av:/media/janitors/codec_OWNERS
\ No newline at end of file
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 590a7b7..8a962c6 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -81,9 +81,6 @@
 cc_library_shared {
     name: "libmedia_omx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
@@ -223,7 +220,6 @@
         "com.android.media",
     ],
 
-
     srcs: ["MidiIoWrapper.cpp"],
 
     static_libs: [
@@ -259,6 +255,45 @@
 }
 
 cc_library_shared {
+    name: "libmedia_codeclist_capabilities",
+
+    srcs: [
+        "AudioCapabilities.cpp",
+        "CodecCapabilities.cpp",
+        "CodecCapabilitiesUtils.cpp",
+    ],
+
+    local_include_dirs: [
+        "include",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
+
+cc_library_shared {
     name: "libmedia_codeclist",
 
     srcs: [
@@ -274,12 +309,13 @@
         "android.hardware.media.omx@1.0",
         "libbinder",
         "liblog",
+        "libmedia_codeclist_capabilities",
         "libstagefright_foundation",
         "libutils",
     ],
 
-    include_dirs: [
-        "system/libhidl/transport/token/1.0/utils/include",
+    static_libs: [
+        "android.media.codec-aconfig-cc",
     ],
 
     export_include_dirs: [
@@ -364,7 +400,6 @@
         "av-types-aidl-cpp",
         "liblog",
         "libcutils",
-        "libprocessgroup",
         "libutils",
         "libbinder",
         "libbinder_ndk",
diff --git a/media/libmedia/AudioCapabilities.cpp b/media/libmedia/AudioCapabilities.cpp
new file mode 100644
index 0000000..e8cf517
--- /dev/null
+++ b/media/libmedia/AudioCapabilities.cpp
@@ -0,0 +1,394 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AudioCapabilities"
+
+#include <android-base/strings.h>
+#include <android-base/properties.h>
+
+#include <media/AudioCapabilities.h>
+#include <media/CodecCapabilities.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+namespace android {
+
+const Range<int>& AudioCapabilities::getBitrateRange() const {
+    return mBitrateRange;
+}
+
+const std::vector<int>& AudioCapabilities::getSupportedSampleRates() const {
+    return mSampleRates;
+}
+
+const std::vector<Range<int>>&
+        AudioCapabilities::getSupportedSampleRateRanges() const {
+    return mSampleRateRanges;
+}
+
+int AudioCapabilities::getMaxInputChannelCount() const {
+    int overallMax = 0;
+    for (int i = mInputChannelRanges.size() - 1; i >= 0; i--) {
+        int lmax = mInputChannelRanges[i].upper();
+        if (lmax > overallMax) {
+            overallMax = lmax;
+        }
+    }
+    return overallMax;
+}
+
+int AudioCapabilities::getMinInputChannelCount() const {
+    int overallMin = MAX_INPUT_CHANNEL_COUNT;
+    for (int i = mInputChannelRanges.size() - 1; i >= 0; i--) {
+        int lmin = mInputChannelRanges[i].lower();
+        if (lmin < overallMin) {
+            overallMin = lmin;
+        }
+    }
+    return overallMin;
+}
+
+const std::vector<Range<int>>&
+        AudioCapabilities::getInputChannelCountRanges() const {
+    return mInputChannelRanges;
+}
+
+// static
+std::shared_ptr<AudioCapabilities> AudioCapabilities::Create(std::string mediaType,
+        std::vector<ProfileLevel> profLevs, const sp<AMessage> &format) {
+    std::shared_ptr<AudioCapabilities> caps(new AudioCapabilities());
+    caps->init(mediaType, profLevs, format);
+    return caps;
+}
+
+void AudioCapabilities::init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+        const sp<AMessage> &format) {
+    mMediaType = mediaType;
+    mProfileLevels = profLevs;
+    mError = 0;
+
+    initWithPlatformLimits();
+    applyLevelLimits();
+    parseFromInfo(format);
+}
+
+void AudioCapabilities::initWithPlatformLimits() {
+    mBitrateRange = Range<int>(0, INT_MAX);
+    mInputChannelRanges.push_back(Range<int>(1, MAX_INPUT_CHANNEL_COUNT));
+
+    const int minSampleRate = base::GetIntProperty("ro.mediacodec.min_sample_rate", 7350);
+    const int maxSampleRate = base::GetIntProperty("ro.mediacodec.max_sample_rate", 192000);
+    mSampleRateRanges.push_back(Range<int>(minSampleRate, maxSampleRate));
+}
+
+bool AudioCapabilities::supports(int sampleRate, int inputChannels) {
+    // channels and sample rates are checked orthogonally
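+    // For example, supports(44100, 0) validates only the sample rate, while
+    // supports(0, 2) validates only the input channel count.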
+    if (inputChannels != 0
+            && !std::any_of(mInputChannelRanges.begin(), mInputChannelRanges.end(),
+            [inputChannels](const Range<int> &a) { return a.contains(inputChannels); })) {
+        return false;
+    }
+    if (sampleRate != 0
+            && !std::any_of(mSampleRateRanges.begin(), mSampleRateRanges.end(),
+            [sampleRate](const Range<int> &a) { return a.contains(sampleRate); })) {
+        return false;
+    }
+    return true;
+}
+
+bool AudioCapabilities::isSampleRateSupported(int sampleRate) {
+    return supports(sampleRate, 0);
+}
+
+void AudioCapabilities::limitSampleRates(std::vector<int> rates) {
+    std::vector<Range<int>> sampleRateRanges;
+    std::sort(rates.begin(), rates.end());
+    for (int rate : rates) {
+        if (supports(rate, 0 /* channels */)) {
+            sampleRateRanges.push_back(Range<int>(rate, rate));
+        }
+    }
+    mSampleRateRanges = intersectSortedDistinctRanges(mSampleRateRanges, sampleRateRanges);
+    createDiscreteSampleRates();
+}
+
+void AudioCapabilities::createDiscreteSampleRates() {
+    mSampleRates.clear();
+    for (int i = 0; i < mSampleRateRanges.size(); i++) {
+        mSampleRates.push_back(mSampleRateRanges[i].lower());
+    }
+}
+
+void AudioCapabilities::limitSampleRates(std::vector<Range<int>> rateRanges) {
+    sortDistinctRanges(&rateRanges);
+    mSampleRateRanges = intersectSortedDistinctRanges(mSampleRateRanges, rateRanges);
+    // check if all values are discrete
+    for (Range<int> range: mSampleRateRanges) {
+        if (range.lower() != range.upper()) {
+            mSampleRates.clear();
+            return;
+        }
+    }
+    createDiscreteSampleRates();
+}
+
+void AudioCapabilities::applyLevelLimits() {
+    std::vector<int> sampleRates;
+    std::optional<Range<int>> sampleRateRange;
+    std::optional<Range<int>> bitRates;
+    int maxChannels = MAX_INPUT_CHANNEL_COUNT;
+
+    // const char *mediaType = mMediaType.c_str();
+    if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_MPEG)) {
+        sampleRates = {
+                8000, 11025, 12000,
+                16000, 22050, 24000,
+                32000, 44100, 48000 };
+        bitRates = Range<int>(8000, 320000);
+        maxChannels = 2;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AMR_NB)) {
+        sampleRates = { 8000 };
+        bitRates = Range<int>(4750, 12200);
+        maxChannels = 1;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AMR_WB)) {
+        sampleRates = { 16000 };
+        bitRates = Range<int>(6600, 23850);
+        maxChannels = 1;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AAC)) {
+        sampleRates = {
+                7350, 8000,
+                11025, 12000, 16000,
+                22050, 24000, 32000,
+                44100, 48000, 64000,
+                88200, 96000 };
+        bitRates = Range<int>(8000, 510000);
+        maxChannels = 48;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_VORBIS)) {
+        bitRates = Range<int>(32000, 500000);
+        sampleRateRange = Range<int>(8000, 192000);
+        maxChannels = 255;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_OPUS)) {
+        bitRates = Range<int>(6000, 510000);
+        sampleRates = { 8000, 12000, 16000, 24000, 48000 };
+        maxChannels = 255;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_RAW)) {
+        sampleRateRange = Range<int>(1, 192000);
+        bitRates = Range<int>(1, 10000000);
+        maxChannels = MAX_NUM_CHANNELS;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_FLAC)) {
+        sampleRateRange = Range<int>(1, 655350);
+        // lossless codec, so bitrate is ignored
+        maxChannels = 255;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_G711_ALAW)
+            || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_G711_MLAW)) {
+        sampleRates = { 8000 };
+        bitRates = Range<int>(64000, 64000);
+        // platform allows multiple channels for this format
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_MSGSM)) {
+        sampleRates = { 8000 };
+        bitRates = Range<int>(13000, 13000);
+        maxChannels = 1;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AC3)) {
+        maxChannels = 6;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_EAC3)) {
+        maxChannels = 16;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_EAC3_JOC)) {
+        sampleRates = { 48000 };
+        bitRates = Range<int>(32000, 6144000);
+        maxChannels = 16;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AC4)) {
+        sampleRates = { 44100, 48000, 96000, 192000 };
+        bitRates = Range<int>(16000, 2688000);
+        maxChannels = 24;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS)) {
+        sampleRates = { 44100, 48000 };
+        bitRates = Range<int>(96000, 1524000);
+        maxChannels = 6;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_HD)) {
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            switch (profileLevel.mProfile) {
+                case DTS_HDProfileLBR:
+                    sampleRates = { 22050, 24000, 44100, 48000 };
+                    bitRates = Range<int>(32000, 768000);
+                    break;
+                case DTS_HDProfileHRA:
+                case DTS_HDProfileMA:
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int>(96000, 24500000);
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile,
+                            mMediaType.c_str());
+                    mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int>(96000, 24500000);
+            }
+        }
+        maxChannels = 8;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_UHD)) {
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            switch (profileLevel.mProfile) {
+                case DTS_UHDProfileP2:
+                    sampleRates = { 48000 };
+                    bitRates = Range<int>(96000, 768000);
+                    maxChannels = 10;
+                    break;
+                case DTS_UHDProfileP1:
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int>(96000, 24500000);
+                    maxChannels = 32;
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile,
+                            mMediaType.c_str());
+                    mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int>(96000, 24500000);
+                    maxChannels = 32;
+            }
+        }
+    } else {
+        ALOGW("Unsupported mediaType %s", mMediaType.c_str());
+        mError |= ERROR_CAPABILITIES_UNSUPPORTED;
+    }
+
+    // restrict ranges
+    if (!sampleRates.empty()) {
+        limitSampleRates(sampleRates);
+    } else if (sampleRateRange) {
+        std::vector<Range<int>> rateRanges = { sampleRateRange.value() };
+        limitSampleRates(rateRanges);
+    }
+
+    Range<int> channelRange = Range<int>(1, maxChannels);
+    std::vector<Range<int>> inputChannels = { channelRange };
+    applyLimits(inputChannels, bitRates);
+}
+
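+// Clamps the given channel ranges to [1, MAX_INPUT_CHANNEL_COUNT], intersects
+// them with the current input-channel ranges, and narrows the bitrate range
+// when a bitrate limit is provided.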
+void AudioCapabilities::applyLimits(
+        const std::vector<Range<int>> &inputChannels,
+        const std::optional<Range<int>> &bitRates) {
+    // clamp & make a local copy
+    std::vector<Range<int>> inputChannelsCopy(inputChannels.size());
+    for (int i = 0; i < inputChannels.size(); i++) {
+        int lower = inputChannels[i].clamp(1);
+        int upper = inputChannels[i].clamp(MAX_INPUT_CHANNEL_COUNT);
+        inputChannelsCopy[i] = Range<int>(lower, upper);
+    }
+
+    // sort, intersect with existing, & save channel list
+    sortDistinctRanges(&inputChannelsCopy);
+    mInputChannelRanges = intersectSortedDistinctRanges(inputChannelsCopy, mInputChannelRanges);
+
+    if (bitRates) {
+        mBitrateRange = mBitrateRange.intersect(bitRates.value());
+    }
+}
+
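+// Reads optional capability keys from the codec's info message:
+// "sample-rate-ranges", then "channel-ranges" / "channel-range" /
+// "max-channel-count" (in that order of preference), then "bitrate-range".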
+void AudioCapabilities::parseFromInfo(const sp<AMessage> &format) {
+    int maxInputChannels = MAX_INPUT_CHANNEL_COUNT;
+    std::vector<Range<int>> channels = { Range<int>(1, maxInputChannels) };
+    std::optional<Range<int>> bitRates = POSITIVE_INTEGERS;
+
+    AString rateAString;
+    if (format->findString("sample-rate-ranges", &rateAString)) {
+        std::vector<std::string> rateStrings = base::Split(std::string(rateAString.c_str()), ",");
+        std::vector<Range<int>> rateRanges;
+        for (std::string rateString : rateStrings) {
+            std::optional<Range<int>> rateRange = ParseIntRange(rateString);
+            if (!rateRange) {
+                continue;
+            }
+            rateRanges.push_back(rateRange.value());
+        }
+        limitSampleRates(rateRanges);
+    }
+
+    // we will prefer channel-ranges over max-channel-count
+    AString valueStr;
+    if (format->findString("channel-ranges", &valueStr)) {
+        std::vector<std::string> channelStrings = base::Split(std::string(valueStr.c_str()), ",");
+        std::vector<Range<int>> channelRanges;
+        for (std::string channelString : channelStrings) {
+            std::optional<Range<int>> channelRange = ParseIntRange(channelString);
+            if (!channelRange) {
+                continue;
+            }
+            channelRanges.push_back(channelRange.value());
+        }
+        channels = channelRanges;
+    } else if (format->findString("channel-range", &valueStr)) {
+        std::optional<Range<int>> oneRange = ParseIntRange(std::string(valueStr.c_str()));
+        if (oneRange) {
+            channels = { oneRange.value() };
+        }
+    } else if (format->findString("max-channel-count", &valueStr)) {
+        maxInputChannels = std::atoi(valueStr.c_str());
+        if (maxInputChannels == 0) {
+            channels = { Range<int>(0, 0) };
+        } else {
+            channels = { Range<int>(1, maxInputChannels) };
+        }
+    } else if ((mError & ERROR_CAPABILITIES_UNSUPPORTED) != 0) {
+        maxInputChannels = 0;
+        channels = { Range<int>(0, 0) };
+    }
+
+    if (format->findString("bitrate-range", &valueStr)) {
+        std::optional<Range<int>> parsedBitrate = ParseIntRange(valueStr.c_str());
+        if (parsedBitrate) {
+            bitRates = bitRates.value().intersect(parsedBitrate.value());
+        }
+    }
+
+    applyLimits(channels, bitRates);
+}
+
+void AudioCapabilities::getDefaultFormat(sp<AMessage> &format) {
+    // report settings that have only a single choice
+    if (mBitrateRange.lower() == mBitrateRange.upper()) {
+        format->setInt32(KEY_BIT_RATE, mBitrateRange.lower());
+    }
+    if (getMaxInputChannelCount() == 1) {
+        // mono-only format
+        format->setInt32(KEY_CHANNEL_COUNT, 1);
+    }
+    if (!mSampleRates.empty() && mSampleRates.size() == 1) {
+        format->setInt32(KEY_SAMPLE_RATE, mSampleRates[0]);
+    }
+}
+
+bool AudioCapabilities::supportsFormat(const sp<AMessage> &format) {
+    int32_t sampleRate;
+    format->findInt32(KEY_SAMPLE_RATE, &sampleRate);
+    int32_t channels;
+    format->findInt32(KEY_CHANNEL_COUNT, &channels);
+
+    if (!supports(sampleRate, channels)) {
+        return false;
+    }
+
+    if (!CodecCapabilities::SupportsBitrate(mBitrateRange, format)) {
+        return false;
+    }
+
+    // nothing to do for:
+    // KEY_CHANNEL_MASK: codecs don't get this
+    // KEY_IS_ADTS:      required feature for all AAC decoders
+    return true;
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/CodecCapabilities.cpp b/media/libmedia/CodecCapabilities.cpp
new file mode 100644
index 0000000..5bed1c4
--- /dev/null
+++ b/media/libmedia/CodecCapabilities.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilities"
+
+#include <algorithm>
+
+#include <utils/Log.h>
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
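+// Returns true when the bitrate advertised by the format (the larger of
+// KEY_BIT_RATE and KEY_MAX_BIT_RATE when both are set) falls within
+// bitrateRange, or when the format carries no bitrate information.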
+bool CodecCapabilities::SupportsBitrate(Range<int> bitrateRange,
+        const sp<AMessage> &format) {
+    // consider max bitrate over average bitrate for support
+    int32_t maxBitrate = 0;
+    format->findInt32(KEY_MAX_BIT_RATE, &maxBitrate);
+    int32_t bitrate = 0;
+    format->findInt32(KEY_BIT_RATE, &bitrate);
+
+    if (bitrate == 0) {
+        bitrate = maxBitrate;
+    } else if (maxBitrate != 0) {
+        bitrate = std::max(bitrate, maxBitrate);
+    }
+
+    if (bitrate > 0) {
+        return bitrateRange.contains(bitrate);
+    }
+
+    return true;
+}
+
+const std::string& CodecCapabilities::getMediaType() {
+    return mMediaType;
+}
+
+const std::vector<ProfileLevel>& CodecCapabilities::getProfileLevels() {
+    return mProfileLevels;
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/CodecCapabilitiesUtils.cpp b/media/libmedia/CodecCapabilitiesUtils.cpp
new file mode 100644
index 0000000..edfc9be
--- /dev/null
+++ b/media/libmedia/CodecCapabilitiesUtils.cpp
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilitiesUtils"
+#include <utils/Log.h>
+
+#include <algorithm>
+#include <cmath>
+#include <regex>
+#include <string>
+#include <vector>
+
+#include <media/CodecCapabilitiesUtils.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+
+namespace android {
+
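+// Parses a string of the form "lower-upper" (e.g. "8000-48000") or a single
+// value (e.g. "44100", treated as a degenerate range) into a Range<int>.
+// Note that a lone "0" is rejected because atoi() cannot distinguish it from
+// a parse failure.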
+std::optional<Range<int>> ParseIntRange(const std::string &str) {
+    if (str.empty()) {
+        ALOGW("could not parse empty integer range");
+        return std::nullopt;
+    }
+    int lower, upper;
+    std::regex regex("([0-9]+)-([0-9]+)");
+    std::smatch match;
+    if (std::regex_match(str, match, regex)) {
+        lower = std::atoi(match[1].str().c_str());
+        upper = std::atoi(match[2].str().c_str());
+    } else if (std::atoi(str.c_str()) != 0) {
+        lower = upper = std::atoi(str.c_str());
+    } else {
+        ALOGW("could not parse integer range: %s", str.c_str());
+        return std::nullopt;
+    }
+    return std::make_optional<Range<int>>(lower, upper);
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index cdb1837..ef6250f 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -23,7 +23,6 @@
 #include <binder/Parcel.h>
 #include <media/IMediaHTTPService.h>
 #include <media/IMediaMetadataRetriever.h>
-#include <processgroup/sched_policy.h>
 #include <utils/String8.h>
 #include <utils/KeyedVector.h>
 
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 86ad997..d5d1a09 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -18,8 +18,6 @@
 #define LOG_TAG "MediaCodecInfo"
 #include <utils/Log.h>
 
-#include <media/IOMX.h>
-
 #include <media/MediaCodecInfo.h>
 
 #include <media/stagefright/foundation/ADebug.h>
@@ -36,6 +34,7 @@
 constexpr char MediaCodecInfo::Capabilities::FEATURE_MULTIPLE_FRAMES[];
 constexpr char MediaCodecInfo::Capabilities::FEATURE_SECURE_PLAYBACK[];
 constexpr char MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK[];
+constexpr char MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE[];
 
 void MediaCodecInfo::Capabilities::getSupportedProfileLevels(
         Vector<ProfileLevel> *profileLevels) const {
diff --git a/media/libmedia/include/media/AudioCapabilities.h b/media/libmedia/include/media/AudioCapabilities.h
new file mode 100644
index 0000000..2bc3335
--- /dev/null
+++ b/media/libmedia/include/media/AudioCapabilities.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AUDIO_CAPABILITIES_H_
+
+#define AUDIO_CAPABILITIES_H_
+
+#include <memory>
+
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <system/audio.h>
+
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct AudioCapabilities {
+    /**
+     * Create AudioCapabilities.
+     */
+    static std::shared_ptr<AudioCapabilities> Create(std::string mediaType,
+            std::vector<ProfileLevel> profLevs, const sp<AMessage> &format);
+
+    /**
+     * Returns the range of supported bitrates in bits/second.
+     */
+    const Range<int>& getBitrateRange() const;
+
+    /**
+     * Returns the array of supported sample rates if the codec
+     * supports only discrete values. Otherwise, it returns an empty array.
+     * The array is sorted in ascending order.
+     */
+    const std::vector<int>& getSupportedSampleRates() const;
+
+    /**
+     * Returns the array of supported sample rate ranges.  The
+     * array is sorted in ascending order, and the ranges are
+     * distinct.
+     */
+    const std::vector<Range<int>>& getSupportedSampleRateRanges() const;
+
+    /**
+     * Returns the maximum number of input channels supported.
+     * The returned value should be between 1 and 255.
+     *
+     * Through {@link android.os.Build.VERSION_CODES#R}, this method indicated support
+     * for any number of input channels between 1 and this maximum value.
+     *
+     * As of {@link android.os.Build.VERSION_CODES#S},
+     * the implied lower limit of 1 channel is no longer valid.
+     * As of {@link android.os.Build.VERSION_CODES#S}, {@link #getMaxInputChannelCount} is
+     * superseded by {@link #getInputChannelCountRanges},
+     * which returns an array of ranges of channels.
+     * The {@link #getMaxInputChannelCount} method will return the highest value
+     * in the ranges returned by {@link #getInputChannelCountRanges}
+     */
+    int getMaxInputChannelCount() const;
+
+    /**
+     * Returns the minimum number of input channels supported.
+     * This is often 1, but does vary for certain mime types.
+     *
+     * This returns the lowest channel count in the ranges returned by
+     * {@link #getInputChannelCountRanges}.
+     */
+    int getMinInputChannelCount() const;
+
+    /**
+     * Returns an array of ranges representing the number of input channels supported.
+     * The codec supports any number of input channels within this range.
+     *
+     * This supersedes the {@link #getMaxInputChannelCount} method.
+     *
+     * For many codecs, this will be a single range [1..N], for some N.
+     *
+     * The returned array cannot be empty.
+     */
+    const std::vector<Range<int>>& getInputChannelCountRanges() const;
+
+    /**
+     * Query whether the sample rate is supported by the codec.
+     */
+    bool isSampleRateSupported(int sampleRate);
+
+    /* For internal use only. Not exposed as a public API */
+    void getDefaultFormat(sp<AMessage> &format);
+
+    /* For internal use only. Not exposed as a public API */
+    bool supportsFormat(const sp<AMessage> &format);
+
+private:
+    static constexpr int MAX_INPUT_CHANNEL_COUNT = 30;
+    static constexpr uint32_t MAX_NUM_CHANNELS = FCC_LIMIT;
+
+    int mError;
+    std::string mMediaType;
+    std::vector<ProfileLevel> mProfileLevels;
+
+    Range<int> mBitrateRange;
+
+    std::vector<int> mSampleRates;
+    std::vector<Range<int>> mSampleRateRanges;
+    std::vector<Range<int>> mInputChannelRanges;
+
+    /* no public constructor */
+    AudioCapabilities() {}
+    void init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+            const sp<AMessage> &format);
+    void initWithPlatformLimits();
+    bool supports(int sampleRate, int inputChannels);
+    void limitSampleRates(std::vector<int> rates);
+    void createDiscreteSampleRates();
+    void limitSampleRates(std::vector<Range<int>> rateRanges);
+    void applyLevelLimits();
+    void applyLimits(const std::vector<Range<int>> &inputChannels,
+            const std::optional<Range<int>> &bitRates);
+    void parseFromInfo(const sp<AMessage> &format);
+
+    friend struct CodecCapabilities;
+};
+
+}  // namespace android
+
+#endif // AUDIO_CAPABILITIES_H_
\ No newline at end of file
diff --git a/media/libmedia/include/media/CodecCapabilities.h b/media/libmedia/include/media/CodecCapabilities.h
new file mode 100644
index 0000000..9d1c4ea
--- /dev/null
+++ b/media/libmedia/include/media/CodecCapabilities.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_CAPABILITIES_H_
+
+#define CODEC_CAPABILITIES_H_
+
+#include <media/AudioCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/Vector.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct CodecCapabilities {
+
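+    /**
+     * Checks whether the bitrate carried by the format (preferring the larger of
+     * KEY_BIT_RATE and KEY_MAX_BIT_RATE when both are present) falls within the
+     * given range. Formats without bitrate information are considered supported.
+     */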
+    static bool SupportsBitrate(Range<int> bitrateRange,
+            const sp<AMessage> &format);
+
+    /**
+     * Returns the media type for which this codec-capability object was created.
+     */
+    const std::string& getMediaType();
+
+    /**
+     * Returns the supported profile levels.
+     */
+    const std::vector<ProfileLevel>& getProfileLevels();
+
+private:
+    std::string mMediaType;
+    std::vector<ProfileLevel> mProfileLevels;
+
+    std::shared_ptr<AudioCapabilities> mAudioCaps;
+};
+
+}  // namespace android
+
+#endif // CODEC_CAPABILITIES_H_
\ No newline at end of file
diff --git a/media/libmedia/include/media/CodecCapabilitiesUtils.h b/media/libmedia/include/media/CodecCapabilitiesUtils.h
new file mode 100644
index 0000000..2bf822a
--- /dev/null
+++ b/media/libmedia/include/media/CodecCapabilitiesUtils.h
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_CAPABILITIES_UTILS_H_
+
+#define CODEC_CAPABILITIES_UTILS_H_
+
+#include <algorithm>
+#include <climits>
+#include <cmath>
+#include <optional>
+#include <string>
+#include <vector>
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+
+namespace android {
+
+struct ProfileLevel {
+    uint32_t mProfile;
+    uint32_t mLevel;
+    bool operator <(const ProfileLevel &o) const {
+        return mProfile < o.mProfile || (mProfile == o.mProfile && mLevel < o.mLevel);
+    }
+};
+
+/**
+ * Immutable class for describing the range of two numeric values.
+ *
+ * To make it immutable, all data are private and all functions are const.
+ *
+ * From frameworks/base/core/java/android/util/Range.java
+ */
+template<typename T>
+struct Range {
+    Range() : lower_(), upper_() {}
+
+    Range(T l, T u) : lower_(l), upper_(u) {}
+
+    constexpr bool empty() const { return lower_ > upper_; }
+
+    T lower() const { return lower_; }
+
+    T upper() const { return upper_; }
+
+    // Check if a value is in the range.
+    bool contains(T value) const {
+        return lower_ <= value && upper_ >= value;
+    }
+
+    bool contains(Range<T> range) const {
+        return (range.lower_ >= lower_) && (range.upper_ <= upper_);
+    }
+
+    // Clamp a value in the range
+    T clamp(T value) const {
+        if (value < lower_) {
+            return lower_;
+        } else if (value > upper_) {
+            return upper_;
+        } else {
+            return value;
+        }
+    }
+
+    // Return the intersected range
+    Range<T> intersect(Range<T> range) const {
+        if (lower_ >= range.lower() && range.upper() >= upper_) {
+            // range includes this
+            return *this;
+        } else if (range.lower() >= lower_ && range.upper() <= upper_) {
+            // this includes range
+            return range;
+        } else {
+            // if ranges are disjoint returns an empty Range(lower > upper)
+            Range<T> result = Range<T>(std::max(lower_, range.lower_),
+                    std::min(upper_, range.upper_));
+            if (result.empty()) {
+                ALOGE("Failed to intersect 2 ranges as they are disjoint");
+            }
+            return result;
+        }
+    }
+
+    /**
+     * Returns the intersection of this range and the inclusive range
+     * specified by {@code [lower, upper]}.
+     * <p>
+     * See {@link #intersect(Range)} for more details.</p>
+     *
+     * @param lower the lower endpoint of the inclusive range
+     * @param upper the upper endpoint of the inclusive range
+     * @return the intersection of this range and the range {@code [lower, upper]};
+     *         if the two are disjoint, the result is empty (lower > upper)
+     */
+    Range<T> intersect(T lower, T upper) const {
+        return Range(std::max(lower_, lower), std::min(upper_, upper));
+    }
+
+private:
+    T lower_;
+    T upper_;
+};
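+
+// Usage examples (values follow from the definitions above):
+//   Range<int>(8000, 48000).clamp(96000) returns 48000.
+//   Range<int>(8000, 48000).intersect(Range<int>(44100, 192000)) yields [44100, 48000].
+//   Intersecting disjoint ranges produces an empty result (lower > upper).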
+
+static const Range<int> POSITIVE_INTEGERS = Range<int>(1, INT_MAX);
+
+// found stuff that is not supported by framework (=> this should not happen)
+constexpr int ERROR_CAPABILITIES_UNRECOGNIZED   = (1 << 0);
+// found profile/level for which we don't have capability estimates
+constexpr int ERROR_CAPABILITIES_UNSUPPORTED    = (1 << 1);
+// have not found any profile/level for which we don't have capability estimate
+// constexpr int ERROR_NONE_SUPPORTED = (1 << 2);
+
+/**
+ * Sorts distinct (non-intersecting) range array in ascending order.
+ * From frameworks/base/media/java/android/media/Utils.java
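+ * Overlapping ranges are reported via ALOGE; callers are expected to pass
+ * only distinct (non-intersecting) ranges.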
+ */
+template<typename T>
+void sortDistinctRanges(std::vector<Range<T>> *ranges) {
+    std::sort(ranges->begin(), ranges->end(),
+            [](Range<T> r1, Range<T> r2) {
+        if (r1.upper() < r2.lower()) {
+            return true;
+        } else if (r1.lower() > r2.upper()) {
+            return false;
+        } else {
+            ALOGE("sample rate ranges must be distinct.");
+            return false;
+        }
+    });
+}
+
+/**
+ * Returns the intersection of two sets of non-intersecting ranges
+ * From frameworks/base/media/java/android/media/Utils.java
+ * @param one a sorted set of non-intersecting ranges in ascending order
+ * @param another another sorted set of non-intersecting ranges in ascending order
+ * @return the intersection of the two sets, sorted in ascending order
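+ *
+ * For example, intersecting {[1,3], [5,9]} with {[2,6]} yields {[2,3], [5,6]}.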
+ */
+template<typename T>
+std::vector<Range<T>> intersectSortedDistinctRanges(
+        const std::vector<Range<T>> &one, const std::vector<Range<T>> &another) {
+    std::vector<Range<T>> result;
+    int ix = 0;
+    for (Range<T> range : another) {
+        while (ix < one.size() && one[ix].upper() < range.lower()) {
+            ++ix;
+        }
+        while (ix < one.size() && one[ix].upper() < range.upper()) {
+            result.push_back(range.intersect(one[ix]));
+            ++ix;
+        }
+        if (ix == one.size()) {
+            break;
+        }
+        if (one[ix].lower() <= range.upper()) {
+            result.push_back(range.intersect(one[ix]));
+        }
+    }
+    return result;
+}
+
+// parse string into int range
+std::optional<Range<int>> ParseIntRange(const std::string &str);
+
+}  // namespace android
+
+#endif  // CODEC_CAPABILITIES_UTILS_H_
\ No newline at end of file
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 54f565a..72aca98 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -20,6 +20,8 @@
 
 #include <android-base/macros.h>
 #include <binder/Parcel.h>
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/foundation/AString.h>
 
@@ -43,13 +45,10 @@
 struct MediaCodecListWriter;
 
 struct MediaCodecInfo : public RefBase {
-    struct ProfileLevel {
-        uint32_t mProfile;
-        uint32_t mLevel;
-        bool operator <(const ProfileLevel &o) const {
-            return mProfile < o.mProfile || (mProfile == o.mProfile && mLevel < o.mLevel);
-        }
-    };
+
+    // ProfileLevel was moved to CodecCapabilitiesUtils.h; alias it here to keep
+    // source compatibility for existing users of MediaCodecInfo::ProfileLevel.
+    typedef ::android::ProfileLevel ProfileLevel;
 
     struct CapabilitiesWriter;
 
@@ -69,6 +68,7 @@
         constexpr static char FEATURE_MULTIPLE_FRAMES[] = "feature-multiple-frames";
         constexpr static char FEATURE_SECURE_PLAYBACK[] = "feature-secure-playback";
         constexpr static char FEATURE_TUNNELED_PLAYBACK[] = "feature-tunneled-playback";
+        constexpr static char FEATURE_DETACHED_SURFACE[] = "feature-detached-surface";
 
         /**
          * Returns the supported levels for each supported profile in a target array.
diff --git a/media/libmedia/include/media/RingBuffer.h b/media/libmedia/include/media/RingBuffer.h
index 4d92d87..a08f35e 100644
--- a/media/libmedia/include/media/RingBuffer.h
+++ b/media/libmedia/include/media/RingBuffer.h
@@ -44,8 +44,14 @@
     /**
      * Forward iterator to this class.  Implements an std:forward_iterator.
      */
-    class iterator : public std::iterator<std::forward_iterator_tag, T> {
+    class iterator {
     public:
+        using iterator_category = std::forward_iterator_tag;
+        using value_type = T;
+        using difference_type = std::ptrdiff_t;
+        using pointer = T*;
+        using reference = T&;
+
         iterator(T* ptr, size_t size, size_t pos, size_t ctr);
 
         iterator& operator++();
@@ -357,5 +363,3 @@
 }; // namespace android
 
 #endif // ANDROID_SERVICE_UTILS_RING_BUFFER_H
-
-
diff --git a/media/libmedia/tests/codeccapabilities/Android.bp b/media/libmedia/tests/codeccapabilities/Android.bp
new file mode 100644
index 0000000..79eb71a
--- /dev/null
+++ b/media/libmedia/tests/codeccapabilities/Android.bp
@@ -0,0 +1,36 @@
+cc_test {
+    name: "CodecCapabilitiesTest",
+    team: "trendy_team_media_codec_framework",
+
+    test_suites: [
+        "general-tests",
+    ],
+    gtest: true,
+
+    srcs: [
+        "CodecCapabilitiesTest.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libmedia_codeclist", // available >= R
+        "libmedia_codeclist_capabilities",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
new file mode 100644
index 0000000..89c9739
--- /dev/null
+++ b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilitiesTest"
+
+#include <utils/Log.h>
+
+#include <memory>
+
+#include <gtest/gtest.h>
+
+#include <binder/Parcel.h>
+
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/MediaCodecInfo.h>
+
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AString.h>
+
+using namespace android;
+
+class AudioCapsAacTest : public testing::Test {
+protected:
+    AudioCapsAacTest() {
+        std::string mediaType = MIMETYPE_AUDIO_AAC;
+
+        sp<AMessage> details = new AMessage;
+        details->setString("bitrate-range", "8000-960000");
+        details->setString("max-channel-count", "8");
+        details->setString("sample-rate-ranges",
+                "7350,8000,11025,12000,16000,22050,24000,32000,44100,48000");
+
+        std::vector<ProfileLevel> profileLevel{
+            ProfileLevel(2, 0),
+            ProfileLevel(5, 0),
+            ProfileLevel(29, 0),
+            ProfileLevel(23, 0),
+            ProfileLevel(39, 0),
+            ProfileLevel(20, 0),
+            ProfileLevel(42, 0),
+        };
+
+        audioCaps = AudioCapabilities::Create(mediaType, profileLevel, details);
+    }
+
+    std::shared_ptr<AudioCapabilities> audioCaps;
+};
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_Bitrate) {
+    const Range<int>& bitrateRange = audioCaps->getBitrateRange();
+    EXPECT_EQ(bitrateRange.lower(), 8000) << "bitrate range does not match. lower: "
+            << bitrateRange.lower();
+    EXPECT_EQ(bitrateRange.upper(), 510000) << "bitrate range does not match. upper: "
+            << bitrateRange.upper();
+}
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_InputChannelCount) {
+    int maxInputChannelCount = audioCaps->getMaxInputChannelCount();
+    EXPECT_EQ(maxInputChannelCount, 8);
+    int minInputChannelCount = audioCaps->getMinInputChannelCount();
+    EXPECT_EQ(minInputChannelCount, 1);
+}
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_SupportedSampleRates) {
+    const std::vector<int>& sampleRates = audioCaps->getSupportedSampleRates();
+    EXPECT_EQ(sampleRates, std::vector<int>({7350, 8000, 11025, 12000, 16000, 22050,
+            24000, 32000, 44100, 48000}));
+
+    EXPECT_FALSE(audioCaps->isSampleRateSupported(6000))
+            << "isSampleRateSupported returned true for unsupported sample rate";
+    EXPECT_TRUE(audioCaps->isSampleRateSupported(8000))
+            << "isSampleRateSupported returned false for supported sample rate";
+    EXPECT_TRUE(audioCaps->isSampleRateSupported(12000))
+            << "isSampleRateSupported returned false for supported sample rate";
+    EXPECT_FALSE(audioCaps->isSampleRateSupported(44000))
+            << "isSampleRateSupported returned true for unsupported sample rate";
+    EXPECT_TRUE(audioCaps->isSampleRateSupported(48000))
+            << "isSampleRateSupported returned true for unsupported sample rate";
+}
+
+class AudioCapsRawTest : public testing::Test {
+protected:
+    AudioCapsRawTest() {
+        std::string mediaType = MIMETYPE_AUDIO_RAW;
+
+        sp<AMessage> details = new AMessage;
+        details->setString("bitrate-range", "1-10000000");
+        details->setString("channel-ranges", "1,2,3,4,5,6,7,8,9,10,11,12");
+        details->setString("sample-rate-ranges", "8000-192000");
+
+        std::vector<ProfileLevel> profileLevel;
+
+        audioCaps = AudioCapabilities::Create(mediaType, profileLevel, details);
+    }
+
+    std::shared_ptr<AudioCapabilities> audioCaps;
+};
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_Bitrate) {
+    const Range<int>& bitrateRange = audioCaps->getBitrateRange();
+    EXPECT_EQ(bitrateRange.lower(), 1);
+    EXPECT_EQ(bitrateRange.upper(), 10000000);
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_InputChannelCount) {
+    int maxInputChannelCount = audioCaps->getMaxInputChannelCount();
+    EXPECT_EQ(maxInputChannelCount, 12);
+    int minInputChannelCount = audioCaps->getMinInputChannelCount();
+    EXPECT_EQ(minInputChannelCount, 1);
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_InputChannelCountRanges) {
+    const std::vector<Range<int>>& inputChannelCountRanges
+            = audioCaps->getInputChannelCountRanges();
+    std::vector<Range<int>> expectedOutput({{1,1}, {2,2}, {3,3}, {4,4}, {5,5},
+            {6,6}, {7,7}, {8,8}, {9,9}, {10,10}, {11,11}, {12,12}});
+    ASSERT_EQ(inputChannelCountRanges.size(), expectedOutput.size());
+    for (int i = 0; i < inputChannelCountRanges.size(); i++) {
+        EXPECT_EQ(inputChannelCountRanges.at(i).lower(), expectedOutput.at(i).lower());
+        EXPECT_EQ(inputChannelCountRanges.at(i).upper(), expectedOutput.at(i).upper());
+    }
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_SupportedSampleRates) {
+    const std::vector<Range<int>>& sampleRateRanges = audioCaps->getSupportedSampleRateRanges();
+    EXPECT_EQ(sampleRateRanges.size(), 1);
+    EXPECT_EQ(sampleRateRanges.at(0).lower(), 8000);
+    EXPECT_EQ(sampleRateRanges.at(0).upper(), 192000);
+
+    EXPECT_EQ(audioCaps->isSampleRateSupported(7000), false);
+    EXPECT_EQ(audioCaps->isSampleRateSupported(10000), true);
+    EXPECT_EQ(audioCaps->isSampleRateSupported(193000), false);
+}
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 649f813..b5867a6 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -30,9 +30,6 @@
     name: "libmedia_helper",
     vendor_available: true,
     min_sdk_version: "29",
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
     srcs: [
         "AudioParameter.cpp",
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index 8a38dd7..5214dfe 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -16,8 +16,8 @@
     name: "libmediametrics",
 
     srcs: [
-        "MediaMetricsItem.cpp",
         "MediaMetrics.cpp",
+        "MediaMetricsItem.cpp",
     ],
 
     shared_libs: [
@@ -40,8 +40,8 @@
 
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
         cfi: true,
     },
@@ -50,8 +50,8 @@
     stubs: {
         symbol_file: "libmediametrics.map.txt",
         versions: [
-            "1" ,
-        ]
+            "1",
+        ],
     },
 
     header_abi_checker: {
@@ -65,7 +65,7 @@
         "//frameworks/base/apex/media/framework",
         "//frameworks/base/core/jni",
         "//frameworks/base/media/jni",
-	"//packages/modules/Media/apex/framework",
+        "//packages/modules/Media/apex/framework",
     ],
 }
 
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 26aa375..f367a3e 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -213,6 +213,7 @@
                                                              // format to transport packets.
                                                              // Raw byte streams are used if this
                                                              // is false.
+#define AMEDIAMETRICS_PROP_TOSTRING "toString"             // string
 #define AMEDIAMETRICS_PROP_TOTALINPUTBYTES "totalInputBytes" // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_TOTALOUTPUTBYTES "totalOutputBytes" // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_THREADID       "threadId"       // int32 value io handle
@@ -243,6 +244,7 @@
 // Values are strings accepted for a given property.
 
 // An event is a general description, which often is a function name.
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_APPLYVOLUMESHAPER "applyVolumeShaper"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_BEGINAUDIOINTERVALGROUP "beginAudioIntervalGroup"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CLOSE      "close"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE     "create"
@@ -265,6 +267,7 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID  "setLogSessionId" // AudioTrack, Record
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYBACKPARAM "setPlaybackParam" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID "setPlayerIId" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSAMPLERATE "setSampleRate" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD "setStartThreshold" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME   "setVoiceVolume" // AudioFlinger
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME  "setVolume"  // AudioTrack
diff --git a/media/libmediaplayerservice/DeathNotifier.cpp b/media/libmediaplayerservice/DeathNotifier.cpp
index ab22f67..241c52d 100644
--- a/media/libmediaplayerservice/DeathNotifier.cpp
+++ b/media/libmediaplayerservice/DeathNotifier.cpp
@@ -17,11 +17,18 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "MediaPlayerService-DeathNotifier"
 #include <android-base/logging.h>
+#include <map>
 
 #include "DeathNotifier.h"
 
 namespace android {
 
+// Only dereference the cookie if it is still valid, i.e. it is still a key in this map.
+// Only used with the NDK backend.
+static uintptr_t sCookieKeyCounter = 0;
+static std::map<uintptr_t, wp<DeathNotifier::DeathRecipient>> sCookies;
+static std::mutex sCookiesMutex;
+
 class DeathNotifier::DeathRecipient :
         public IBinder::DeathRecipient,
         public hardware::hidl_death_recipient {
@@ -44,13 +51,32 @@
     }
 
     static void OnBinderDied(void *cookie) {
-        DeathRecipient *thiz = (DeathRecipient *)cookie;
-        thiz->mNotify();
+        std::unique_lock<std::mutex> guard(sCookiesMutex);
+        if (auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie)); it != sCookies.end()) {
+            sp<DeathRecipient> recipient = it->second.promote();
+            sCookies.erase(it);
+            guard.unlock();
+
+            if (recipient) {
+                LOG(INFO) << "Notifying DeathRecipient from OnBinderDied.";
+                recipient->mNotify();
+            } else {
+                LOG(INFO) <<
+                    "Tried to notify DeathRecipient from OnBinderDied but could not promote.";
+            }
+        }
     }
 
     AIBinder_DeathRecipient *getNdkRecipient() {
         return mNdkRecipient.get();;
     }
+    ~DeathRecipient() {
+        // The lock must be held so the object is not used in OnBinderDied while being destroyed.
+        std::lock_guard<std::mutex> guard(sCookiesMutex);
+        sCookies.erase(mCookieKey);
+    }
+
+    uintptr_t mCookieKey;
 
 private:
     Notify mNotify;
@@ -73,8 +99,15 @@
       : mService{std::in_place_index<3>, service},
         mDeathRecipient{new DeathRecipient(notify)} {
     mDeathRecipient->initNdk();
+    {
+        std::lock_guard<std::mutex> guard(sCookiesMutex);
+        mDeathRecipient->mCookieKey = sCookieKeyCounter++;
+        sCookies[mDeathRecipient->mCookieKey] = mDeathRecipient;
+    }
     AIBinder_linkToDeath(
-            service.get(), mDeathRecipient->getNdkRecipient(), mDeathRecipient.get());
+            service.get(),
+            mDeathRecipient->getNdkRecipient(),
+            reinterpret_cast<void*>(mDeathRecipient->mCookieKey));
 }
 
 DeathNotifier::DeathNotifier(DeathNotifier&& other)
@@ -94,10 +127,11 @@
         std::get<2>(mService)->unlinkToDeath(mDeathRecipient);
         break;
     case 3:
+
         AIBinder_unlinkToDeath(
                 std::get<3>(mService).get(),
                 mDeathRecipient->getNdkRecipient(),
-                mDeathRecipient.get());
+                reinterpret_cast<void*>(mDeathRecipient->mCookieKey));
         break;
     default:
         CHECK(false) << "Corrupted service type during destruction.";
diff --git a/media/libmediaplayerservice/DeathNotifier.h b/media/libmediaplayerservice/DeathNotifier.h
index 24e45a3..0fd7c65 100644
--- a/media/libmediaplayerservice/DeathNotifier.h
+++ b/media/libmediaplayerservice/DeathNotifier.h
@@ -37,10 +37,11 @@
     DeathNotifier(DeathNotifier&& other);
     ~DeathNotifier();
 
+    class DeathRecipient;
+
 private:
     std::variant<std::monostate, sp<IBinder>, sp<HBase>, ::ndk::SpAIBinder> mService;
 
-    class DeathRecipient;
     sp<DeathRecipient> mDeathRecipient;
 };
 
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 89348a4..dce6ba8 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -111,9 +111,12 @@
 // To collect the encoder usage for the battery app
 static void addBatteryData(uint32_t params) {
     sp<IBinder> binder =
-        defaultServiceManager()->getService(String16("media.player"));
+        defaultServiceManager()->waitForService(String16("media.player"));
     sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
-    CHECK(service.get() != NULL);
+    if (service.get() == nullptr) {
+        ALOGE("%s: Failed to get media.player service", __func__);
+        return;
+    }
 
     service->addBatteryData(params);
 }
@@ -1331,10 +1334,10 @@
         // cause out-of-memory due to large input buffer size. And audio recording
         // probably doesn't make sense in the scenario, since the slow-down factor
         // is probably huge (eg. mSampleRate=48K, mCaptureFps=240, mFrameRate=1).
-        const static int32_t SAMPLE_RATE_HZ_MAX = 192000;
+        const static int32_t kSampleRateHzMax = 192000;
         sourceSampleRate =
                 (mSampleRate * mCaptureFps + mFrameRate / 2) / mFrameRate;
-        if (sourceSampleRate < mSampleRate || sourceSampleRate > SAMPLE_RATE_HZ_MAX) {
+        if (sourceSampleRate < mSampleRate || sourceSampleRate > kSampleRateHzMax) {
             ALOGE("source sample rate out of range! "
                     "(mSampleRate %d, mCaptureFps %.2f, mFrameRate %d",
                     mSampleRate, mCaptureFps, mFrameRate);
@@ -1453,29 +1456,44 @@
 }
 
 status_t StagefrightRecorder::setupAACRecording() {
-    // FIXME:
-    // Add support for OUTPUT_FORMAT_AAC_ADIF
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_AAC_ADTS);
+    // TODO(b/324512842): Add support for OUTPUT_FORMAT_AAC_ADIF
+    if (mOutputFormat != OUTPUT_FORMAT_AAC_ADTS) {
+        ALOGE("Invalid output format %d used for AAC recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
-    CHECK(mAudioEncoder == AUDIO_ENCODER_AAC ||
-          mAudioEncoder == AUDIO_ENCODER_HE_AAC ||
-          mAudioEncoder == AUDIO_ENCODER_AAC_ELD);
-    CHECK(mAudioSource != AUDIO_SOURCE_CNT);
+    if (mAudioEncoder != AUDIO_ENCODER_AAC
+            && mAudioEncoder != AUDIO_ENCODER_HE_AAC
+            && mAudioEncoder != AUDIO_ENCODER_AAC_ELD) {
+        ALOGE("Invalid encoder %d used for AAC recording", mAudioEncoder);
+        return BAD_VALUE;
+    }
+
+    if (mAudioSource == AUDIO_SOURCE_CNT) {
+        ALOGE("Audio source hasn't been set correctly");
+        return BAD_VALUE;
+    }
 
     mWriter = new AACWriter(mOutputFd);
     return setupRawAudioRecording();
 }
 
 status_t StagefrightRecorder::setupOggRecording() {
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_OGG);
+    if (mOutputFormat != OUTPUT_FORMAT_OGG) {
+        ALOGE("Invalid output format %d used for OGG recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     mWriter = new OggWriter(mOutputFd);
     return setupRawAudioRecording();
 }
 
 status_t StagefrightRecorder::setupAMRRecording() {
-    CHECK(mOutputFormat == OUTPUT_FORMAT_AMR_NB ||
-          mOutputFormat == OUTPUT_FORMAT_AMR_WB);
+    if (mOutputFormat != OUTPUT_FORMAT_AMR_NB
+            && mOutputFormat != OUTPUT_FORMAT_AMR_WB) {
+        ALOGE("Invalid output format %d used for AMR recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     if (mOutputFormat == OUTPUT_FORMAT_AMR_NB) {
         if (mAudioEncoder != AUDIO_ENCODER_DEFAULT &&
@@ -1528,7 +1546,10 @@
 }
 
 status_t StagefrightRecorder::setupRTPRecording() {
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_RTP_AVP);
+    if (mOutputFormat != OUTPUT_FORMAT_RTP_AVP) {
+        ALOGE("Invalid output format %d used for RTP recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     if ((mAudioSource != AUDIO_SOURCE_CNT
                 && mVideoSource != VIDEO_SOURCE_LIST_END)
@@ -1571,7 +1592,10 @@
 }
 
 status_t StagefrightRecorder::setupMPEG2TSRecording() {
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
+    if (mOutputFormat != OUTPUT_FORMAT_MPEG2TS) {
+        ALOGE("Invalid output format %d used for MPEG2TS recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
 
@@ -2095,6 +2119,11 @@
 
     if (tsLayers > 1) {
         uint32_t bLayers = std::min(2u, tsLayers - 1); // use up-to 2 B-layers
+        // TODO(b/341121900): Remove this once B frames are handled correctly in screen recorder
+        // use case in case of mic only
+        if (mAudioSource == AUDIO_SOURCE_MIC && mVideoSource == VIDEO_SOURCE_SURFACE) {
+            bLayers = 0;
+        }
         uint32_t pLayers = tsLayers - bLayers;
         format->setString(
                 "ts-schema", AStringPrintf("android.generic.%u+%u", pLayers, bLayers));
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index b511372..78163e4 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -44,7 +44,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback+bugs@google.com",
         ],
         componentid: 155276,
         hotlists: [
@@ -110,6 +110,17 @@
         "libresourcemanagerservice",
         "libmediametricsservice",
         "mediametricsservice-aidl-cpp",
+        "libcameraservice",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V3-ndk",
+        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.4",
+        "libaudiohal@7.0",
     ],
     header_libs: [
         "libaudiohal_headers",
@@ -124,6 +135,7 @@
     ],
     defaults: [
         "libmediaplayerserviceFuzzer_defaults",
+        "libmediaplayerservice_defaults",
     ],
     static_libs: [
         "libplayerservice_datasource",
@@ -131,7 +143,10 @@
     shared_libs: [
         "libdatasource",
         "libdrmframework",
+        "libstagefright_httplive",
+        "libmediaextractorservice",
     ],
+    include_dirs: ["frameworks/av/services/mediaextractor"],
 }
 
 cc_fuzz {
@@ -143,10 +158,13 @@
         "libmediaplayerserviceFuzzer_defaults",
     ],
     static_libs: [
+        "libgmock",
+        "libgtest_ndk_c++",
         "libplayerservice_datasource",
         "libstagefright_nuplayer",
         "libstagefright_rtsp",
         "libstagefright_timedtext",
+        "libbinder_random_parcel",
     ],
     shared_libs: [
         "android.hardware.media.c2@1.0",
@@ -175,7 +193,10 @@
         "libpowermanager",
         "libstagefright_httplive",
         "libaudiohal@7.0",
+        "libmediaextractorservice",
     ],
+    corpus: ["corpus/*"],
+    include_dirs: ["frameworks/av/services/mediaextractor"],
 }
 
 cc_fuzz {
diff --git a/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204 b/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204
new file mode 100755
index 0000000..13e4732
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f b/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f
new file mode 100755
index 0000000..591816e
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4 b/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4
new file mode 100755
index 0000000..2acf349
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8 b/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8
new file mode 100755
index 0000000..941885f
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa b/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa
new file mode 100755
index 0000000..a6920fa
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6 b/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6
new file mode 100755
index 0000000..6b70ddd
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622 b/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622
new file mode 100755
index 0000000..a919290
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1 b/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1
new file mode 100755
index 0000000..1062677
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075 b/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075
new file mode 100755
index 0000000..ed11aff
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0 b/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0
new file mode 100755
index 0000000..d82f45d
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607 b/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607
new file mode 100755
index 0000000..32af6ee
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112 b/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112
new file mode 100755
index 0000000..abfba79
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324 b/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324
new file mode 100755
index 0000000..7fb1bca
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
index a189d04..15265bf 100644
--- a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
@@ -15,9 +15,13 @@
  *
  */
 
+#include <MediaExtractorService.h>
 #include <MediaPlayerService.h>
+#include <android/gui/BnSurfaceComposerClient.h>
 #include <camera/Camera.h>
 #include <datasource/FileSource.h>
+#include <fuzzbinder/random_binder.h>
+#include <gmock/gmock.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <media/IMediaCodecList.h>
@@ -31,40 +35,100 @@
 #include <media/stagefright/RemoteDataSource.h>
 #include <media/stagefright/foundation/base64.h>
 #include <thread>
+#include "android-base/stringprintf.h"
 #include "fuzzer/FuzzedDataProvider.h"
-
-constexpr int32_t kUuidSize = 16;
-constexpr int32_t kMaxSleepTimeInMs = 100;
-constexpr int32_t kMinSleepTimeInMs = 0;
-constexpr int32_t kPlayCountMin = 1;
-constexpr int32_t kPlayCountMax = 10;
-constexpr int32_t kMaxDimension = 8192;
-constexpr int32_t kMinDimension = 0;
-
 using namespace std;
 using namespace android;
 
-constexpr audio_session_t kSupportedAudioSessions[] = {
-    AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_STAGE, AUDIO_SESSION_OUTPUT_MIX};
+constexpr int32_t kUuidSize = 16;
+constexpr int32_t kMinSize = 0;
+constexpr int32_t kMaxSize = 100;
+constexpr int32_t kFourCCVal = android::FOURCC('m', 't', 'r', 'X');
+constexpr int32_t kFlagVal =
+        ISurfaceComposerClient::eCursorWindow | ISurfaceComposerClient::eOpaque;
 
-constexpr audio_timestretch_stretch_mode_t kAudioStretchModes[] = {
-    AUDIO_TIMESTRETCH_STRETCH_DEFAULT, AUDIO_TIMESTRETCH_STRETCH_VOICE};
+const char dumpFile[] = "OutputDumpFile";
 
-constexpr audio_timestretch_fallback_mode_t kAudioFallbackModes[] = {
-    AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT, AUDIO_TIMESTRETCH_FALLBACK_DEFAULT,
-    AUDIO_TIMESTRETCH_FALLBACK_MUTE, AUDIO_TIMESTRETCH_FALLBACK_FAIL};
+enum DataSourceType { HTTP, FD, STREAM, FILETYPE, SOCKET, kMaxValue = SOCKET };
+
+constexpr audio_flags_mask_t kAudioFlagsMasks[] = {AUDIO_FLAG_NONE,
+                                                   AUDIO_FLAG_AUDIBILITY_ENFORCED,
+                                                   AUDIO_FLAG_SECURE,
+                                                   AUDIO_FLAG_SCO,
+                                                   AUDIO_FLAG_BEACON,
+                                                   AUDIO_FLAG_HW_AV_SYNC,
+                                                   AUDIO_FLAG_HW_HOTWORD,
+                                                   AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY,
+                                                   AUDIO_FLAG_BYPASS_MUTE,
+                                                   AUDIO_FLAG_LOW_LATENCY,
+                                                   AUDIO_FLAG_DEEP_BUFFER,
+                                                   AUDIO_FLAG_NO_MEDIA_PROJECTION,
+                                                   AUDIO_FLAG_MUTE_HAPTIC,
+                                                   AUDIO_FLAG_NO_SYSTEM_CAPTURE,
+                                                   AUDIO_FLAG_CAPTURE_PRIVATE,
+                                                   AUDIO_FLAG_CONTENT_SPATIALIZED,
+                                                   AUDIO_FLAG_NEVER_SPATIALIZE,
+                                                   AUDIO_FLAG_CALL_REDIRECTION};
+
+constexpr audio_content_type_t kAudioContentTypes[] = {
+        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_CONTENT_TYPE_SPEECH,       AUDIO_CONTENT_TYPE_MUSIC,
+        AUDIO_CONTENT_TYPE_MOVIE,   AUDIO_CONTENT_TYPE_SONIFICATION, AUDIO_CONTENT_TYPE_ULTRASOUND};
+
+constexpr audio_source_t kAudioSources[] = {AUDIO_SOURCE_INVALID,
+                                            AUDIO_SOURCE_DEFAULT,
+                                            AUDIO_SOURCE_MIC,
+                                            AUDIO_SOURCE_VOICE_UPLINK,
+                                            AUDIO_SOURCE_VOICE_DOWNLINK,
+                                            AUDIO_SOURCE_VOICE_CALL,
+                                            AUDIO_SOURCE_CAMCORDER,
+                                            AUDIO_SOURCE_VOICE_RECOGNITION,
+                                            AUDIO_SOURCE_VOICE_COMMUNICATION,
+                                            AUDIO_SOURCE_REMOTE_SUBMIX,
+                                            AUDIO_SOURCE_UNPROCESSED,
+                                            AUDIO_SOURCE_VOICE_PERFORMANCE,
+                                            AUDIO_SOURCE_ECHO_REFERENCE,
+                                            AUDIO_SOURCE_FM_TUNER,
+                                            AUDIO_SOURCE_HOTWORD,
+                                            AUDIO_SOURCE_ULTRASOUND};
+
+constexpr audio_usage_t kAudioUsages[] = {AUDIO_USAGE_UNKNOWN,
+                                          AUDIO_USAGE_MEDIA,
+                                          AUDIO_USAGE_VOICE_COMMUNICATION,
+                                          AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+                                          AUDIO_USAGE_ALARM,
+                                          AUDIO_USAGE_NOTIFICATION,
+                                          AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
+                                          AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
+                                          AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
+                                          AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
+                                          AUDIO_USAGE_NOTIFICATION_EVENT,
+                                          AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+                                          AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                                          AUDIO_USAGE_ASSISTANCE_SONIFICATION,
+                                          AUDIO_USAGE_GAME,
+                                          AUDIO_USAGE_VIRTUAL_SOURCE,
+                                          AUDIO_USAGE_ASSISTANT,
+                                          AUDIO_USAGE_CALL_ASSISTANT,
+                                          AUDIO_USAGE_EMERGENCY,
+                                          AUDIO_USAGE_SAFETY,
+                                          AUDIO_USAGE_VEHICLE_STATUS,
+                                          AUDIO_USAGE_ANNOUNCEMENT};
+
+constexpr PixelFormat kPixelFormat[] = {
+        PIXEL_FORMAT_UNKNOWN,       PIXEL_FORMAT_NONE,        PIXEL_FORMAT_CUSTOM,
+        PIXEL_FORMAT_TRANSLUCENT,   PIXEL_FORMAT_TRANSPARENT, PIXEL_FORMAT_OPAQUE,
+        PIXEL_FORMAT_RGBA_8888,     PIXEL_FORMAT_RGBX_8888,   PIXEL_FORMAT_RGB_888,
+        PIXEL_FORMAT_RGB_565,       PIXEL_FORMAT_BGRA_8888,   PIXEL_FORMAT_RGBA_5551,
+        PIXEL_FORMAT_RGBA_4444,     PIXEL_FORMAT_RGBA_FP16,   PIXEL_FORMAT_RGBA_1010102,
+        PIXEL_FORMAT_R_8,           PIXEL_FORMAT_R_16_UINT,   PIXEL_FORMAT_RG_1616_UINT,
+        PIXEL_FORMAT_RGBA_10101010,
+};
 
 constexpr media_parameter_keys kMediaParamKeys[] = {
     KEY_PARAMETER_CACHE_STAT_COLLECT_FREQ_MS, KEY_PARAMETER_AUDIO_CHANNEL_COUNT,
     KEY_PARAMETER_PLAYBACK_RATE_PERMILLE, KEY_PARAMETER_AUDIO_ATTRIBUTES,
     KEY_PARAMETER_RTP_ATTRIBUTES};
 
-constexpr audio_stream_type_t kAudioStreamTypes[] = {
-    AUDIO_STREAM_DEFAULT,      AUDIO_STREAM_VOICE_CALL,    AUDIO_STREAM_SYSTEM,
-    AUDIO_STREAM_RING,         AUDIO_STREAM_MUSIC,         AUDIO_STREAM_ALARM,
-    AUDIO_STREAM_NOTIFICATION, AUDIO_STREAM_BLUETOOTH_SCO, AUDIO_STREAM_ENFORCED_AUDIBLE,
-    AUDIO_STREAM_DTMF,         AUDIO_STREAM_TTS,           AUDIO_STREAM_ASSISTANT};
-
 constexpr media_event_type kMediaEventTypes[] = {MEDIA_NOP,
                                                  MEDIA_PREPARED,
                                                  MEDIA_PLAYBACK_COMPLETE,
@@ -140,9 +204,26 @@
     DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPService);
 };
 
-class BinderDeathNotifier : public IBinder::DeathRecipient {
-   public:
-    void binderDied(const wp<IBinder> &) { abort(); }
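+// gmock-backed SurfaceComposerClient binder stub; it lets the fuzzer construct
+// SurfaceControl/Surface objects without a running SurfaceFlinger.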
+class FakeBnSurfaceComposerClient : public gui::BnSurfaceComposerClient {
+  public:
+    MOCK_METHOD(binder::Status, createSurface,
+                (const std::string& name, int32_t flags, const sp<IBinder>& parent,
+                 const gui::LayerMetadata& metadata, gui::CreateSurfaceResult* outResult),
+                (override));
+
+    MOCK_METHOD(binder::Status, clearLayerFrameStats, (const sp<IBinder>& handle), (override));
+
+    MOCK_METHOD(binder::Status, getLayerFrameStats,
+                (const sp<IBinder>& handle, gui::FrameStats* outStats), (override));
+
+    MOCK_METHOD(binder::Status, mirrorSurface,
+                (const sp<IBinder>& mirrorFromHandle, gui::CreateSurfaceResult* outResult),
+                (override));
+
+    MOCK_METHOD(binder::Status, mirrorDisplay,
+                (int64_t displayId, gui::CreateSurfaceResult* outResult), (override));
+
+    MOCK_METHOD(binder::Status, getSchedulingPolicy, (gui::SchedulingPolicy*), (override));
 };
 
 class MediaPlayerServiceFuzzer {
@@ -153,24 +234,40 @@
     void process(const uint8_t *data, size_t size);
 
    private:
-    bool setDataSource(const uint8_t *data, size_t size);
-    void invokeMediaPlayer();
-    FuzzedDataProvider mFdp;
-    sp<IMediaPlayer> mMediaPlayer = nullptr;
-    sp<IMediaPlayerClient> mMediaPlayerClient = nullptr;
-    const int32_t mDataSourceFd;
+     FuzzedDataProvider mFdp;
+     const int32_t mDataSourceFd;
+     sp<IMediaPlayer> mMediaPlayer = nullptr;
+     sp<IMediaPlayerClient> mMediaPlayerClient = nullptr;
+     void invokeMediaPlayer();
+     sp<SurfaceControl> makeSurfaceControl();
+     bool setDataSource(const uint8_t* data, size_t size);
 };
 
-bool MediaPlayerServiceFuzzer::setDataSource(const uint8_t *data, size_t size) {
-    status_t status = -1;
-    enum DataSourceType {http, fd, stream, file, socket, kMaxValue = socket};
-    switch (mFdp.ConsumeEnum<DataSourceType>()) {
-        case http: {
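+// Builds a SurfaceControl on top of the mocked composer client using fuzzed
+// dimensions, pixel format and flags, with a random binder as the layer handle.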
+sp<SurfaceControl> MediaPlayerServiceFuzzer::makeSurfaceControl() {
+     sp<IBinder> handle = getRandomBinder(&mFdp);
+     const sp<FakeBnSurfaceComposerClient> testClient(new FakeBnSurfaceComposerClient());
+     sp<SurfaceComposerClient> client = new SurfaceComposerClient(testClient);
+     uint32_t width = mFdp.ConsumeIntegral<uint32_t>();
+     uint32_t height = mFdp.ConsumeIntegral<uint32_t>();
+     uint32_t transformHint = mFdp.ConsumeIntegral<uint32_t>();
+     uint32_t flags = mFdp.ConsumeBool() ? kFlagVal : mFdp.ConsumeIntegral<uint32_t>();
+     int32_t format = mFdp.ConsumeBool() ? mFdp.ConsumeIntegral<uint32_t>()
+                                         : mFdp.PickValueInArray(kPixelFormat);
+     int32_t layerId = mFdp.ConsumeIntegral<int32_t>();
+     std::string layerName = android::base::StringPrintf("#%d", layerId);
+     return new SurfaceControl(client, handle, layerId, layerName, width, height, format,
+                               transformHint, flags);
+}
+
+bool MediaPlayerServiceFuzzer::setDataSource(const uint8_t* data, size_t size) {
+     status_t status = UNKNOWN_ERROR;
+     switch (mFdp.ConsumeEnum<DataSourceType>()) {
+        case HTTP: {
             KeyedVector<String8, String8> headers;
             headers.add(String8(mFdp.ConsumeRandomLengthString().c_str()),
                         String8(mFdp.ConsumeRandomLengthString().c_str()));
 
-            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, size);
+            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(kMinSize, size);
             vector<uint8_t> uriSuffix = mFdp.ConsumeBytes<uint8_t>(dataBlobSize);
 
             string uri(mFdp.PickValueInArray(kUrlPrefix));
@@ -183,18 +280,17 @@
                     mMediaPlayer->setDataSource(testService /*httpService*/, uri.c_str(), &headers);
             break;
         }
-        case fd: {
+        case FD: {
             write(mDataSourceFd, data, size);
-
             status = mMediaPlayer->setDataSource(mDataSourceFd, 0, size);
             break;
         }
-        case stream: {
+        case STREAM: {
             sp<IStreamSource> streamSource = sp<TestStreamSource>::make();
             status = mMediaPlayer->setDataSource(streamSource);
             break;
         }
-        case file: {
+        case FILETYPE: {
             write(mDataSourceFd, data, size);
 
             sp<DataSource> dataSource = new FileSource(dup(mDataSourceFd), 0, size);
@@ -205,7 +301,7 @@
             status = mMediaPlayer->setDataSource(iDataSource);
             break;
         }
-        case socket: {
+        case SOCKET: {
             String8 rtpParams = String8(mFdp.ConsumeRandomLengthString().c_str());
             struct sockaddr_in endpoint;
             endpoint.sin_family = mFdp.ConsumeIntegral<unsigned short>();
@@ -214,190 +310,250 @@
             status = mMediaPlayer->setDataSource(rtpParams);
             break;
         }
-    }
-
-    if (status != 0) {
+     }
+     if (status != OK) {
         return false;
-    }
-    return true;
+     }
+     return true;
 }
 
 void MediaPlayerServiceFuzzer::invokeMediaPlayer() {
-    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
-    String8 name = String8(mFdp.ConsumeRandomLengthString().c_str());
-    uint32_t width = mFdp.ConsumeIntegralInRange<uint32_t>(kMinDimension, kMaxDimension);
-    uint32_t height = mFdp.ConsumeIntegralInRange<uint32_t>(kMinDimension, kMaxDimension);
-    uint32_t pixelFormat = mFdp.ConsumeIntegral<int32_t>();
-    uint32_t flags = mFdp.ConsumeIntegral<int32_t>();
-    sp<SurfaceControl> surfaceControl =
-        composerClient->createSurface(name, width, height, pixelFormat, flags);
-    if (surfaceControl) {
-        sp<Surface> surface = surfaceControl->getSurface();
-        mMediaPlayer->setVideoSurfaceTexture(surface->getIGraphicBufferProducer());
-    }
-
-    BufferingSettings buffering;
-    buffering.mInitialMarkMs = mFdp.ConsumeIntegral<int32_t>();
-    buffering.mResumePlaybackMarkMs = mFdp.ConsumeIntegral<int32_t>();
-    mMediaPlayer->setBufferingSettings(buffering);
-    mMediaPlayer->getBufferingSettings(&buffering);
-
-    mMediaPlayer->prepareAsync();
-    size_t playCount = mFdp.ConsumeIntegralInRange<size_t>(kPlayCountMin, kPlayCountMax);
-    for (size_t Idx = 0; Idx < playCount; ++Idx) {
-        mMediaPlayer->start();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mMediaPlayer->pause();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mMediaPlayer->stop();
-    }
-    bool state;
-    mMediaPlayer->isPlaying(&state);
-
-    AudioPlaybackRate rate;
-    rate.mSpeed = mFdp.ConsumeFloatingPoint<float>();
-    rate.mPitch = mFdp.ConsumeFloatingPoint<float>();
-    rate.mStretchMode = mFdp.PickValueInArray(kAudioStretchModes);
-    rate.mFallbackMode = mFdp.PickValueInArray(kAudioFallbackModes);
-    mMediaPlayer->setPlaybackSettings(rate);
-    mMediaPlayer->getPlaybackSettings(&rate);
-
-    AVSyncSettings *avSyncSettings = new AVSyncSettings();
-    float videoFpsHint = mFdp.ConsumeFloatingPoint<float>();
-    mMediaPlayer->setSyncSettings(*avSyncSettings, videoFpsHint);
-    mMediaPlayer->getSyncSettings(avSyncSettings, &videoFpsHint);
-    delete avSyncSettings;
-
-    mMediaPlayer->seekTo(mFdp.ConsumeIntegral<int32_t>());
-
-    int32_t msec;
-    mMediaPlayer->getCurrentPosition(&msec);
-    mMediaPlayer->getDuration(&msec);
-    mMediaPlayer->reset();
-
-    mMediaPlayer->notifyAt(mFdp.ConsumeIntegral<int64_t>());
-
-    mMediaPlayer->setAudioStreamType(mFdp.PickValueInArray(kAudioStreamTypes));
-    mMediaPlayer->setLooping(mFdp.ConsumeIntegral<int32_t>());
-    float left = mFdp.ConsumeFloatingPoint<float>();
-    float right = mFdp.ConsumeFloatingPoint<float>();
-    mMediaPlayer->setVolume(left, right);
-
-    Parcel request, reply;
-    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    request.setDataPosition(0);
-    mMediaPlayer->invoke(request, &reply);
-
-    Parcel filter;
-    filter.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    filter.setDataPosition(0);
-    mMediaPlayer->setMetadataFilter(filter);
-
-    bool updateOnly = mFdp.ConsumeBool();
-    bool applyFilter = mFdp.ConsumeBool();
-    mMediaPlayer->getMetadata(updateOnly, applyFilter, &reply);
-    mMediaPlayer->setAuxEffectSendLevel(mFdp.ConsumeFloatingPoint<float>());
-    mMediaPlayer->attachAuxEffect(mFdp.ConsumeIntegral<int32_t>());
-
-    int32_t key = mFdp.PickValueInArray(kMediaParamKeys);
-    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    request.setDataPosition(0);
-    mMediaPlayer->setParameter(key, request);
-    key = mFdp.PickValueInArray(kMediaParamKeys);
-    mMediaPlayer->getParameter(key, &reply);
-
-    struct sockaddr_in endpoint;
-    mMediaPlayer->getRetransmitEndpoint(&endpoint);
-
-    AttributionSourceState attributionSource;
-    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
-    attributionSource.token = sp<BBinder>::make();
-    const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
-    sp<IMediaPlayer> mNextMediaPlayer = mpService->create(
-        mMediaPlayerClient, mFdp.PickValueInArray(kSupportedAudioSessions), attributionSource);
-    mMediaPlayer->setNextPlayer(mNextMediaPlayer);
-
-    const sp<media::VolumeShaper::Configuration> configuration =
-        sp<media::VolumeShaper::Configuration>::make();
-    const sp<media::VolumeShaper::Operation> operation = sp<media::VolumeShaper::Operation>::make();
-    mMediaPlayer->applyVolumeShaper(configuration, operation);
-
-    mMediaPlayer->getVolumeShaperState(mFdp.ConsumeIntegral<int32_t>());
-    uint8_t uuid[kUuidSize];
-    for (int32_t index = 0; index < kUuidSize; ++index) {
-        uuid[index] = mFdp.ConsumeIntegral<uint8_t>();
-    }
-    Vector<uint8_t> drmSessionId;
-    drmSessionId.push_back(mFdp.ConsumeIntegral<uint8_t>());
-    mMediaPlayer->prepareDrm(uuid, drmSessionId);
-    mMediaPlayer->releaseDrm();
-
-    audio_port_handle_t deviceId = mFdp.ConsumeIntegral<int32_t>();
-    mMediaPlayer->setOutputDevice(deviceId);
-    mMediaPlayer->getRoutedDeviceId(&deviceId);
-
-    mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool());
-
-    sp<MediaPlayer> mediaPlayer = (MediaPlayer *)mMediaPlayer.get();
-
-    int32_t msg = mFdp.PickValueInArray(kMediaEventTypes);
-    int32_t ext1 = mFdp.PickValueInArray(kMediaInfoTypes);
-    int32_t ext2 = mFdp.ConsumeIntegral<int32_t>();
-    Parcel obj;
-    obj.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    obj.setDataPosition(0);
-    mediaPlayer->notify(msg, ext1, ext2, &obj);
-
-    int32_t mediaPlayerDumpFd = memfd_create("OutputDumpFile", MFD_ALLOW_SEALING);
-    Vector<String16> args;
-    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
-    mediaPlayer->dump(mediaPlayerDumpFd, args);
-    close(mediaPlayerDumpFd);
-
-    mMediaPlayer->disconnect();
+     Parcel request, reply;
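+     // Exercise the IMediaPlayer API in a random order: each iteration picks one of
+     // the lambdas below and runs it with fuzzed arguments until the input is exhausted.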
+     while (mFdp.remaining_bytes()) {
+        auto invokeMediaPlayerApi = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    sp<SurfaceControl> surfaceControl = makeSurfaceControl();
+                    if (surfaceControl) {
+                        sp<Surface> surface = surfaceControl->getSurface();
+                        mMediaPlayer->setVideoSurfaceTexture(surface->getIGraphicBufferProducer());
+                    }
+                },
+                [&]() {
+                    BufferingSettings buffering;
+                    buffering.mInitialMarkMs = mFdp.ConsumeIntegral<int32_t>();
+                    buffering.mResumePlaybackMarkMs = mFdp.ConsumeIntegral<int32_t>();
+                    mMediaPlayer->setBufferingSettings(buffering);
+                },
+                [&]() {
+                    BufferingSettings buffering;
+                    mMediaPlayer->getBufferingSettings(&buffering);
+                },
+                [&]() {
+                    mMediaPlayer->prepareAsync();
+                    this_thread::sleep_for(chrono::milliseconds(100));  // Time to post message
+                },
+                [&]() {
+                    mMediaPlayer->start();
+                    this_thread::sleep_for(chrono::milliseconds(100));  // Time to post message
+                },
+                [&]() {
+                    mMediaPlayer->pause();
+                    this_thread::sleep_for(chrono::milliseconds(100));  // Time to post message
+                },
+                [&]() { mMediaPlayer->stop(); },
+                [&]() {
+                    bool state;
+                    mMediaPlayer->isPlaying(&state);
+                },
+                [&]() {
+                    AudioPlaybackRate rate;
+                    rate.mSpeed = mFdp.ConsumeFloatingPoint<float>();
+                    rate.mPitch = mFdp.ConsumeFloatingPoint<float>();
+                    rate.mStretchMode = mFdp.ConsumeBool() ? AUDIO_TIMESTRETCH_STRETCH_DEFAULT
+                                                           : AUDIO_TIMESTRETCH_STRETCH_VOICE;
+                    rate.mFallbackMode =
+                            (audio_timestretch_fallback_mode_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                    AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT,
+                                    AUDIO_TIMESTRETCH_FALLBACK_FAIL);
+                    mMediaPlayer->setPlaybackSettings(rate);
+                    mMediaPlayer->getPlaybackSettings(&rate);
+                },
+                [&]() {
+                    AVSyncSettings* avSyncSettings = new AVSyncSettings();
+                    float videoFpsHint = mFdp.ConsumeFloatingPoint<float>();
+                    mMediaPlayer->setSyncSettings(*avSyncSettings, videoFpsHint);
+                    delete avSyncSettings;
+                },
+                [&]() {
+                    AVSyncSettings* avSyncSettings = new AVSyncSettings();
+                    float videoFpsHint = 0;
+                    mMediaPlayer->getSyncSettings(avSyncSettings, &videoFpsHint);
+                    delete avSyncSettings;
+                },
+                [&]() { mMediaPlayer->seekTo(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    int32_t msec;
+                    mMediaPlayer->getCurrentPosition(&msec);
+                    mMediaPlayer->getDuration(&msec);
+                },
+                [&]() { mMediaPlayer->reset(); },
+                [&]() { mMediaPlayer->notifyAt(mFdp.ConsumeIntegral<uint64_t>()); },
+                [&]() {
+                    mMediaPlayer->setAudioStreamType(
+                            (audio_stream_type_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                    AUDIO_STREAM_VOICE_CALL, AUDIO_STREAM_CALL_ASSISTANT));
+                },
+                [&]() { mMediaPlayer->setLooping(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    mMediaPlayer->setVolume(mFdp.ConsumeFloatingPoint<float>() /* left */,
+                                            mFdp.ConsumeFloatingPoint<float>() /* right */);
+                },
+                [&]() {
+                    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+                    request.setDataPosition(0);
+                    mMediaPlayer->invoke(request, &reply);
+                },
+                [&]() {
+                    Parcel filter;
+                    filter.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+                    filter.setDataPosition(0);
+                    mMediaPlayer->setMetadataFilter(filter);
+                },
+                [&]() {
+                    mMediaPlayer->getMetadata(mFdp.ConsumeBool() /* updateOnly */,
+                                              mFdp.ConsumeBool() /* applyFilter */, &reply);
+                },
+                [&]() { mMediaPlayer->setAuxEffectSendLevel(mFdp.ConsumeFloatingPoint<float>()); },
+                [&]() { mMediaPlayer->attachAuxEffect(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    int32_t key = mFdp.PickValueInArray(kMediaParamKeys);
+                    request.writeInt32((audio_usage_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_USAGE_UNKNOWN, AUDIO_USAGE_ANNOUNCEMENT) /* usage */);
+                    request.writeInt32((audio_content_type_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_CONTENT_TYPE_UNKNOWN,
+                            AUDIO_CONTENT_TYPE_ULTRASOUND) /* content_type */);
+                    request.writeInt32((audio_source_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_SOURCE_INVALID, AUDIO_SOURCE_ULTRASOUND) /* source */);
+                    request.writeInt32((audio_flags_mask_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_FLAG_NONE, AUDIO_FLAG_CALL_REDIRECTION) /* flags */);
+                    request.writeInt32(mFdp.ConsumeBool() /* hasFlattenedTag */);
+                    request.writeString16(
+                            String16((mFdp.ConsumeRandomLengthString()).c_str()) /* tags */);
+                    request.setDataPosition(0);
+                    mMediaPlayer->setParameter(key, request);
+                    key = mFdp.PickValueInArray(kMediaParamKeys);
+                    mMediaPlayer->getParameter(key, &reply);
+                },
+                [&]() {
+                    int32_t key =
+                            mFdp.ConsumeBool() ? kFourCCVal : mFdp.ConsumeIntegral<uint32_t>();
+                    mMediaPlayer->getParameter(key, &reply);
+                },
+                [&]() {
+                    struct sockaddr_in endpoint;
+                    mMediaPlayer->getRetransmitEndpoint(&endpoint);
+                },
+                [&]() {
+                    AttributionSourceState attributionSource;
+                    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+                    attributionSource.token = sp<BBinder>::make();
+                    const sp<IMediaPlayerService> mpService(
+                            IMediaDeathNotifier::getMediaPlayerService());
+                    audio_session_t audioSessionId =
+                            (audio_session_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                    AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_MIX);
+                    sp<IMediaPlayer> mNextMediaPlayer = mpService->create(
+                            mMediaPlayerClient, audioSessionId, attributionSource);
+                    mMediaPlayer->setNextPlayer(mNextMediaPlayer);
+                },
+                [&]() {
+                    const sp<media::VolumeShaper::Configuration> configuration =
+                            sp<media::VolumeShaper::Configuration>::make();
+                    const sp<media::VolumeShaper::Operation> operation =
+                            sp<media::VolumeShaper::Operation>::make();
+                    mMediaPlayer->applyVolumeShaper(configuration, operation);
+                },
+                [&]() { mMediaPlayer->getVolumeShaperState(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    uint8_t uuid[kUuidSize];
+                    for (int32_t index = 0; index < kUuidSize; ++index) {
+                        uuid[index] = mFdp.ConsumeIntegral<uint8_t>();
+                    }
+                    Vector<uint8_t> drmSessionId;
+                    int32_t length = mFdp.ConsumeIntegralInRange<uint32_t>(kMinSize, kMaxSize);
+                    while (length--) {
+                        drmSessionId.push_back(mFdp.ConsumeIntegral<uint8_t>());
+                    }
+                    mMediaPlayer->prepareDrm(uuid, drmSessionId);
+                },
+                [&]() { mMediaPlayer->releaseDrm(); },
+                [&]() {
+                    audio_port_handle_t deviceId = mFdp.ConsumeIntegral<int32_t>();
+                    mMediaPlayer->setOutputDevice(deviceId);
+                },
+                [&]() {
+                    audio_port_handle_t deviceId;
+                    mMediaPlayer->getRoutedDeviceId(&deviceId);
+                },
+                [&]() { mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool()); },
+                [&]() {
+                    sp<MediaPlayer> mediaPlayer = (MediaPlayer*)mMediaPlayer.get();
+                    Parcel obj;
+                    obj.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+                    obj.setDataPosition(0);
+                    mediaPlayer->notify(mFdp.PickValueInArray(kMediaEventTypes) /* msg */,
+                                        mFdp.PickValueInArray(kMediaInfoTypes) /* ext1 */,
+                                        mFdp.ConsumeIntegral<int32_t>() /* ext2 */, &obj);
+                },
+                [&]() {
+                    sp<MediaPlayer> mediaPlayer = (MediaPlayer*)mMediaPlayer.get();
+                    int32_t mediaPlayerDumpFd = memfd_create(dumpFile, MFD_ALLOW_SEALING);
+                    Vector<String16> args;
+                    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+                    mediaPlayer->dump(mediaPlayerDumpFd, args);
+                    close(mediaPlayerDumpFd);
+                },
+                [&]() { mMediaPlayer->disconnect(); },
+        });
+        invokeMediaPlayerApi();
+     }
 }
 
-void MediaPlayerServiceFuzzer::process(const uint8_t *data, size_t size) {
-    MediaPlayerService::instantiate();
-
-    const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
-    if (!mpService) {
+void MediaPlayerServiceFuzzer::process(const uint8_t* data, size_t size) {
+     const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
+     if (!mpService) {
         return;
-    }
+     }
 
-    sp<IMediaCodecList> mediaCodecList = mpService->getCodecList();
+     sp<IMediaCodecList> mediaCodecList = mpService->getCodecList();
 
-    sp<IRemoteDisplayClient> remoteDisplayClient;
-    sp<IRemoteDisplay> remoteDisplay = mpService->listenForRemoteDisplay(
-        String16(mFdp.ConsumeRandomLengthString().c_str()) /*opPackageName*/, remoteDisplayClient,
-        String8(mFdp.ConsumeRandomLengthString().c_str()) /*iface*/);
+     sp<IRemoteDisplayClient> remoteDisplayClient;
+     sp<IRemoteDisplay> remoteDisplay = mpService->listenForRemoteDisplay(
+             String16(mFdp.ConsumeRandomLengthString().c_str()) /*opPackageName*/,
+             remoteDisplayClient, String8(mFdp.ConsumeRandomLengthString().c_str()) /*iface*/);
 
-    mpService->addBatteryData(mFdp.ConsumeIntegral<uint32_t>());
-    Parcel reply;
-    mpService->pullBatteryData(&reply);
+     mpService->addBatteryData(mFdp.ConsumeIntegral<uint32_t>());
+     Parcel reply;
+     mpService->pullBatteryData(&reply);
 
-    sp<MediaPlayerService> mediaPlayerService = (MediaPlayerService *)mpService.get();
-    AttributionSourceState attributionSource;
-    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
-    attributionSource.token = sp<BBinder>::make();
-    mMediaPlayer = mediaPlayerService->create(
-        mMediaPlayerClient, mFdp.PickValueInArray(kSupportedAudioSessions), attributionSource);
+     sp<MediaPlayerService> mediaPlayerService = (MediaPlayerService*)mpService.get();
+     AttributionSourceState attributionSource;
+     attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+     attributionSource.token = sp<BBinder>::make();
+     mMediaPlayer =
+             mediaPlayerService->create(mMediaPlayerClient,
+                                        (audio_session_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                                AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_MIX),
+                                        attributionSource);
 
-    int32_t mediaPlayerServiceDumpFd = memfd_create("OutputDumpFile", MFD_ALLOW_SEALING);
-    Vector<String16> args;
-    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
-    mediaPlayerService->dump(mediaPlayerServiceDumpFd, args);
-    close(mediaPlayerServiceDumpFd);
+     int32_t mediaPlayerServiceDumpFd = memfd_create(dumpFile, MFD_ALLOW_SEALING);
+     Vector<String16> args;
+     args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+     mediaPlayerService->dump(mediaPlayerServiceDumpFd, args);
+     close(mediaPlayerServiceDumpFd);
 
-    if (!mMediaPlayer) {
+     if (!mMediaPlayer) {
         return;
-    }
-
-    if (setDataSource(data, size)) {
+     }
+     if (setDataSource(data, size)) {
         invokeMediaPlayer();
-    }
+     }
+}
+
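+// One-time setup for the fuzzer process: register the media player and media
+// extractor services before any input is processed.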
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+     MediaPlayerService::instantiate();
+     MediaExtractorService::instantiate();
+     return 0;
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
diff --git a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
index fdac1a1..2518c21 100644
--- a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
@@ -15,22 +15,22 @@
  *
  */
 
-#include <media/stagefright/foundation/AString.h>
-#include "fuzzer/FuzzedDataProvider.h"
-
 #include <AudioFlinger.h>
 #include <MediaPlayerService.h>
 #include <ResourceManagerService.h>
-#include <fakeservicemanager/FakeServiceManager.h>
 #include <StagefrightRecorder.h>
 #include <camera/Camera.h>
 #include <camera/android/hardware/ICamera.h>
+#include <fakeservicemanager/FakeServiceManager.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/foundation/AString.h>
 #include <mediametricsservice/MediaMetricsService.h>
 #include <thread>
+#include "CameraService.h"
+#include "fuzzer/FuzzedDataProvider.h"
 
 using namespace std;
 using namespace android;
@@ -46,32 +46,27 @@
     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_SOURCE_VOICE_COMMUNICATION,
     AUDIO_SOURCE_REMOTE_SUBMIX,     AUDIO_SOURCE_UNPROCESSED,
     AUDIO_SOURCE_VOICE_PERFORMANCE, AUDIO_SOURCE_ECHO_REFERENCE,
-    AUDIO_SOURCE_FM_TUNER,          AUDIO_SOURCE_HOTWORD};
+    AUDIO_SOURCE_FM_TUNER,          AUDIO_SOURCE_HOTWORD,
+    AUDIO_SOURCE_ULTRASOUND};
+
+constexpr output_format kOutputFormat[] = {
+        OUTPUT_FORMAT_DEFAULT,        OUTPUT_FORMAT_THREE_GPP,
+        OUTPUT_FORMAT_MPEG_4,         OUTPUT_FORMAT_AUDIO_ONLY_START,
+        OUTPUT_FORMAT_RAW_AMR,        OUTPUT_FORMAT_AMR_NB,
+        OUTPUT_FORMAT_AMR_WB,         OUTPUT_FORMAT_AAC_ADTS,
+        OUTPUT_FORMAT_AUDIO_ONLY_END, OUTPUT_FORMAT_RTP_AVP,
+        OUTPUT_FORMAT_MPEG2TS,        OUTPUT_FORMAT_WEBM,
+        OUTPUT_FORMAT_HEIF,           OUTPUT_FORMAT_OGG,
+        OUTPUT_FORMAT_LIST_END};
+
+constexpr video_encoder kVideoEncoder[] = {
+        VIDEO_ENCODER_DEFAULT,      VIDEO_ENCODER_H263, VIDEO_ENCODER_H264,
+        VIDEO_ENCODER_MPEG_4_SP,    VIDEO_ENCODER_VP8,  VIDEO_ENCODER_HEVC,
+        VIDEO_ENCODER_DOLBY_VISION, VIDEO_ENCODER_AV1,  VIDEO_ENCODER_LIST_END};
 
 constexpr audio_microphone_direction_t kSupportedMicrophoneDirections[] = {
     MIC_DIRECTION_UNSPECIFIED, MIC_DIRECTION_FRONT, MIC_DIRECTION_BACK, MIC_DIRECTION_EXTERNAL};
 
-struct RecordingConfig {
-    output_format outputFormat;
-    audio_encoder audioEncoder;
-    video_encoder videoEncoder;
-};
-
-const struct RecordingConfig kRecordingConfigList[] = {
-    {OUTPUT_FORMAT_AMR_NB, AUDIO_ENCODER_AMR_NB, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AMR_WB, AUDIO_ENCODER_AMR_WB, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_HE_AAC, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC_ELD, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_OGG, AUDIO_ENCODER_OPUS, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_RTP_AVP, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_MPEG2TS, AUDIO_ENCODER_AAC, VIDEO_ENCODER_H264},
-    {OUTPUT_FORMAT_WEBM, AUDIO_ENCODER_VORBIS, VIDEO_ENCODER_VP8},
-    {OUTPUT_FORMAT_THREE_GPP, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_MPEG_4_SP},
-    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_AAC, VIDEO_ENCODER_H264},
-    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_MPEG_4_SP},
-    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_HEVC}};
-
 const string kParametersList[] = {"max-duration",
                                   "max-filesize",
                                   "interleave-duration-us",
@@ -104,14 +99,16 @@
                                   "rtp-param-ext-cvo-degrees",
                                   "video-param-request-i-frame",
                                   "rtp-param-set-socket-dscp",
-                                  "rtp-param-set-socket-network"};
+                                  "rtp-param-set-socket-network",
+                                  "rtp-param-set-socket-ecn",
+                                  "rtp-param-remote-ip",
+                                  "rtp-param-set-socket-network",
+                                  "log-session-id"};
 
-constexpr int32_t kMaxSleepTimeInMs = 100;
-constexpr int32_t kMinSleepTimeInMs = 0;
 constexpr int32_t kMinVideoSize = 2;
 constexpr int32_t kMaxVideoSize = 8192;
-constexpr int32_t kNumRecordMin = 1;
-constexpr int32_t kNumRecordMax = 10;
+const char kOutputFile[] = "OutputFile";
+const char kNextOutputFile[] = "NextOutputFile";
 
 class TestAudioDeviceCallback : public AudioSystem::AudioDeviceCallback {
    public:
@@ -194,8 +191,7 @@
     int32_t max;
     mStfRecorder->getMaxAmplitude(&max);
 
-    int32_t deviceId = mFdp.ConsumeIntegral<int32_t>();
-    mStfRecorder->setInputDevice(deviceId);
+    int32_t deviceId;
     mStfRecorder->getRoutedDeviceId(&deviceId);
 
     vector<android::media::MicrophoneInfoFw> activeMicrophones{};
@@ -213,101 +209,189 @@
     sp<IGraphicBufferProducer> buffer = mStfRecorder->querySurfaceMediaSource();
 }
 
-void MediaRecorderClientFuzzer::dumpInfo() {
-    int32_t dumpFd = memfd_create("DumpFile", MFD_ALLOW_SEALING);
-    Vector<String16> args;
-    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
-    mStfRecorder->dump(dumpFd, args);
-    close(dumpFd);
-}
-
-void MediaRecorderClientFuzzer::setConfig() {
-    mStfRecorder->setOutputFile(mMediaRecorderOutputFd);
-    mStfRecorder->setAudioSource(mFdp.PickValueInArray(kSupportedAudioSources));
-    mStfRecorder->setVideoSource(mFdp.PickValueInArray(kSupportedVideoSources));
-    mStfRecorder->setPreferredMicrophoneDirection(
-        mFdp.PickValueInArray(kSupportedMicrophoneDirections));
-    mStfRecorder->setPrivacySensitive(mFdp.ConsumeBool());
-    bool isPrivacySensitive;
-    mStfRecorder->isPrivacySensitive(&isPrivacySensitive);
-    mStfRecorder->setVideoSize(mFdp.ConsumeIntegralInRange<int32_t>(kMinVideoSize, kMaxVideoSize),
-                               mFdp.ConsumeIntegralInRange<int32_t>(kMinVideoSize, kMaxVideoSize));
-    mStfRecorder->setVideoFrameRate(mFdp.ConsumeIntegral<int32_t>());
-    mStfRecorder->enableAudioDeviceCallback(mFdp.ConsumeBool());
-    mStfRecorder->setPreferredMicrophoneFieldDimension(mFdp.ConsumeFloatingPoint<float>());
-    mStfRecorder->setClientName(String16(mFdp.ConsumeRandomLengthString().c_str()));
-
-    int32_t Idx = mFdp.ConsumeIntegralInRange<int32_t>(0, size(kRecordingConfigList) - 1);
-    mStfRecorder->setOutputFormat(kRecordingConfigList[Idx].outputFormat);
-    mStfRecorder->setAudioEncoder(kRecordingConfigList[Idx].audioEncoder);
-    mStfRecorder->setVideoEncoder(kRecordingConfigList[Idx].videoEncoder);
-
-    int32_t nextOutputFd = memfd_create("NextOutputFile", MFD_ALLOW_SEALING);
-    mStfRecorder->setNextOutputFile(nextOutputFd);
-    close(nextOutputFd);
-
-    for (Idx = 0; Idx < size(kParametersList); ++Idx) {
-        if (mFdp.ConsumeBool()) {
-            int32_t value = mFdp.ConsumeIntegral<int32_t>();
-            mStfRecorder->setParameters(
-                String8((kParametersList[Idx] + "=" + to_string(value)).c_str()));
-        }
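+// Runs the wrapped configuration call only when the fuzzer decides to, so each
+// setConfig() pass applies a random subset of the recorder settings.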
+template <typename FuncWrapper>
+void callMediaAPI(FuncWrapper funcWrapper, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        funcWrapper();
     }
 }
 
-MediaRecorderClientFuzzer::MediaRecorderClientFuzzer(const uint8_t *data, size_t size)
-    : mFdp(data, size), mMediaRecorderOutputFd(memfd_create("OutputFile", MFD_ALLOW_SEALING)) {
+void MediaRecorderClientFuzzer::setConfig() {
+    callMediaAPI(
+            [this]() {
+                mSurfaceControl = mComposerClient.createSurface(
+                        String8(mFdp.ConsumeRandomLengthString().c_str()) /* name */,
+                        mFdp.ConsumeIntegral<uint32_t>() /* width */,
+                        mFdp.ConsumeIntegral<uint32_t>() /* height */,
+                        mFdp.ConsumeIntegral<int32_t>() /* pixel-format */,
+                        mFdp.ConsumeIntegral<int32_t>() /* flags */);
+                if (mSurfaceControl) {
+                    mSurface = mSurfaceControl->getSurface();
+                    mStfRecorder->setPreviewSurface(mSurface->getIGraphicBufferProducer());
+                }
+            },
+            &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->setInputDevice(mFdp.ConsumeIntegral<int32_t>()); },
+                 &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<TestMediaRecorderClient> listener = sp<TestMediaRecorderClient>::make();
+                mStfRecorder->setListener(listener);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<TestCamera> testCamera = sp<TestCamera>::make();
+                sp<Camera> camera = Camera::create(testCamera);
+                mStfRecorder->setCamera(camera->remote(), camera->getRecordingProxy());
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<PersistentSurface> persistentSurface = sp<PersistentSurface>::make();
+                mStfRecorder->setInputSurface(persistentSurface);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<TestAudioDeviceCallback> callback = sp<TestAudioDeviceCallback>::make();
+                mStfRecorder->setAudioDeviceCallback(callback);
+                mStfRecorder->setOutputFile(mMediaRecorderOutputFd);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setAudioSource(mFdp.PickValueInArray(kSupportedAudioSources));
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setVideoSource(mFdp.PickValueInArray(kSupportedVideoSources));
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setPreferredMicrophoneDirection(
+                        mFdp.PickValueInArray(kSupportedMicrophoneDirections));
+            },
+            &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->setPrivacySensitive(mFdp.ConsumeBool()); }, &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                bool isPrivacySensitive;
+                mStfRecorder->isPrivacySensitive(&isPrivacySensitive);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setVideoSize(mFdp.ConsumeIntegralInRange<int32_t>(
+                                                   kMinVideoSize, kMaxVideoSize) /* width */,
+                                           mFdp.ConsumeIntegralInRange<int32_t>(
+                                                   kMinVideoSize, kMaxVideoSize) /* height */);
+            },
+            &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->setVideoFrameRate(mFdp.ConsumeIntegral<int32_t>()); },
+                 &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->enableAudioDeviceCallback(mFdp.ConsumeBool()); }, &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setPreferredMicrophoneFieldDimension(
+                        mFdp.ConsumeFloatingPoint<float>());
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setClientName(String16(mFdp.ConsumeRandomLengthString().c_str()));
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                output_format OutputFormat = mFdp.PickValueInArray(kOutputFormat);
+                audio_encoder AudioEncoderFormat =
+                        (audio_encoder)mFdp.ConsumeIntegralInRange<int32_t>(AUDIO_ENCODER_DEFAULT,
+                                                                            AUDIO_ENCODER_LIST_END);
+                video_encoder VideoEncoderFormat = mFdp.PickValueInArray(kVideoEncoder);
+                if (OutputFormat == OUTPUT_FORMAT_AMR_NB) {
+                    AudioEncoderFormat =
+                            mFdp.ConsumeBool() ? AUDIO_ENCODER_DEFAULT : AUDIO_ENCODER_AMR_NB;
+                } else if (OutputFormat == OUTPUT_FORMAT_AMR_WB) {
+                    AudioEncoderFormat = AUDIO_ENCODER_AMR_WB;
+                } else if (OutputFormat == OUTPUT_FORMAT_AAC_ADIF ||
+                           OutputFormat == OUTPUT_FORMAT_AAC_ADTS ||
+                           OutputFormat == OUTPUT_FORMAT_MPEG2TS) {
+                    AudioEncoderFormat = (audio_encoder)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_ENCODER_AAC, AUDIO_ENCODER_AAC_ELD);
+                    if (OutputFormat == OUTPUT_FORMAT_MPEG2TS) {
+                        VideoEncoderFormat = VIDEO_ENCODER_H264;
+                    }
+                }
+                mStfRecorder->setOutputFormat(OutputFormat);
+                mStfRecorder->setAudioEncoder(AudioEncoderFormat);
+                mStfRecorder->setVideoEncoder(VideoEncoderFormat);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                int32_t nextOutputFd = memfd_create(kNextOutputFile, MFD_ALLOW_SEALING);
+                mStfRecorder->setNextOutputFile(nextOutputFd);
+                close(nextOutputFd);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                for (int32_t idx = 0; idx < size(kParametersList); ++idx) {
+                    if (mFdp.ConsumeBool()) {
+                        int32_t value = mFdp.ConsumeIntegral<int32_t>();
+                        mStfRecorder->setParameters(
+                                String8((kParametersList[idx] + "=" + to_string(value)).c_str()));
+                    }
+                }
+            },
+            &mFdp);
+}
+
+MediaRecorderClientFuzzer::MediaRecorderClientFuzzer(const uint8_t* data, size_t size)
+    : mFdp(data, size), mMediaRecorderOutputFd(memfd_create(kOutputFile, MFD_ALLOW_SEALING)) {
     AttributionSourceState attributionSource;
     attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
     attributionSource.token = sp<BBinder>::make();
     mStfRecorder = make_unique<StagefrightRecorder>(attributionSource);
-
-    mSurfaceControl = mComposerClient.createSurface(
-        String8(mFdp.ConsumeRandomLengthString().c_str()), mFdp.ConsumeIntegral<uint32_t>(),
-        mFdp.ConsumeIntegral<uint32_t>(), mFdp.ConsumeIntegral<int32_t>(),
-        mFdp.ConsumeIntegral<int32_t>());
-    if (mSurfaceControl) {
-        mSurface = mSurfaceControl->getSurface();
-        mStfRecorder->setPreviewSurface(mSurface->getIGraphicBufferProducer());
-    }
-
-    sp<TestMediaRecorderClient> listener = sp<TestMediaRecorderClient>::make();
-    mStfRecorder->setListener(listener);
-
-    sp<TestCamera> testCamera = sp<TestCamera>::make();
-    sp<Camera> camera = Camera::create(testCamera);
-    mStfRecorder->setCamera(camera->remote(), camera->getRecordingProxy());
-
-    sp<PersistentSurface> persistentSurface = sp<PersistentSurface>::make();
-    mStfRecorder->setInputSurface(persistentSurface);
-
-    sp<TestAudioDeviceCallback> callback = sp<TestAudioDeviceCallback>::make();
-    mStfRecorder->setAudioDeviceCallback(callback);
 }
 
 void MediaRecorderClientFuzzer::process() {
-    setConfig();
-
     mStfRecorder->init();
     mStfRecorder->prepare();
-    size_t numRecord = mFdp.ConsumeIntegralInRange<size_t>(kNumRecordMin, kNumRecordMax);
-    for (size_t Idx = 0; Idx < numRecord; ++Idx) {
-        mStfRecorder->start();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mStfRecorder->pause();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mStfRecorder->resume();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mStfRecorder->stop();
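+    // Drive the recorder through a random sequence of configuration calls and state
+    // transitions (start/pause/resume/stop/close/reset) until the input is exhausted.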
+    while (mFdp.remaining_bytes()) {
+        auto invokeMediaPlayerApi = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() { setConfig(); },
+                [&]() { mStfRecorder->start(); },
+                [&]() { mStfRecorder->pause(); },
+                [&]() { mStfRecorder->resume(); },
+                [&]() { mStfRecorder->stop(); },
+                [&]() { getConfig(); },
+                [&]() { mStfRecorder->close(); },
+                [&]() { mStfRecorder->reset(); },
+        });
+        invokeMediaPlayerApi();
     }
-    dumpInfo();
-    getConfig();
-
-    mStfRecorder->close();
-    mStfRecorder->reset();
 }
 
 extern "C" int LLVMFuzzerInitialize(int /* *argc */, char /* ***argv */) {
@@ -320,6 +404,7 @@
     MediaPlayerService::instantiate();
     AudioFlinger::instantiate();
     ResourceManagerService::instantiate();
+    CameraService::instantiate();
     fakeServiceManager->addService(String16(MediaMetricsService::kServiceName),
                                     new MediaMetricsService());
     return 0;
diff --git a/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
index a7cb689..857223d 100644
--- a/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
@@ -15,6 +15,8 @@
  *
  */
 
+#include <MediaExtractorService.h>
+#include <MediaPlayerService.h>
 #include <StagefrightMetadataRetriever.h>
 #include <binder/ProcessState.h>
 #include <datasource/FileSource.h>
@@ -54,58 +56,96 @@
                             MEDIA_MIMETYPE_CONTAINER_MPEG2PS,  MEDIA_MIMETYPE_CONTAINER_HEIF,
                             MEDIA_MIMETYPE_TEXT_3GPP,          MEDIA_MIMETYPE_TEXT_SUBRIP,
                             MEDIA_MIMETYPE_TEXT_VTT,           MEDIA_MIMETYPE_TEXT_CEA_608,
-                            MEDIA_MIMETYPE_TEXT_CEA_708,       MEDIA_MIMETYPE_DATA_TIMED_ID3};
+                            MEDIA_MIMETYPE_TEXT_CEA_708,       MEDIA_MIMETYPE_DATA_TIMED_ID3,
+                            MEDIA_MIMETYPE_IMAGE_AVIF,         MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1,
+                            MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1,   MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3,
+                            MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4,  MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3,
+                            MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4,  MEDIA_MIMETYPE_AUDIO_DTS,
+                            MEDIA_MIMETYPE_AUDIO_DTS_HD,       MEDIA_MIMETYPE_AUDIO_DTS_HD_MA,
+                            MEDIA_MIMETYPE_AUDIO_DTS_UHD,      MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1,
+                            MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2,   MEDIA_MIMETYPE_AUDIO_EVRC,
+                            MEDIA_MIMETYPE_AUDIO_EVRCB,        MEDIA_MIMETYPE_AUDIO_EVRCWB,
+                            MEDIA_MIMETYPE_AUDIO_EVRCNW,       MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS,
+                            MEDIA_MIMETYPE_AUDIO_APTX,         MEDIA_MIMETYPE_AUDIO_DRA,
+                            MEDIA_MIMETYPE_AUDIO_DOLBY_MAT,    MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD,
+                            MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0, MEDIA_MIMETYPE_AUDIO_AAC_MP4,
+                            MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0, MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1,
+                            MEDIA_MIMETYPE_AUDIO_AAC_MAIN,     MEDIA_MIMETYPE_AUDIO_AAC_LC,
+                            MEDIA_MIMETYPE_AUDIO_AAC_SSR,      MEDIA_MIMETYPE_AUDIO_AAC_LTP,
+                            MEDIA_MIMETYPE_AUDIO_AAC_HE_V1,    MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ERLC,     MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN,
+                            MEDIA_MIMETYPE_AUDIO_AAC_HE_V2,    MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1,
+                            MEDIA_MIMETYPE_AUDIO_AAC_XHE,      MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2,
+                            MEDIA_MIMETYPE_AUDIO_AAC_LD,       MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR, MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADIF,     MEDIA_MIMETYPE_AUDIO_IEC60958,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC, MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ELD,      MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE, MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD, MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2,
+                            MEDIA_MIMETYPE_AUDIO_IEC61937,
+                            MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE};
+
+constexpr size_t kMaxSize = 100;
 
 class MetadataRetrieverFuzzer {
    public:
     MetadataRetrieverFuzzer(const uint8_t *data, size_t size)
-        : mFdp(data, size),
-          mMdRetriever(new StagefrightMetadataRetriever()),
-          mDataSourceFd(memfd_create("InputFile", MFD_ALLOW_SEALING)) {}
-    ~MetadataRetrieverFuzzer() { close(mDataSourceFd); }
+        : mFdp(data, size), mMdRetriever(new StagefrightMetadataRetriever()) {}
     bool setDataSource(const uint8_t *data, size_t size);
     void getData();
 
    private:
     FuzzedDataProvider mFdp;
     sp<StagefrightMetadataRetriever> mMdRetriever = nullptr;
-    const int32_t mDataSourceFd;
+    int32_t mDataSourceFd;
 };
 
 void MetadataRetrieverFuzzer::getData() {
-    int64_t timeUs = mFdp.ConsumeIntegral<int64_t>();
-    int32_t option = mFdp.ConsumeIntegral<int32_t>();
-    int32_t colorFormat = mFdp.ConsumeIntegral<int32_t>();
-    bool metaOnly = mFdp.ConsumeBool();
-    mMdRetriever->getFrameAtTime(timeUs, option, colorFormat, metaOnly);
-
-    int32_t index = mFdp.ConsumeIntegral<int32_t>();
-    colorFormat = mFdp.ConsumeIntegral<int32_t>();
-    metaOnly = mFdp.ConsumeBool();
-    bool thumbnail = mFdp.ConsumeBool();
-    mMdRetriever->getImageAtIndex(index, colorFormat, metaOnly, thumbnail);
-
-    index = mFdp.ConsumeIntegral<int32_t>();
-    colorFormat = mFdp.ConsumeIntegral<int32_t>();
-    int32_t left = mFdp.ConsumeIntegral<int32_t>();
-    int32_t top = mFdp.ConsumeIntegral<int32_t>();
-    int32_t right = mFdp.ConsumeIntegral<int32_t>();
-    int32_t bottom = mFdp.ConsumeIntegral<int32_t>();
-    mMdRetriever->getImageRectAtIndex(index, colorFormat, left, top, right, bottom);
-
-    index = mFdp.ConsumeIntegral<int32_t>();
-    colorFormat = mFdp.ConsumeIntegral<int32_t>();
-    metaOnly = mFdp.ConsumeBool();
-    mMdRetriever->getFrameAtIndex(index, colorFormat, metaOnly);
-
-    mMdRetriever->extractAlbumArt();
-
-    int32_t keyCode = mFdp.ConsumeIntegral<int32_t>();
-    mMdRetriever->extractMetadata(keyCode);
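+    // Invoke the metadata retriever APIs in a random order with fuzzed arguments
+    // until the fuzzed input is exhausted.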
+    while (mFdp.remaining_bytes()) {
+        auto invokeMediaApi = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    mMdRetriever->getFrameAtTime(mFdp.ConsumeIntegral<int64_t>() /* timeUs */,
+                                                 mFdp.ConsumeIntegral<int32_t>() /* option */,
+                                                 mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+                                                 mFdp.ConsumeBool() /* metaOnly */);
+                },
+                [&]() {
+                    mMdRetriever->getImageAtIndex(mFdp.ConsumeIntegral<int32_t>() /* index */,
+                                                  mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+                                                  mFdp.ConsumeBool() /* metaOnly */,
+                                                  mFdp.ConsumeBool() /* thumbnail */);
+                },
+                [&]() {
+                    mMdRetriever->getImageRectAtIndex(
+                            mFdp.ConsumeIntegral<int32_t>() /* index */,
+                            mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+                            mFdp.ConsumeIntegral<int32_t>() /* left */,
+                            mFdp.ConsumeIntegral<int32_t>() /* top */,
+                            mFdp.ConsumeIntegral<int32_t>() /* right */,
+                            mFdp.ConsumeIntegral<int32_t>() /* bottom */);
+                },
+                [&]() {
+                    mMdRetriever->getFrameAtIndex(mFdp.ConsumeIntegral<int32_t>() /* index */,
+                                                  mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+                                                  mFdp.ConsumeBool() /* metaOnly */);
+                },
+                [&]() { mMdRetriever->extractAlbumArt(); },
+                [&]() {
+                    mMdRetriever->extractMetadata(mFdp.ConsumeIntegral<int32_t>() /* keyCode */);
+                },
+        });
+        invokeMediaApi();
+    }
 }
 
 bool MetadataRetrieverFuzzer::setDataSource(const uint8_t *data, size_t size) {
     status_t status = -1;
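+    // tmpfile() provides the backing file; the FILE (and its descriptor) is closed
+    // automatically when fp goes out of scope at the end of this function.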
+    std::unique_ptr<std::FILE, decltype(&fclose)> fp(tmpfile(), &fclose);
+    mDataSourceFd = fileno(fp.get());
+    if (mDataSourceFd < 0) {
+        return false;
+    }
 
     enum DataSourceChoice {FromHttp, FromFd, FromFileSource, kMaxValue = FromFileSource};
     switch (mFdp.ConsumeEnum<DataSourceChoice>()) {
@@ -114,7 +154,7 @@
             mHeaders.add(String8(mFdp.ConsumeRandomLengthString().c_str()),
                          String8(mFdp.ConsumeRandomLengthString().c_str()));
 
-            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, size);
+            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, min(kMaxSize,size));
             vector<uint8_t> uriSuffix = mFdp.ConsumeBytes<uint8_t>(dataBlobSize);
 
             string uri("data:");
@@ -146,6 +186,12 @@
     return true;
 }
 
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+    MediaPlayerService::instantiate();
+    MediaExtractorService::instantiate();
+    return 0;
+}
+
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
     MetadataRetrieverFuzzer mrtFuzzer(data, size);
     ProcessState::self()->startThreadPool();
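
A note on the refactor above: instead of one fixed call sequence, the fuzzer now keeps picking
retriever APIs at random until the input bytes are exhausted. A minimal, self-contained sketch of
that FuzzedDataProvider dispatch pattern follows; the two target functions are hypothetical
stand-ins, not the MediaMetadataRetriever API.

    #include <fuzzer/FuzzedDataProvider.h>
    #include <cstdint>
    #include <functional>

    // Hypothetical stand-ins for the retriever calls exercised above.
    static void apiA(int32_t /* index */) {}
    static void apiB(int64_t /* timeUs */, bool /* metaOnly */) {}

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        // Pick and invoke one API per iteration until the input runs out, so each run
        // exercises an arbitrary interleaving of calls with fuzzed arguments.
        while (fdp.remaining_bytes()) {
            auto invoke = fdp.PickValueInArray<const std::function<void()>>({
                    [&]() { apiA(fdp.ConsumeIntegral<int32_t>()); },
                    [&]() { apiB(fdp.ConsumeIntegral<int64_t>(), fdp.ConsumeBool()); },
            });
            invoke();
        }
        return 0;
    }
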
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index bb49b5a..bd43fe2 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2098,9 +2098,12 @@
              displayHeight,
              cropLeft, cropTop);
     } else {
-        CHECK(inputFormat->findInt32("width", &displayWidth));
-        CHECK(inputFormat->findInt32("height", &displayHeight));
-
+        if (!inputFormat->findInt32("width", &displayWidth)
+            || !inputFormat->findInt32("height", &displayHeight)) {
+            ALOGW("Either video width or video height missing, reporting 0x0!");
+            notifyListener(MEDIA_SET_VIDEO_SIZE, 0, 0);
+            return;
+        }
         ALOGV("Video input format %d x %d", displayWidth, displayHeight);
     }
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 3d4e955..3c8b809 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -102,6 +102,10 @@
     switch (pcmEncoding) {
     case kAudioEncodingPcmFloat:
         return AUDIO_FORMAT_PCM_FLOAT;
+    case kAudioEncodingPcm32bit:
+        return AUDIO_FORMAT_PCM_32_BIT;
+    case kAudioEncodingPcm24bitPacked:
+        return AUDIO_FORMAT_PCM_24_BIT_PACKED;
     case kAudioEncodingPcm16bit:
         return AUDIO_FORMAT_PCM_16_BIT;
     case kAudioEncodingPcm8bit:
@@ -2025,7 +2029,12 @@
     if (offloadingAudio()) {
         AString mime;
         CHECK(format->findString("mime", &mime));
-        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+        status_t err = OK;
+        if (audioFormat == AUDIO_FORMAT_PCM_16_BIT) {
+            // If the format message most likely carried no pcm-encoding (audioFormat is still
+            // the 16-bit default), try to derive the audio format from the MIME type instead.
+            err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+        }
 
         if (err != OK) {
             ALOGE("Couldn't map mime \"%s\" to a valid "
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 434ae00..158900a 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -15,8 +15,8 @@
         "NBAIO.cpp",
     ],
     header_libs: [
-        "libaudioclient_headers",
         "libaudio_system_headers",
+        "libaudioclient_headers",
     ],
     export_header_lib_headers: [
         "libaudioclient_headers",
@@ -35,8 +35,8 @@
     export_include_dirs: ["include_mono"],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 }
 
diff --git a/media/libnbaio/SourceAudioBufferProvider.cpp b/media/libnbaio/SourceAudioBufferProvider.cpp
index d58619f..157ebd7 100644
--- a/media/libnbaio/SourceAudioBufferProvider.cpp
+++ b/media/libnbaio/SourceAudioBufferProvider.cpp
@@ -32,7 +32,7 @@
     // negotiate with source
     NBAIO_Format counterOffers[1];
     size_t numCounterOffers = 1;
-    ssize_t index = source->negotiate(NULL, 0, counterOffers, numCounterOffers);
+    [[maybe_unused]] ssize_t index = source->negotiate(NULL, 0, counterOffers, numCounterOffers);
     ALOG_ASSERT(index == (ssize_t) NEGOTIATE && numCounterOffers > 0);
     numCounterOffers = 0;
     index = source->negotiate(counterOffers, 1, NULL, numCounterOffers);
diff --git a/media/libnblog/Android.bp b/media/libnblog/Android.bp
index 8cfece6..b4d48b0 100644
--- a/media/libnblog/Android.bp
+++ b/media/libnblog/Android.bp
@@ -35,8 +35,8 @@
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     include_dirs: ["system/media/audio_utils/include"],
diff --git a/media/libnblog/Reader.cpp b/media/libnblog/Reader.cpp
index 71ebfd1..d5f16e8 100644
--- a/media/libnblog/Reader.cpp
+++ b/media/libnblog/Reader.cpp
@@ -93,7 +93,7 @@
     do {
         availToRead = mFifoReader->obtain(iovec, capacity, NULL /*timeout*/, &lostTemp);
         lost += lostTemp;
-    } while (availToRead < 0 || ++tries <= kMaxObtainTries);
+    } while (availToRead < 0 && ++tries <= kMaxObtainTries);
 
     if (availToRead <= 0) {
         ALOGW_IF(availToRead < 0, "NBLog Reader %s failed to catch up with Writer", mName.c_str());
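
The operator change above (|| to &&) is what actually bounds the loop: with ||, a successful
obtain() would still keep looping until the retry budget ran out. A tiny standalone illustration
of the corrected retry pattern, with a fake obtain() standing in for mFifoReader->obtain():

    #include <cstdio>

    static constexpr int kMaxObtainTries = 3;

    // Stand-in for the FIFO reader: fails transiently on the first two attempts.
    static int fakeObtain(int attempt) { return attempt < 2 ? -1 : 5; }

    int main() {
        int tries = 0;
        int avail;
        do {
            avail = fakeObtain(tries);
            // Retry only while the call keeps failing AND tries remain; with '||'
            // this loop would spin kMaxObtainTries extra times even after success.
        } while (avail < 0 && ++tries <= kMaxObtainTries);
        std::printf("available = %d after %d retries\n", avail, tries);
        return 0;
    }
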
diff --git a/media/libshmem/Android.bp b/media/libshmem/Android.bp
index 6e48078..486a34f 100644
--- a/media/libshmem/Android.bp
+++ b/media/libshmem/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 2145dd9..e06efac 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -21,6 +21,8 @@
 #define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
 #endif
 
+#include <android_media_codec.h>
+
 #include <inttypes.h>
 #include <utils/Trace.h>
 
@@ -2188,7 +2190,7 @@
                 int32_t colorFormat = OMX_COLOR_FormatUnused;
                 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
                 if (!outputFormat->findInt32("color-format", &colorFormat)) {
-                    ALOGE("ouptut port did not have a color format (wrong domain?)");
+                    ALOGE("output port did not have a color format (wrong domain?)");
                     return BAD_VALUE;
                 }
                 ALOGD("[%s] Requested output format %#x and got %#x.",
@@ -7573,6 +7575,22 @@
             return true;
         }
 
+        // When ACodec receives an error event in LoadedToIdleState, it does not release the
+        // allocated buffers, which leaks gralloc buffers. Release these buffers first and then
+        // process the error event.
+        case OMX_EventError:
+        {
+            if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
+                mCodec->freeBuffersOnPort(kPortIndexInput);
+            }
+
+            if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
+                mCodec->freeBuffersOnPort(kPortIndexOutput);
+            }
+
+            return BaseState::onOMXEvent(event, data1, data2);
+        }
+
         default:
             return BaseState::onOMXEvent(event, data1, data2);
     }
@@ -9314,6 +9332,12 @@
                 // adaptive playback is not supported
                 caps->removeDetail(MediaCodecInfo::Capabilities::FEATURE_ADAPTIVE_PLAYBACK);
             }
+
+            // all non-tunneled video decoders support detached surface mode
+            if (android::media::codec::provider_->null_output_surface_support() &&
+                    android::media::codec::provider_->null_output_surface()) {
+                caps->addDetail(MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+            }
         }
     }
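
The new OMX_EventError case above frees buffers the codec still owns on both ports before handing
the event to the base state. A minimal stand-in sketch of that "clean up, then delegate" pattern;
the class and method names here are illustrative only, not the ACodec state machine.

    #include <cstdio>

    struct BaseState {
        virtual ~BaseState() = default;
        virtual bool onEvent(int event) {
            std::printf("base state handles event %d\n", event);
            return true;
        }
    };

    struct LoadedToIdleState : BaseState {
        bool ownsAllBuffers(int /* port */) const { return true; }  // stand-in check
        void freeBuffersOnPort(int port) { std::printf("freeing buffers on port %d\n", port); }

        bool onEvent(int event) override {
            constexpr int kEventError = 1;
            if (event == kEventError) {
                for (int port : {0 /* input */, 1 /* output */}) {
                    if (ownsAllBuffers(port)) {
                        freeBuffersOnPort(port);  // avoid leaking gralloc buffers
                    }
                }
            }
            return BaseState::onEvent(event);  // the error is still processed as before
        }
    };

    int main() {
        LoadedToIdleState state;
        state.onEvent(1);
        return 0;
    }
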
 
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index ad42813..16e267b 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -50,7 +50,7 @@
 using namespace hardware::cas::native::V1_0;
 using DrmBufferType = hardware::drm::V1_0::BufferType;
 using BufferInfo = ACodecBufferChannel::BufferInfo;
-using BufferInfoIterator = std::vector<const BufferInfo>::const_iterator;
+using BufferInfoIterator = std::vector<BufferInfo>::const_iterator;
 
 ACodecBufferChannel::~ACodecBufferChannel() {
     if (mCrypto != nullptr && mDealer != nullptr && mHeapSeqNum >= 0) {
@@ -59,7 +59,7 @@
 }
 
 static BufferInfoIterator findClientBuffer(
-        const std::shared_ptr<const std::vector<const BufferInfo>> &array,
+        const std::shared_ptr<const std::vector<BufferInfo>> &array,
         const sp<MediaCodecBuffer> &buffer) {
     return std::find_if(
             array->begin(), array->end(),
@@ -67,7 +67,7 @@
 }
 
 static BufferInfoIterator findBufferId(
-        const std::shared_ptr<const std::vector<const BufferInfo>> &array,
+        const std::shared_ptr<const std::vector<BufferInfo>> &array,
         IOMX::buffer_id bufferId) {
     return std::find_if(
             array->begin(), array->end(),
@@ -97,7 +97,7 @@
 }
 
 status_t ACodecBufferChannel::queueInputBuffer(const sp<MediaCodecBuffer> &buffer) {
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     BufferInfoIterator it = findClientBuffer(array, buffer);
     if (it == array->end()) {
@@ -138,7 +138,7 @@
     if (!hasCryptoOrDescrambler() || mDealer == nullptr) {
         return -ENOSYS;
     }
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     BufferInfoIterator it = findClientBuffer(array, buffer);
     if (it == array->end()) {
@@ -352,7 +352,7 @@
         size_t numSubSamples,
         const sp<MediaCodecBuffer> &buffer,
         AString* errorDetailMsg) {
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     BufferInfoIterator it = findClientBuffer(array, buffer);
     if (it == array->end()) {
@@ -473,7 +473,7 @@
 
 status_t ACodecBufferChannel::renderOutputBuffer(
         const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mOutputBuffers));
     BufferInfoIterator it = findClientBuffer(array, buffer);
     if (it == array->end()) {
@@ -495,7 +495,7 @@
 }
 
 status_t ACodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     bool input = true;
     BufferInfoIterator it = findClientBuffer(array, buffer);
@@ -517,7 +517,7 @@
 }
 
 void ACodecBufferChannel::getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
-    std::shared_ptr<const std::vector<const BufferInfo>> inputBuffers(
+    std::shared_ptr<const std::vector<BufferInfo>> inputBuffers(
             std::atomic_load(&mInputBuffers));
     array->clear();
     for (const BufferInfo &elem : *inputBuffers) {
@@ -526,7 +526,7 @@
 }
 
 void ACodecBufferChannel::getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
-    std::shared_ptr<const std::vector<const BufferInfo>> outputBuffers(
+    std::shared_ptr<const std::vector<BufferInfo>> outputBuffers(
             std::atomic_load(&mOutputBuffers));
     array->clear();
     for (const BufferInfo &elem : *outputBuffers) {
@@ -583,7 +583,7 @@
             mDecryptDestination = mDealer->allocate(destinationBufferSize);
         }
     }
-    std::vector<const BufferInfo> inputBuffers;
+    std::vector<BufferInfo> inputBuffers;
     for (const BufferAndId &elem : array) {
         sp<IMemory> sharedEncryptedBuffer;
         if (hasCryptoOrDescrambler()) {
@@ -593,22 +593,22 @@
     }
     std::atomic_store(
             &mInputBuffers,
-            std::make_shared<const std::vector<const BufferInfo>>(inputBuffers));
+            std::make_shared<const std::vector<BufferInfo>>(inputBuffers));
 }
 
 void ACodecBufferChannel::setOutputBufferArray(const std::vector<BufferAndId> &array) {
-    std::vector<const BufferInfo> outputBuffers;
+    std::vector<BufferInfo> outputBuffers;
     for (const BufferAndId &elem : array) {
         outputBuffers.emplace_back(elem.mBuffer, elem.mBufferId, nullptr);
     }
     std::atomic_store(
             &mOutputBuffers,
-            std::make_shared<const std::vector<const BufferInfo>>(outputBuffers));
+            std::make_shared<const std::vector<BufferInfo>>(outputBuffers));
 }
 
 void ACodecBufferChannel::fillThisBuffer(IOMX::buffer_id bufferId) {
     ALOGV("fillThisBuffer #%d", bufferId);
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     BufferInfoIterator it = findBufferId(array, bufferId);
 
@@ -629,7 +629,7 @@
         IOMX::buffer_id bufferId,
         OMX_U32 omxFlags) {
     ALOGV("drainThisBuffer #%d", bufferId);
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mOutputBuffers));
     BufferInfoIterator it = findBufferId(array, bufferId);
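
The type change throughout this file (std::vector<const BufferInfo> to std::vector<BufferInfo>)
matters because a vector with a const element type is non-portable: the default allocator requires
a non-const value_type and newer standard libraries reject it. Const-ness is instead kept at the
container level, as shared_ptr<const std::vector<...>>. A minimal sketch of that publish/read
pattern with a hypothetical Info type:

    #include <atomic>
    #include <cstdio>
    #include <memory>
    #include <vector>

    struct Info { int id; };  // hypothetical stand-in for BufferInfo

    // Writers publish a fresh immutable snapshot; readers load it atomically.
    static std::shared_ptr<const std::vector<Info>> gInfos;

    static void publish(std::vector<Info> infos) {
        std::atomic_store(&gInfos,
                          std::make_shared<const std::vector<Info>>(std::move(infos)));
    }

    static void readAll() {
        std::shared_ptr<const std::vector<Info>> snapshot(std::atomic_load(&gInfos));
        if (!snapshot) {
            return;
        }
        for (const Info& info : *snapshot) {
            std::printf("id=%d\n", info.id);
        }
    }

    int main() {
        publish({{1}, {2}, {3}});
        readAll();
        return 0;
    }
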
 
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 712b405..ac178aa 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -315,12 +315,15 @@
         "libaudioclient_aidl_conversion",
         "packagemanager_aidl-cpp",
         "server_configurable_flags",
+        "libaconfig_storage_read_api_cc",
+        "aconfig_mediacodec_flags_c_lib",
     ],
 
     static_libs: [
+        "android.media.codec-aconfig-cc",
         "libstagefright_esds",
         "libstagefright_color_conversion",
-        "libyuv_static",
+        "libyuv",
         "libstagefright_webm",
         "libstagefright_timedtext",
         "libogg",
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 4441121..26b8d0c 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -150,9 +150,15 @@
     int32_t cameraId, const std::string& clientName, uid_t clientUid, pid_t clientPid) {
 
     if (camera == 0) {
-        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                /*forceSlowJpegMode*/false);
+        AttributionSourceState clientAttribution;
+        clientAttribution.pid = clientPid;
+        clientAttribution.uid = clientUid;
+        clientAttribution.deviceId = kDefaultDeviceId;
+        clientAttribution.packageName = clientName;
+
+        mCamera = Camera::connect(cameraId, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                /*forceSlowJpegMode*/false, clientAttribution);
         if (mCamera == 0) return -EBUSY;
         mCameraFlags &= ~FLAGS_HOT_CAMERA;
     } else {
diff --git a/media/libstagefright/CryptoAsync.cpp b/media/libstagefright/CryptoAsync.cpp
index 8b5c8ed..0fc78ec 100644
--- a/media/libstagefright/CryptoAsync.cpp
+++ b/media/libstagefright/CryptoAsync.cpp
@@ -30,6 +30,36 @@
 
 namespace android {
 
+CryptoAsync::CryptoAsyncInfo::CryptoAsyncInfo(const std::unique_ptr<CodecCryptoInfo> &info) {
+    if (info == nullptr) {
+        return;
+    }
+    size_t key_len = (info->mKey != nullptr)? 16 : 0;
+    size_t iv_len = (info->mIv != nullptr)? 16 : 0;
+    mNumSubSamples = info->mNumSubSamples;
+    mMode = info->mMode;
+    mPattern = info->mPattern;
+    if (key_len > 0) {
+        mKeyBuffer = ABuffer::CreateAsCopy((void*)info->mKey, key_len);
+        mKey = (uint8_t*)(mKeyBuffer.get() != nullptr ? mKeyBuffer.get()->data() : nullptr);
+    }
+    if (iv_len > 0) {
+        mIvBuffer = ABuffer::CreateAsCopy((void*)info->mIv, iv_len);
+        mIv = (uint8_t*)(mIvBuffer.get() != nullptr ? mIvBuffer.get()->data() : nullptr);
+    }
+    mSubSamplesBuffer =
+        new ABuffer(sizeof(CryptoPlugin::SubSample) * mNumSubSamples);
+    if (mSubSamplesBuffer.get()) {
+        CryptoPlugin::SubSample * samples =
+           (CryptoPlugin::SubSample *)(mSubSamplesBuffer.get()->data());
+        for (int s = 0 ; s < mNumSubSamples ; s++) {
+            samples[s].mNumBytesOfClearData = info->mSubSamples[s].mNumBytesOfClearData;
+            samples[s].mNumBytesOfEncryptedData = info->mSubSamples[s].mNumBytesOfEncryptedData;
+        }
+        mSubSamples = (CryptoPlugin::SubSample *)mSubSamplesBuffer.get()->data();
+    }
+}
+
 CryptoAsync::~CryptoAsync() {
 }
 
@@ -79,23 +109,27 @@
     sp<ABuffer> keyBuffer;
     sp<ABuffer> ivBuffer;
     sp<ABuffer> subSamplesBuffer;
-    msg->findInt32("encryptBlocks", (int32_t*)&pattern.mEncryptBlocks);
-    msg->findInt32("skipBlocks", (int32_t*)&pattern.mSkipBlocks);
-    msg->findBuffer("key", &keyBuffer);
-    msg->findBuffer("iv", &ivBuffer);
-    msg->findBuffer("subSamples", &subSamplesBuffer);
-    msg->findInt32("secure", &secure);
-    msg->findSize("numSubSamples", &numSubSamples);
-    msg->findObject("buffer", &obj);
-    msg->findInt32("mode", (int32_t*)&mode);
     AString errorDetailMsg;
-    const uint8_t * key = keyBuffer.get() != nullptr ? keyBuffer.get()->data() : nullptr;
-    const uint8_t * iv = ivBuffer.get() != nullptr ? ivBuffer.get()->data() : nullptr;
-    const CryptoPlugin::SubSample * subSamples =
-       (CryptoPlugin::SubSample *)(subSamplesBuffer.get()->data());
+    msg->findObject("buffer", &obj);
+    msg->findInt32("secure", &secure);
     sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
-    err = channel->queueSecureInputBuffer(buffer, secure, key, iv, mode,
-        pattern, subSamples, numSubSamples, &errorDetailMsg);
+    if (buffer->meta()->findObject("cryptoInfos", &obj)) {
+        err = channel->queueSecureInputBuffers(buffer, secure, &errorDetailMsg);
+    } else {
+        msg->findInt32("encryptBlocks", (int32_t*)&pattern.mEncryptBlocks);
+        msg->findInt32("skipBlocks", (int32_t*)&pattern.mSkipBlocks);
+        msg->findBuffer("key", &keyBuffer);
+        msg->findBuffer("iv", &ivBuffer);
+        msg->findBuffer("subSamples", &subSamplesBuffer);
+        msg->findSize("numSubSamples", &numSubSamples);
+        msg->findInt32("mode", (int32_t*)&mode);
+        const uint8_t * key = keyBuffer.get() != nullptr ? keyBuffer.get()->data() : nullptr;
+        const uint8_t * iv = ivBuffer.get() != nullptr ? ivBuffer.get()->data() : nullptr;
+        const CryptoPlugin::SubSample * subSamples =
+           (CryptoPlugin::SubSample *)(subSamplesBuffer.get()->data());
+        err = channel->queueSecureInputBuffer(buffer, secure, key, iv, mode,
+            pattern, subSamples, numSubSamples, &errorDetailMsg);
+    }
     if (err != OK) {
         std::list<sp<AMessage>> errorList;
         msg->removeEntryByName("buffer");
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 0ab954f..46703bb 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -48,6 +48,9 @@
 static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
 static const size_t kRetryCount = 100; // must be >0
 static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
+// For a codec, 0 is the highest importance; the higher the number, the lower the importance.
+// To make the thumbnail codec less important, give it a value greater than 0.
+static const int kThumbnailImportance = 1;
 
 sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
         int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
@@ -102,13 +105,6 @@
             displayTop = 0;
         }
     }
-    if (displayWidth > width) {
-        displayWidth = width;
-    }
-    if (displayHeight > height) {
-        displayHeight = height;
-    }
-
 
     if (allocRotated) {
         if (rotationAngle == 90 || rotationAngle == 270) {
@@ -592,6 +588,9 @@
         }
     }
 
+    // Set the importance for thumbnail.
+    videoFormat->setInt32(KEY_IMPORTANCE, kThumbnailImportance);
+
     int32_t frameRate;
     if (trackMeta()->findInt32(kKeyFrameRate, &frameRate) && frameRate > 0) {
         mDefaultSampleDurationUs = 1000000LL / frameRate;
@@ -909,6 +908,10 @@
         videoFormat->setInt32("android._num-input-buffers", 1);
         videoFormat->setInt32("android._num-output-buffers", 1);
     }
+
+    // Set the importance for thumbnail.
+    videoFormat->setInt32(KEY_IMPORTANCE, kThumbnailImportance);
+
     return videoFormat;
 }
 
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a0a2891..15188b0 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -72,6 +72,9 @@
 static const int64_t kMaxMetadataSize = 0x4000000LL;   // 64MB max per-frame metadata size
 static const int64_t kMaxCttsOffsetTimeUs = 30 * 60 * 1000000LL;  // 30 minutes
 static const size_t kESDSScratchBufferSize = 10;  // kMaxAtomSize in Mpeg4Extractor 64MB
+// Allow up to 100 milliseconds, which is safely above the maximum delay observed in manual
+// testing between the post from setNextFd and its handling.
+static const int64_t kFdCondWaitTimeoutNs = 100000000;
 
 static const char kMetaKey_Version[]    = "com.android.version";
 static const char kMetaKey_Manufacturer[]      = "com.android.manufacturer";
@@ -172,6 +175,7 @@
     const char *getTrackType() const;
     void resetInternal();
     int64_t trackMetaDataSize();
+    bool isTimestampValid(int64_t timeUs);
 
 private:
     // A helper class to handle faster write box with table entries
@@ -1261,9 +1265,13 @@
         return OK;
     }
 
+    // Wait for the signal only if the new file is not available.
     if (mNextFd == -1) {
-        ALOGW("No FileDescriptor for next recording");
-        return INVALID_OPERATION;
+        status_t res = mFdCond.waitRelative(mLock, kFdCondWaitTimeoutNs);
+        if (res != OK) {
+            ALOGW("No FileDescriptor for next recording");
+            return INVALID_OPERATION;
+        }
     }
 
     mSwitchPending = true;
@@ -1639,6 +1647,11 @@
     ALOGV("buffer->range_length:%lld", (long long)buffer->range_length());
     if (buffer->meta_data().findInt64(kKeySampleFileOffset, &offset)) {
         ALOGV("offset:%lld, old_offset:%lld", (long long)offset, (long long)old_offset);
+        if (mMaxOffsetAppend > offset) {
+            // This has already been appended, skip updating mOffset value.
+            *bytesWritten = buffer->range_length();
+            return offset;
+        }
         if (old_offset == offset) {
             mOffset += buffer->range_length();
         } else {
@@ -2427,6 +2440,7 @@
         return INVALID_OPERATION;
     }
     mNextFd = dup(fd);
+    mFdCond.signal();
     return OK;
 }
 
@@ -4880,8 +4894,15 @@
             int32_t mediaTime = (mFirstSampleStartOffsetUs * mTimeScale + 5E5) / 1E6;
             int32_t firstSampleOffsetTicks =
                     (mFirstSampleStartOffsetUs * mvhdTimeScale + 5E5) / 1E6;
-            // samples before 0 don't count in for duration, hence subtract firstSampleOffsetTicks.
-            addOneElstTableEntry(tkhdDurationTicks - firstSampleOffsetTicks, mediaTime, 1, 0);
+            if (tkhdDurationTicks >= firstSampleOffsetTicks) {
+                // samples before 0 don't count in for duration, hence subtract
+                // firstSampleOffsetTicks.
+                addOneElstTableEntry(tkhdDurationTicks - firstSampleOffsetTicks, mediaTime, 1, 0);
+            } else {
+                ALOGW("The track header duration %" PRId64
+                      " is smaller than the first sample offset %" PRId64,
+                      mTrackDurationUs, mFirstSampleStartOffsetUs);
+            }
         } else {
             // Track starting at zero.
             ALOGV("No edit list entry required for this track");
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 3c80f28..b380ade 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -30,6 +30,8 @@
 
 #include "include/SoftwareRenderer.h"
 
+#include <android_media_codec.h>
+
 #include <android/api-level.h>
 #include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -271,6 +273,37 @@
 // XXX suppress until we get our representation right
 static bool kEmitHistogram = false;
 
+typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+
+// Multi access unit helpers
+static status_t generateFlagsFromAccessUnitInfo(
+        sp<AMessage> &msg, const sp<BufferInfosWrapper> &bufferInfos) {
+    msg->setInt64("timeUs", bufferInfos->value[0].mTimestamp);
+    msg->setInt32("flags", bufferInfos->value[0].mFlags);
+    // will prevent any access-unit info copy.
+    if (bufferInfos->value.size() > 1) {
+        uint32_t bufferFlags = 0;
+        uint32_t flagsInAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODEC_CONFIG;
+        uint32_t andFlags = flagsInAllAU;
+        int infoIdx = 0;
+        bool foundEndOfStream = false;
+        for ( ; infoIdx < bufferInfos->value.size() && !foundEndOfStream; ++infoIdx) {
+            bufferFlags |= bufferInfos->value[infoIdx].mFlags;
+            andFlags &= bufferInfos->value[infoIdx].mFlags;
+            if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
+                foundEndOfStream = true;
+            }
+        }
+        bufferFlags = bufferFlags & (andFlags | (~flagsInAllAU));
+        if (infoIdx != bufferInfos->value.size()) {
+            ALOGE("Error: incorrect access-units");
+            return -EINVAL;
+        }
+        msg->setInt32("flags", bufferFlags);
+    }
+    return OK;
+}
+
 static int64_t getId(IResourceManagerClient const * client) {
     return (int64_t) client;
 }
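
generateFlagsFromAccessUnitInfo above ORs the per-access-unit flags together, but flags that are
only meaningful when every access unit carries them (DECODE_ONLY, CODEC_CONFIG) survive only if
the AND over all access units still has them, and iteration stops at the first end-of-stream. A
self-contained sketch of that aggregation rule with stand-in flag values (the real function
additionally rejects inputs where EOS is not on the last access unit):

    #include <cstdint>
    #include <vector>

    // Stand-in values; the real BUFFER_FLAG_* constants live in MediaCodec.
    constexpr uint32_t FLAG_CODEC_CONFIG  = 1u << 1;
    constexpr uint32_t FLAG_END_OF_STREAM = 1u << 2;
    constexpr uint32_t FLAG_DECODE_ONLY   = 1u << 5;

    // Flags describing a large buffer assembled from several access units.
    uint32_t aggregateFlags(const std::vector<uint32_t>& auFlags) {
        const uint32_t flagsInAllAU = FLAG_DECODE_ONLY | FLAG_CODEC_CONFIG;
        uint32_t orFlags = 0;
        uint32_t andFlags = flagsInAllAU;
        for (uint32_t f : auFlags) {
            orFlags |= f;
            andFlags &= f;
            if (orFlags & FLAG_END_OF_STREAM) {
                break;  // nothing after EOS contributes
            }
        }
        // Keep every ORed flag, except the "all-AU" flags, which must also be in the AND.
        return orFlags & (andFlags | ~flagsInAllAU);
    }
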
@@ -454,7 +487,7 @@
                                     .id = getId(mClient),
                                     .name = mCodecName,
                                     .importance = mImportance};
-        return std::move(clientInfo);
+        return clientInfo;
     }
 
 private:
@@ -807,7 +840,7 @@
     const sp<AMessage> mNotify;
 };
 
-class OnBufferReleasedListener : public ::android::BnProducerListener{
+class OnBufferReleasedListener : public ::android::SurfaceListener{
 private:
     uint32_t mGeneration;
     std::weak_ptr<BufferChannelBase> mBufferChannel;
@@ -819,6 +852,13 @@
         }
     }
 
+    void notifyBufferAttached() {
+        auto p = mBufferChannel.lock();
+        if (p) {
+            p->onBufferAttachedToOutputSurface(mGeneration);
+        }
+    }
+
 public:
     explicit OnBufferReleasedListener(
             uint32_t generation,
@@ -831,11 +871,22 @@
         notifyBufferReleased();
     }
 
+    void onBuffersDiscarded([[maybe_unused]] const std::vector<sp<GraphicBuffer>>& buffers)
+        override { }
+
     void onBufferDetached([[maybe_unused]] int slot) override {
         notifyBufferReleased();
     }
 
     bool needsReleaseNotify() override { return true; }
+
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(BQ_CONSUMER_ATTACH_CALLBACK)
+    void onBufferAttached() override {
+        notifyBufferAttached();
+    }
+
+    bool needsAttachNotify() override { return true; }
+#endif
 };
 
 class BufferCallback : public CodecBase::BufferCallback {
@@ -2222,7 +2273,10 @@
 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
                       bool reverse);
 
-mediametrics_handle_t MediaCodec::createMediaMetrics(const sp<AMessage>& format, uint32_t flags) {
+mediametrics_handle_t MediaCodec::createMediaMetrics(const sp<AMessage>& format,
+                                                     uint32_t flags,
+                                                     status_t* err) {
+    *err = OK;
     mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
     bool isEncoder = (flags & CONFIGURE_FLAG_ENCODE);
 
@@ -2302,7 +2356,9 @@
             mErrorLog.log(LOG_TAG, base::StringPrintf(
                     "Invalid size(s), width=%d, height=%d", mWidth, mHeight));
             mediametrics_delete(nextMetricsHandle);
-            return BAD_VALUE;
+            // Set the error code and return null handle.
+            *err = BAD_VALUE;
+            return 0;
         }
 
     } else {
@@ -2385,7 +2441,11 @@
     updateCodecImportance(format);
 
     // Create and set up metrics for this codec.
-    mediametrics_handle_t nextMetricsHandle = createMediaMetrics(format, flags);
+    status_t err = OK;
+    mediametrics_handle_t nextMetricsHandle = createMediaMetrics(format, flags, &err);
+    if (err != OK) {
+        return err;
+    }
 
     sp<AMessage> msg = new AMessage(kWhatConfigure, this);
     msg->setMessage("format", format);
@@ -2420,7 +2480,6 @@
 
     sp<AMessage> callback = mCallback;
 
-    status_t err;
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
             toMediaResourceSubType(mIsHardware, mDomain)));
@@ -2978,6 +3037,13 @@
     return PostAndAwaitResponse(msg, &response);
 }
 
+status_t MediaCodec::detachOutputSurface() {
+    sp<AMessage> msg = new AMessage(kWhatDetachSurface, this);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
 status_t MediaCodec::setSurface(const sp<Surface> &surface) {
     sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
     msg->setObject("surface", surface);
@@ -3174,7 +3240,49 @@
     msg->setInt64("timeUs", presentationTimeUs);
     msg->setInt32("flags", flags);
     msg->setPointer("errorDetailMsg", errorDetailMsg);
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
 
+status_t MediaCodec::queueInputBuffers(
+        size_t index,
+        size_t offset,
+        size_t size,
+        const sp<BufferInfosWrapper> &infos,
+        AString *errorDetailMsg) {
+    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
+    uint32_t bufferFlags = 0;
+    uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
+    uint32_t andFlags = flagsinAllAU;
+    if (infos == nullptr || infos->value.empty()) {
+        ALOGE("ERROR: Large Audio frame with no BufferInfo");
+        return BAD_VALUE;
+    }
+    int infoIdx = 0;
+    std::vector<AccessUnitInfo> &accessUnitInfo = infos->value;
+    int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
+    bool foundEndOfStream = false;
+    for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
+        bufferFlags |= accessUnitInfo[infoIdx].mFlags;
+        andFlags &= accessUnitInfo[infoIdx].mFlags;
+        if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
+            foundEndOfStream = true;
+        }
+    }
+    bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
+    if (infoIdx != accessUnitInfo.size()) {
+        ALOGE("queueInputBuffers has incorrect access-units");
+        return -EINVAL;
+    }
+    msg->setSize("index", index);
+    msg->setSize("offset", offset);
+    msg->setSize("size", size);
+    msg->setInt64("timeUs", minTimeUs);
+    // Make this represent the flags for the entire buffer.
+    // The decodeOnly flag is set only when all access units are decode-only.
+    msg->setInt32("flags", bufferFlags);
+    msg->setObject("accessUnitInfo", infos);
+    msg->setPointer("errorDetailMsg", errorDetailMsg);
     sp<AMessage> response;
     return PostAndAwaitResponse(msg, &response);
 }
@@ -3215,27 +3323,50 @@
     return err;
 }
 
-status_t MediaCodec::queueBuffer(
+status_t MediaCodec::queueSecureInputBuffers(
         size_t index,
-        const std::shared_ptr<C2Buffer> &buffer,
-        int64_t presentationTimeUs,
-        uint32_t flags,
-        const sp<AMessage> &tunings,
+        size_t offset,
+        size_t size,
+        const sp<BufferInfosWrapper> &auInfo,
+        const sp<CryptoInfosWrapper> &cryptoInfos,
         AString *errorDetailMsg) {
     if (errorDetailMsg != NULL) {
         errorDetailMsg->clear();
     }
-
     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
-    msg->setSize("index", index);
-    sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
-        new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
-    msg->setObject("c2buffer", obj);
-    msg->setInt64("timeUs", presentationTimeUs);
-    msg->setInt32("flags", flags);
-    if (tunings && tunings->countEntries() > 0) {
-        msg->setMessage("tunings", tunings);
+    uint32_t bufferFlags = 0;
+    uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
+    uint32_t andFlags = flagsinAllAU;
+    if (auInfo == nullptr
+            || auInfo->value.empty()
+            || cryptoInfos == nullptr
+            || cryptoInfos->value.empty()) {
+        ALOGE("ERROR: Large Audio frame with no BufferInfo/CryptoInfo");
+        return BAD_VALUE;
     }
+    int infoIdx = 0;
+    std::vector<AccessUnitInfo> &accessUnitInfo = auInfo->value;
+    int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
+    bool foundEndOfStream = false;
+    for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
+        bufferFlags |= accessUnitInfo[infoIdx].mFlags;
+        andFlags &= accessUnitInfo[infoIdx].mFlags;
+        if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
+            foundEndOfStream = true;
+        }
+    }
+    bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
+    if (infoIdx != accessUnitInfo.size()) {
+        ALOGE("queueInputBuffers has incorrect access-units");
+        return -EINVAL;
+    }
+    msg->setSize("index", index);
+    msg->setSize("offset", offset);
+    msg->setSize("ssize", size);
+    msg->setInt64("timeUs", minTimeUs);
+    msg->setInt32("flags", bufferFlags);
+    msg->setObject("accessUnitInfo", auInfo);
+    msg->setObject("cryptoInfos", cryptoInfos);
     msg->setPointer("errorDetailMsg", errorDetailMsg);
 
     sp<AMessage> response;
@@ -3244,46 +3375,77 @@
     return err;
 }
 
-status_t MediaCodec::queueEncryptedBuffer(
+status_t MediaCodec::queueBuffer(
         size_t index,
-        const sp<hardware::HidlMemory> &buffer,
-        size_t offset,
-        const CryptoPlugin::SubSample *subSamples,
-        size_t numSubSamples,
-        const uint8_t key[16],
-        const uint8_t iv[16],
-        CryptoPlugin::Mode mode,
-        const CryptoPlugin::Pattern &pattern,
-        int64_t presentationTimeUs,
-        uint32_t flags,
+        const std::shared_ptr<C2Buffer> &buffer,
+        const sp<BufferInfosWrapper> &bufferInfos,
         const sp<AMessage> &tunings,
         AString *errorDetailMsg) {
     if (errorDetailMsg != NULL) {
         errorDetailMsg->clear();
     }
+    if (bufferInfos == nullptr || bufferInfos->value.empty()) {
+        return BAD_VALUE;
+    }
+    status_t err = OK;
+    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
+    msg->setSize("index", index);
+    sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
+        new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
+    msg->setObject("c2buffer", obj);
+    if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
+        return err;
+    }
+    msg->setObject("accessUnitInfo", bufferInfos);
+    if (tunings && tunings->countEntries() > 0) {
+        msg->setMessage("tunings", tunings);
+    }
+    msg->setPointer("errorDetailMsg", errorDetailMsg);
+    sp<AMessage> response;
+    err = PostAndAwaitResponse(msg, &response);
 
+    return err;
+}
+
+status_t MediaCodec::queueEncryptedBuffer(
+        size_t index,
+        const sp<hardware::HidlMemory> &buffer,
+        size_t offset,
+        size_t size,
+        const sp<BufferInfosWrapper> &bufferInfos,
+        const sp<CryptoInfosWrapper> &cryptoInfos,
+        const sp<AMessage> &tunings,
+        AString *errorDetailMsg) {
+    if (errorDetailMsg != NULL) {
+        errorDetailMsg->clear();
+    }
+    if (bufferInfos == nullptr || bufferInfos->value.empty()) {
+        return BAD_VALUE;
+    }
+    status_t err = OK;
     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
     msg->setSize("index", index);
     sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
         new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
     msg->setObject("memory", memory);
     msg->setSize("offset", offset);
-    msg->setPointer("subSamples", (void *)subSamples);
-    msg->setSize("numSubSamples", numSubSamples);
-    msg->setPointer("key", (void *)key);
-    msg->setPointer("iv", (void *)iv);
-    msg->setInt32("mode", mode);
-    msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
-    msg->setInt32("skipBlocks", pattern.mSkipBlocks);
-    msg->setInt64("timeUs", presentationTimeUs);
-    msg->setInt32("flags", flags);
+    if (cryptoInfos != nullptr) {
+        msg->setSize("ssize", size);
+        msg->setObject("cryptoInfos", cryptoInfos);
+    } else {
+        msg->setSize("size", size);
+    }
+    msg->setObject("accessUnitInfo", bufferInfos);
+    if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
+        return err;
+    }
     if (tunings && tunings->countEntries() > 0) {
         msg->setMessage("tunings", tunings);
     }
     msg->setPointer("errorDetailMsg", errorDetailMsg);
 
     sp<AMessage> response;
-    status_t err = PostAndAwaitResponse(msg, &response);
+    err = PostAndAwaitResponse(msg, &response);
 
     return err;
 }
@@ -3831,6 +3993,15 @@
                     switch (mState) {
                         case INITIALIZING:
                         {
+                            // Resource error during INITIALIZING state needs to be logged
+                            // through metrics, to be able to track such occurrences.
+                            if (isResourceError(err)) {
+                                mediametrics_setInt32(mMetricsHandle, kCodecError, err);
+                                mediametrics_setCString(mMetricsHandle, kCodecErrorState,
+                                                        stateString(mState).c_str());
+                                flushMediametrics();
+                                initMediametrics();
+                            }
                             setState(UNINITIALIZED);
                             break;
                         }
@@ -4541,7 +4712,7 @@
                     }
 
                     mResourceManagerProxy->removeClient();
-                    mReleaseSurface.reset();
+                    mDetachedSurface.reset();
 
                     if (mReplyID != nullptr) {
                         postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
@@ -4714,6 +4885,23 @@
                 mFlags |= kFlagPushBlankBuffersOnShutdown;
             }
 
+            uint32_t flags;
+            CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+            if (android::media::codec::provider_->null_output_surface_support()) {
+                if (obj == nullptr
+                        && (flags & CONFIGURE_FLAG_DETACHED_SURFACE)
+                        && !(flags & CONFIGURE_FLAG_ENCODE)) {
+                    sp<Surface> surface = getOrCreateDetachedSurface();
+                    if (surface == nullptr) {
+                        mErrorLog.log(
+                                LOG_TAG, "Detached surface mode is not supported by this codec");
+                        PostReplyWithError(replyID, INVALID_OPERATION);
+                    }
+                    obj = surface;
+                }
+            }
+
             if (obj != NULL) {
                 if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
                     // allow frame dropping by surface by default
@@ -4737,8 +4925,6 @@
 
             mApiUsageMetrics.isUsingOutputSurface = true;
 
-            uint32_t flags;
-            CHECK(msg->findInt32("flags", (int32_t *)&flags));
             if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
                 flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
                 if (!(mFlags & kFlagIsAsync)) {
@@ -4753,11 +4939,40 @@
                 if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
                     mFlags |= kFlagUseCryptoAsync;
                     if ((mFlags & kFlagUseBlockModel)) {
-                        ALOGW("CrytoAsync not yet enabled for block model,\
-                                falling back to normal");
+                        ALOGW("CryptoAsync not yet enabled for block model, "
+                                "falling back to normal");
                     }
                 }
             }
+            int32_t largeFrameParamMax = 0, largeFrameParamThreshold = 0;
+            if (format->findInt32(KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, &largeFrameParamMax) ||
+                    format->findInt32(KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
+                    &largeFrameParamThreshold)) {
+                if (largeFrameParamMax > 0 || largeFrameParamThreshold > 0) {
+                    if (mComponentName.startsWith("OMX")) {
+                        mErrorLog.log(LOG_TAG,
+                                "Large Frame params are not supported on OMX codecs. "
+                                "Currently they are only supported on C2 audio codecs.");
+                        PostReplyWithError(replyID, INVALID_OPERATION);
+                        break;
+                    }
+                    AString mime;
+                    CHECK(format->findString("mime", &mime));
+                    if (!mime.startsWith("audio")) {
+                        mErrorLog.log(LOG_TAG,
+                                "Large Frame params only work with audio codecs");
+                        PostReplyWithError(replyID, INVALID_OPERATION);
+                        break;
+                    }
+                    if (!(mFlags & kFlagIsAsync)) {
+                        mErrorLog.log(LOG_TAG, "Large Frame audio "
+                                "config works only with async mode");
+                        PostReplyWithError(replyID, INVALID_OPERATION);
+                        break;
+                    }
+                }
+            }
+
             mReplyID = replyID;
             setState(CONFIGURING);
 
@@ -4782,8 +4997,7 @@
 
             mDescrambler = static_cast<IDescrambler *>(descrambler);
             mBufferChannel->setDescrambler(mDescrambler);
-            if ((mFlags & kFlagUseCryptoAsync) &&
-                mCrypto  && (mDomain == DOMAIN_VIDEO)) {
+            if ((mFlags & kFlagUseCryptoAsync) && mCrypto) {
                 // set kFlagUseCryptoAsync but do-not use this for block model
                 // this is to propagate the error in onCryptoError()
                 // TODO (b/274628160): Enable Use of CONFIG_FLAG_USE_CRYPTO_ASYNC
@@ -4830,6 +5044,23 @@
             break;
         }
 
+        case kWhatDetachSurface:
+        {
+            // detach surface is equivalent to setSurface(mDetachedSurface)
+            sp<Surface> surface = getOrCreateDetachedSurface();
+
+            if (surface == nullptr) {
+                sp<AReplyToken> replyID;
+                CHECK(msg->senderAwaitsResponse(&replyID));
+                mErrorLog.log(LOG_TAG, "Detaching surface is not supported by the codec.");
+                PostReplyWithError(replyID, INVALID_OPERATION);
+                break;
+            }
+
+            msg->setObject("surface", surface);
+        }
+        [[fallthrough]];
+
         case kWhatSetSurface:
         {
             sp<AReplyToken> replyID;
@@ -4847,14 +5078,17 @@
                     sp<Surface> surface = static_cast<Surface *>(obj.get());
                     if (mSurface == NULL) {
                         // do not support setting surface if it was not set
-                        mErrorLog.log(LOG_TAG,
-                                      "Cannot set surface if the codec is not configured with "
-                                      "a surface already");
+                        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                                      "Cannot %s surface if the codec is not configured with "
+                                      "a surface already",
+                                      msg->what() == kWhatDetachSurface ? "detach" : "set"));
                         err = INVALID_OPERATION;
                     } else if (obj == NULL) {
                         // do not support unsetting surface
                         mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
                         err = BAD_VALUE;
+                    } else if (android::media::codec::provider_->null_output_surface_support()) {
+                        err = handleSetSurface(surface, true /* callCodec */);
                     } else {
                         uint32_t generation;
                         err = connectToSurface(surface, &generation);
@@ -4888,7 +5122,8 @@
 
                 default:
                     mErrorLog.log(LOG_TAG, base::StringPrintf(
-                            "setSurface() is valid only at Executing states; currently %s",
+                            "%sSurface() is valid only at Executing states; currently %s",
+                            msg->what() == kWhatDetachSurface ? "detach" : "set",
                             apiStateString().c_str()));
                     err = INVALID_OPERATION;
                     break;
@@ -5109,30 +5344,40 @@
 
             bool forceSync = false;
             if (asyncNotify != nullptr && mSurface != NULL) {
-                if (!mReleaseSurface) {
-                    uint64_t usage = 0;
-                    if (mSurface->getConsumerUsage(&usage) != OK) {
-                        usage = 0;
-                    }
-                    mReleaseSurface.reset(new ReleaseSurface(usage));
-                }
-                if (mSurface != mReleaseSurface->getSurface()) {
-                    uint32_t generation;
-                    status_t err = connectToSurface(mReleaseSurface->getSurface(), &generation);
-                    ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
-                    if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
-                        err = mCodec->setSurface(mReleaseSurface->getSurface(), generation);
-                        ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
-                    }
-                    if (err == OK) {
-                        (void)disconnectFromSurface();
-                        mSurface = mReleaseSurface->getSurface();
-                        mSurfaceGeneration = generation;
-                    } else {
-                        // We were not able to switch the surface, so force
+                if (android::media::codec::provider_->null_output_surface_support()) {
+                    if (handleSetSurface(getOrCreateDetachedSurface(), true /* callCodec */,
+                                         true /* onShutDown */) != OK) {
+                        // We were not able to detach the surface, so force
                         // synchronous release.
                         forceSync = true;
                     }
+                } else {
+                    if (!mDetachedSurface) {
+                        uint64_t usage = 0;
+                        if (mSurface->getConsumerUsage(&usage) != OK) {
+                            usage = 0;
+                        }
+                        mDetachedSurface.reset(new ReleaseSurface(usage));
+                    }
+                    if (mSurface != mDetachedSurface->getSurface()) {
+                        uint32_t generation;
+                        status_t err =
+                            connectToSurface(mDetachedSurface->getSurface(), &generation);
+                        ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+                        if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+                            err = mCodec->setSurface(mDetachedSurface->getSurface(), generation);
+                            ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+                        }
+                        if (err == OK) {
+                            (void)disconnectFromSurface();
+                            mSurface = mDetachedSurface->getSurface();
+                            mSurfaceGeneration = generation;
+                        } else {
+                            // We were not able to switch the surface, so force
+                            // synchronous release.
+                            forceSync = true;
+                        }
+                    }
                 }
             }
 
@@ -5833,6 +6078,10 @@
         mErrorLog.clear();
     }
 
+    if (android::media::codec::provider_->set_state_early()) {
+        mState = newState;
+    }
+
     if (newState == UNINITIALIZED) {
         // return any straggling buffers, e.g. if we got here on an error
         returnBuffersToCodec();
@@ -5843,7 +6092,9 @@
         mFlags &= ~kFlagSawMediaServerDie;
     }
 
-    mState = newState;
+    if (!android::media::codec::provider_->set_state_early()) {
+        mState = newState;
+    }
 
     if (mBatteryChecker != nullptr) {
         mBatteryChecker->setExecuting(isExecuting());
@@ -5906,10 +6157,10 @@
 
 status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
     size_t index;
-    size_t offset;
-    size_t size;
-    int64_t timeUs;
-    uint32_t flags;
+    size_t offset = 0;
+    size_t size = 0;
+    int64_t timeUs = 0;
+    uint32_t flags = 0;
     CHECK(msg->findSize("index", &index));
     CHECK(msg->findInt64("timeUs", &timeUs));
     CHECK(msg->findInt32("flags", (int32_t *)&flags));
@@ -5954,22 +6205,26 @@
             mErrorLog.log(LOG_TAG, "queuing secure buffer without mCrypto or mDescrambler!");
             return -EINVAL;
         }
-        CHECK(msg->findPointer("subSamples", (void **)&subSamples));
-        CHECK(msg->findSize("numSubSamples", &numSubSamples));
-        CHECK(msg->findPointer("key", (void **)&key));
-        CHECK(msg->findPointer("iv", (void **)&iv));
-        CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
-        CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
+        sp<RefBase> obj;
+        if (msg->findObject("cryptoInfos", &obj)) {
+            CHECK(msg->findSize("ssize", &size));
+        } else {
+            CHECK(msg->findPointer("subSamples", (void **)&subSamples));
+            CHECK(msg->findSize("numSubSamples", &numSubSamples));
+            CHECK(msg->findPointer("key", (void **)&key));
+            CHECK(msg->findPointer("iv", (void **)&iv));
+            CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
+            CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
 
-        int32_t tmp;
-        CHECK(msg->findInt32("mode", &tmp));
+            int32_t tmp;
+            CHECK(msg->findInt32("mode", &tmp));
 
-        mode = (CryptoPlugin::Mode)tmp;
-
-        size = 0;
-        for (size_t i = 0; i < numSubSamples; ++i) {
-            size += subSamples[i].mNumBytesOfClearData;
-            size += subSamples[i].mNumBytesOfEncryptedData;
+            mode = (CryptoPlugin::Mode)tmp;
+            size = 0;
+            for (size_t i = 0; i < numSubSamples; ++i) {
+                size += subSamples[i].mNumBytesOfClearData;
+                size += subSamples[i].mNumBytesOfEncryptedData;
+            }
         }
     }
 
@@ -5991,9 +6246,13 @@
                 "client does not own the buffer #%zu", index));
         return -EACCES;
     }
-    auto setInputBufferParams = [this, &buffer]
+    auto setInputBufferParams = [this, &msg, &buffer]
         (int64_t timeUs, uint32_t flags = 0) -> status_t {
         status_t err = OK;
+        sp<RefBase> obj;
+        if (msg->findObject("accessUnitInfo", &obj)) {
+            buffer->meta()->setObject("accessUnitInfo", obj);
+        }
         buffer->meta()->setInt64("timeUs", timeUs);
         if (flags & BUFFER_FLAG_EOS) {
             buffer->meta()->setInt32("eos", true);
@@ -6030,35 +6289,41 @@
      return err;
     };
     auto buildCryptoInfoAMessage = [&](const sp<AMessage> & cryptoInfo, int32_t action) {
-        size_t key_len = (key != nullptr)? 16 : 0;
-        size_t iv_len = (iv != nullptr)? 16 : 0;
-        sp<ABuffer> shared_key;
-        sp<ABuffer> shared_iv;
-        if (key_len > 0) {
-            shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
-        }
-        if (iv_len > 0) {
-            shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
-        }
-        sp<ABuffer> subSamples_buffer =
-            new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
-        CryptoPlugin::SubSample * samples =
-           (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
-        for (int s = 0 ; s < numSubSamples ; s++) {
-            samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
-            samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
-        }
         // set decrypt Action
         cryptoInfo->setInt32("action", action);
         cryptoInfo->setObject("buffer", buffer);
         cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
-        cryptoInfo->setBuffer("key", shared_key);
-        cryptoInfo->setBuffer("iv", shared_iv);
-        cryptoInfo->setInt32("mode", (int)mode);
-        cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
-        cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
-        cryptoInfo->setBuffer("subSamples", subSamples_buffer);
-        cryptoInfo->setSize("numSubSamples", numSubSamples);
+        sp<RefBase> obj;
+        if (msg->findObject("cryptoInfos", &obj)) {
+            // This object is standalone when created (no copy required here).
+            buffer->meta()->setObject("cryptoInfos", obj);
+        } else {
+            size_t key_len = (key != nullptr)? 16 : 0;
+            size_t iv_len = (iv != nullptr)? 16 : 0;
+            sp<ABuffer> shared_key;
+            sp<ABuffer> shared_iv;
+            if (key_len > 0) {
+                shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
+            }
+            if (iv_len > 0) {
+                shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
+            }
+            sp<ABuffer> subSamples_buffer =
+                new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
+            CryptoPlugin::SubSample * samples =
+               (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
+            for (int s = 0 ; s < numSubSamples ; s++) {
+                samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
+                samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
+            }
+            cryptoInfo->setBuffer("key", shared_key);
+            cryptoInfo->setBuffer("iv", shared_iv);
+            cryptoInfo->setInt32("mode", (int)mode);
+            cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
+            cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
+            cryptoInfo->setBuffer("subSamples", subSamples_buffer);
+            cryptoInfo->setSize("numSubSamples", numSubSamples);
+        }
     };
     if (c2Buffer || memory) {
         sp<AMessage> tunings = NULL;
@@ -6068,15 +6333,37 @@
         status_t err = OK;
         if (c2Buffer) {
             err = mBufferChannel->attachBuffer(c2Buffer, buffer);
+            // Drop the wrapper in the single-info case to prevent an unnecessary copy.
+            if (msg->findObject("accessUnitInfo", &obj)) {
+                sp<BufferInfosWrapper> infos{(BufferInfosWrapper*)(obj.get())};
+                if (infos->value.size() == 1) {
+                   msg->removeEntryByName("accessUnitInfo");
+                }
+            }
         } else if (memory) {
             AString errorDetailMsg;
-            err = mBufferChannel->attachEncryptedBuffer(
-                    memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
-                    offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
+            if (msg->findObject("cryptoInfos", &obj)) {
+                buffer->meta()->setSize("ssize", size);
+                buffer->meta()->setObject("cryptoInfos", obj);
+                if (msg->findObject("accessUnitInfo", &obj)) {
+                    // the reference will be the same here and in
+                    // setBufferParams
+                    buffer->meta()->setObject("accessUnitInfo", obj);
+                }
+                err = mBufferChannel->attachEncryptedBuffers(
+                    memory,
+                    offset,
+                    buffer,
+                    (mFlags & kFlagIsSecure),
+                    &errorDetailMsg);
+            } else {
+                err = mBufferChannel->attachEncryptedBuffer(
+                        memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
+                        offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
+            }
             if (err != OK && hasCryptoOrDescrambler()
                     && (mFlags & kFlagUseCryptoAsync)) {
                 // create error detail
-                AString errorDetailMsg;
                 sp<AMessage> cryptoErrorInfo = new AMessage();
                 buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
                 cryptoErrorInfo->setInt32("err", err);
@@ -6152,6 +6439,12 @@
             sp<AMessage> cryptoInfo = new AMessage();
             buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
             mCryptoAsync->decrypt(cryptoInfo);
+        } else if (msg->findObject("cryptoInfos", &obj)) {
+                buffer->meta()->setObject("cryptoInfos", obj);
+                err = mBufferChannel->queueSecureInputBuffers(
+                        buffer,
+                        (mFlags & kFlagIsSecure),
+                        errorDetailMsg);
         } else {
             err = mBufferChannel->queueSecureInputBuffer(
                 buffer,
@@ -6377,9 +6670,9 @@
     CHECK_EQ(info, &mPortBuffers[portIndex][index]);
     availBuffers->erase(availBuffers->begin());
 
-    CHECK(!info->mOwnedByClient);
     {
         Mutex::Autolock al(mBufferLock);
+        CHECK(!info->mOwnedByClient);
         info->mOwnedByClient = true;
 
         // set image-data
@@ -6398,6 +6691,23 @@
     return index;
 }
 
+sp<Surface> MediaCodec::getOrCreateDetachedSurface() {
+    if (mDomain != DOMAIN_VIDEO || (mFlags & kFlagIsEncoder)) {
+        return nullptr;
+    }
+
+    if (!mDetachedSurface) {
+        uint64_t usage = 0;
+        if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
+            // TODO: should we use a/the default consumer usage?
+            usage = 0;
+        }
+        mDetachedSurface.reset(new ReleaseSurface(usage));
+    }
+
+    return mDetachedSurface->getSurface();
+}
+
 status_t MediaCodec::connectToSurface(const sp<Surface> &surface, uint32_t *generation) {
     status_t err = OK;
     if (surface != NULL) {
@@ -6430,7 +6740,7 @@
             // to this surface after disconnect/connect, and those free frames would inherit the new
             // generation number. Disconnecting after setting a unique generation prevents this.
             nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
-            sp<IProducerListener> listener =
+            sp<SurfaceListener> listener =
                     new OnBufferReleasedListener(*generation, mBufferChannel);
             err = surfaceConnectWithListener(
                     surface, listener, "connectToSurface(reconnect-with-listener)");
@@ -6471,7 +6781,56 @@
     return err;
 }
 
+status_t MediaCodec::handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutDown) {
+    uint32_t generation;
+    status_t err = OK;
+    if (surface != nullptr) {
+        err = connectToSurface(surface, &generation);
+        if (err == ALREADY_EXISTS) {
+            // reconnecting to same surface
+            return OK;
+        }
+
+        if (err == OK && callCodec) {
+            if (mFlags & kFlagUsesSoftwareRenderer) {
+                if (mSoftRenderer != NULL
+                        && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
+                    pushBlankBuffersToNativeWindow(mSurface.get());
+                }
+                // do not create a new software renderer on shutdown (release)
+                // as it will not be used anyway
+                if (!onShutDown) {
+                    surface->setDequeueTimeout(-1);
+                    mSoftRenderer = new SoftwareRenderer(surface);
+                    // TODO: check if this was successful
+                }
+            } else {
+                err = mCodec->setSurface(surface, generation);
+            }
+
+            mReliabilityContextMetrics.setOutputSurfaceCount++;
+        }
+    }
+
+    if (err == OK) {
+        if (mSurface != NULL) {
+            (void)disconnectFromSurface();
+        }
+
+        if (surface != NULL) {
+            mSurface = surface;
+            mSurfaceGeneration = generation;
+        }
+    }
+
+    return err;
+}
+
 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
+    if (android::media::codec::provider_->null_output_surface_support()) {
+        return handleSetSurface(surface, false /* callCodec */);
+    }
+
     status_t err = OK;
     if (mSurface != NULL) {
         (void)disconnectFromSurface();
@@ -6503,10 +6862,11 @@
         if (discardDecodeOnlyOutputBuffer(index)) {
             continue;
         }
+        sp<AMessage> msg = mCallback->dup();
         const sp<MediaCodecBuffer> &buffer =
             mPortBuffers[kPortIndexOutput][index].mData;
-        sp<AMessage> msg = mCallback->dup();
-        msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
+        int32_t outputCallbackID = CB_OUTPUT_AVAILABLE;
+        sp<RefBase> accessUnitInfoObj;
         msg->setInt32("index", index);
         msg->setSize("offset", buffer->offset());
         msg->setSize("size", buffer->size());
@@ -6520,6 +6880,15 @@
         CHECK(buffer->meta()->findInt32("flags", &flags));
 
         msg->setInt32("flags", flags);
+        buffer->meta()->findObject("accessUnitInfo", &accessUnitInfoObj);
+        if (accessUnitInfoObj) {
+            outputCallbackID = CB_LARGE_FRAME_OUTPUT_AVAILABLE;
+            msg->setObject("accessUnitInfo", accessUnitInfoObj);
+            sp<BufferInfosWrapper> auInfo(
+                    (decltype(auInfo.get()))accessUnitInfoObj.get());
+            auInfo->value.back().mFlags |= flags & BUFFER_FLAG_END_OF_STREAM;
+        }
+        msg->setInt32("callbackID", outputCallbackID);
 
         statsBufferReceived(timeUs, buffer);
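Note on the large-frame output path above: when the output buffer's metadata carries an "accessUnitInfo" object, the callback ID switches to CB_LARGE_FRAME_OUTPUT_AVAILABLE and the EOS flag is folded into the last access unit's flags. The sketch below shows how a client-side handler might unpack such a message; handleLargeFrameOutput and the printf logging are illustrative only, and the only APIs assumed are AMessage, AccessUnitInfo and the BufferInfosWrapper typedef introduced in MediaCodec.h in this change.

```
#include <cstdio>
#include <media/stagefright/CodecBase.h>    // AccessUnitInfo
#include <media/stagefright/MediaCodec.h>   // BufferInfosWrapper, callback constants
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

// Hypothetical handler for CB_LARGE_FRAME_OUTPUT_AVAILABLE; the message layout
// matches how the callback message is assembled above.
static void handleLargeFrameOutput(const sp<AMessage> &msg) {
    int32_t index = 0;
    if (!msg->findInt32("index", &index)) {
        return;
    }
    sp<RefBase> obj;
    if (msg->findObject("accessUnitInfo", &obj)) {
        sp<BufferInfosWrapper> infos = static_cast<BufferInfosWrapper *>(obj.get());
        for (const AccessUnitInfo &au : infos->value) {
            // one entry per access unit packed into the large output buffer
            printf("AU flags=%#x size=%u ptsUs=%lld\n",
                   au.mFlags, au.mSize, (long long)au.mTimestamp);
        }
    }
    // the buffer itself is still released or rendered through the usual index-based calls
}
```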
 
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index aaf7465..1008445 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -249,6 +249,11 @@
         sampleMetaData.setInt32(kKeyIsMuxerData, 1);
     }
 
+    if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+        sampleMetaData.setInt32(kKeyIsCodecConfig, true);
+        ALOGV("BUFFER_FLAG_CODEC_CONFIG");
+    }
+
     if (flags & MediaCodec::BUFFER_FLAG_EOS) {
         sampleMetaData.setInt32(kKeyIsEndOfStream, 1);
         ALOGV("BUFFER_FLAG_EOS");
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 604dcb0..74432a6 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -111,8 +111,9 @@
         }
     }
 
-    int finalUsage = usage | consumerUsage;
-    ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = %#x", usage, consumerUsage, finalUsage);
+    uint64_t finalUsage = (uint32_t) usage | (uint32_t) consumerUsage;
+    ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = 0x%" PRIx64,
+            usage, consumerUsage, finalUsage);
     err = native_window_set_usage(nativeWindow, finalUsage);
     if (err != NO_ERROR) {
         ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
@@ -126,7 +127,7 @@
         return err;
     }
 
-    ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage %#x",
+    ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage 0x%" PRIx64,
             nativeWindow, width, height, format, rotation, finalUsage);
     return NO_ERROR;
 }
@@ -334,7 +335,7 @@
 }
 
 status_t surfaceConnectWithListener(
-        const sp<Surface> &surface, sp<IProducerListener> listener, const char *reason) {
+        const sp<Surface> &surface, sp<SurfaceListener> listener, const char *reason) {
     ALOGD("connecting to surface %p, reason %s", surface.get(), reason);
 
     status_t err = surface->connect(NATIVE_WINDOW_API_MEDIA, listener);
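Aside on the usage computation above: `usage` and `consumerUsage` are plain ints, and OR'ing them as ints before widening to uint64_t would sign-extend whenever the top bit of either value is set. The standalone snippet below (not framework code) shows the difference the uint32_t casts make.

```
#include <cstdint>
#include <cstdio>

int main() {
    int producer = (int)0x80000000;    // a usage word with the top bit set
    int consumer = 0x00000100;
    uint64_t sign_extended = producer | consumer;                      // int OR, then widened
    uint64_t widened_first = (uint32_t)producer | (uint32_t)consumer;  // what the code above does
    printf("sign_extended=0x%llx widened_first=0x%llx\n",
           (unsigned long long)sign_extended,    // 0xffffffff80000100
           (unsigned long long)widened_first);   // 0x80000100
    return 0;
}
```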
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 22885c9..b7efbce 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -1,16 +1,4 @@
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
-    // writerTest fails about 5 out of 66
-    // { "name": "writerTest" },
-
-    { "name": "HEVCUtilsUnitTest" },
-    { "name": "ExtractorFactoryTest" }
-
-  ],
-
   "presubmit-large": [
     {
       "name": "CtsMediaMiscTestCases",
@@ -32,7 +20,6 @@
         {
           "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
         },
-        // TODO: b/149314419
         {
           "exclude-filter": "android.media.audio.cts.AudioPlaybackCaptureTest"
         },
@@ -95,8 +82,16 @@
     }
   ],
   "postsubmit": [
+    // writerTest fails about 5 out of 66
+    // { "name": "writerTest" },
     {
        "name": "BatteryChecker_test"
+    },
+    {
+        "name": "ExtractorFactoryTest"
+    },
+    {
+        "name": "HEVCUtilsUnitTest"
     }
   ]
 }
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index eb9ac0f..bf29b1d 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -302,13 +302,6 @@
         mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
     }
 
-    // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
-    // frames since the app is not skipping them to terminate playback.
-    for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
-        processMetricsForSkippedFrame(contentTimeUs);
-    }
-    mPendingSkippedFrameContentTimeUsList = {};
-
     // We can render a pending queued frame if it's the last frame of the video, so release it
     // immediately.
     if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
@@ -332,9 +325,25 @@
                   (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
             break;
         }
+        // Process all skipped frames before the dropped frame.
+        while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+            if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+                break;
+            }
+            processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+            mPendingSkippedFrameContentTimeUsList.pop_front();
+        }
         processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
                                       nextExpectedFrame.desiredRenderTimeUs);
     }
+    // Process all skipped frames before the rendered frame.
+    while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+        if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+            break;
+        }
+        processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+        mPendingSkippedFrameContentTimeUsList.pop_front();
+    }
     processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
                                    nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
                                    freezeEventOut, judderEventOut);
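The reordering above interleaves skipped-frame processing with the dropped/rendered bookkeeping: pending skipped timestamps are drained up to each expected frame's content time instead of all at once after a render. A generic sketch of that "drain before a boundary" pattern is below; the names and the printf stand in for the tracker's actual metrics calls.

```
#include <cstdint>
#include <cstdio>
#include <list>

// Placeholder for the tracker's per-skipped-frame metrics update.
static void processSkipped(int64_t contentTimeUs) {
    printf("skipped frame at %lld us\n", (long long)contentTimeUs);
}

// Consume pending skipped timestamps that precede the next expected frame.
static void drainSkippedBefore(std::list<int64_t> &pendingUs, int64_t boundaryUs) {
    while (!pendingUs.empty() && pendingUs.front() < boundaryUs) {
        processSkipped(pendingUs.front());
        pendingUs.pop_front();
    }
}

int main() {
    std::list<int64_t> pending = {1000, 2000, 5000};
    drainSkippedBefore(pending, 3000);  // 1000 and 2000 are processed; 5000 stays pending
    return 0;
}
```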
diff --git a/media/libstagefright/colorconversion/Android.bp b/media/libstagefright/colorconversion/Android.bp
index 7ff9b10..4072bf9 100644
--- a/media/libstagefright/colorconversion/Android.bp
+++ b/media/libstagefright/colorconversion/Android.bp
@@ -36,7 +36,7 @@
         "media_plugin_headers",
     ],
 
-    static_libs: ["libyuv_static"],
+    static_libs: ["libyuv"],
 
     cflags: ["-Werror"],
 
diff --git a/media/libstagefright/colorconversion/fuzzer/Android.bp b/media/libstagefright/colorconversion/fuzzer/Android.bp
index 237e715..50a2477 100644
--- a/media/libstagefright/colorconversion/fuzzer/Android.bp
+++ b/media/libstagefright/colorconversion/fuzzer/Android.bp
@@ -27,7 +27,7 @@
 cc_defaults {
     name: "libcolorconversion_fuzzer_defaults",
     static_libs: [
-        "libyuv_static",
+        "libyuv",
         "libstagefright_color_conversion",
         "libstagefright",
         "liblog",
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 24020d1..6fb9232 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -139,6 +139,7 @@
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.decoder" type="video/hevc" variant="slow-cpu,!slow-cpu">
@@ -160,6 +161,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
@@ -178,6 +180,7 @@
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9" variant="slow-cpu,!slow-cpu">
@@ -197,9 +200,30 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
+        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
+            <Limit name="alignment" value="1x1" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+                <Limit name="blocks-per-second" range="1-245760" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="1280x1280" />
+                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
+                <Limit name="blocks-per-second" range="1-108000" />
+                <Limit name="bitrate" range="1-5000000" />
+            </Variant>
+            <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
+            <Feature name="low-latency" />
+            <Attribute name="software-codec" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
             <!-- TODO: implement a mechanism to prevent AV1 Decoder usage on pre-U devices -->
             <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
@@ -216,24 +240,8 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
-            <Attribute name="software-codec" />
-        </MediaCodec>
-        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
-            <Limit name="alignment" value="1x1" />
-            <Limit name="block-size" value="16x16" />
-            <Variant name="!slow-cpu">
-                <Limit name="size" min="2x2" max="2048x2048" />
-                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
-                <Limit name="blocks-per-second" range="1-245760" />
-                <Limit name="bitrate" range="1-40000000" />
-            </Variant>
-            <Variant name="slow-cpu">
-                <Limit name="size" min="2x2" max="1280x1280" />
-                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
-                <Limit name="blocks-per-second" range="1-108000" />
-                <Limit name="bitrate" range="1-5000000" />
-            </Variant>
-            <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
+            <Feature name="low-latency" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.mpeg2.decoder" type="video/mpeg2" domain="tv">
@@ -330,11 +338,12 @@
             <!-- Video Quality control -->
                     <!-- supports QP bounding with standard keys -->
             <Feature name="qp-bounds" />
+            <Feature name="bitrate-modes" value="VBR,CBR" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.vp8.encoder" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Variant name="!slow-cpu">
                 <Limit name="size" min="2x2" max="2048x2048" />
@@ -350,6 +359,7 @@
                 <Limit name="bitrate" range="1-20000000" />
             </Variant>
             <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
@@ -364,22 +374,24 @@
             <Limit name="complexity" range="0-10"  default="0" />
             <Limit name="quality" range="0-100"  default="80" />
             <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.encoder" type="video/x-vnd.on2.vp9" variant="!slow-cpu">
             <Alias name="OMX.google.vp9.encoder" />
             <!-- profiles and levels:  ProfileMain : Level_Version0-3 -->
             <Limit name="size" min="2x2" max="2048x2048" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <!-- 2016 devices can encode at about 8fps at this block count -->
             <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.av1.encoder" type="video/av01" enabled="false" minsdk="34" variant="slow-cpu,!slow-cpu">
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Variant name="!slow-cpu">
                 <Limit name="size" min="2x2" max="1920x1920" />
@@ -394,6 +406,7 @@
             <Limit name="quality" range="0-100"  default="80" />
             <Limit name="complexity" range="0-5"  default="0" />
             <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
     </Encoders>
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index 946d533..46a5183 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -155,8 +155,8 @@
     // obtained. Inside BufferInfo, mBufferId and mSharedEncryptedBuffer are
     // immutable objects. We write internal states of mClient/CodecBuffer when
     // the caller has given up the reference, so that access is also safe.
-    std::shared_ptr<const std::vector<const BufferInfo>> mInputBuffers;
-    std::shared_ptr<const std::vector<const BufferInfo>> mOutputBuffers;
+    std::shared_ptr<const std::vector<BufferInfo>> mInputBuffers;
+    std::shared_ptr<const std::vector<BufferInfo>> mOutputBuffers;
 
     sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
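The container change above is a C++ constraint rather than a behavioural one: std::vector requires a non-const element type (its allocator must be able to construct and destroy elements), so std::vector<const BufferInfo> is rejected by current standard libraries. Immutability is already expressed by the const vector inside the shared_ptr. A tiny standalone illustration:

```
#include <memory>
#include <vector>

struct BufferInfo { int id; };

int main() {
    // std::vector<const BufferInfo> v;   // ill-formed: element types must be non-const
    // Instead, the vector itself is const, as the members above are:
    std::shared_ptr<const std::vector<BufferInfo>> infos =
            std::make_shared<std::vector<BufferInfo>>(std::vector<BufferInfo>{{1}, {2}});
    return infos->size() == 2 ? 0 : 1;
}
```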
 
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index fcd17b9..f42e315 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -236,7 +236,7 @@
     // Start of members protected by mBatchLock
     std::deque<uint32_t> mInflightBatchSizes;
     std::vector<native_handle_t*> mInflightReturnedHandles;
-    std::vector<const sp<IMemory>> mInflightReturnedMemorys;
+    std::vector<sp<IMemory>> mInflightReturnedMemorys;
     // End of members protected by mBatchLock
 
     void releaseQueuedFrames();
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 0927653..c6087b0 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -61,6 +61,36 @@
 
 using hardware::cas::native::V1_0::IDescrambler;
 
+struct AccessUnitInfo {
+    uint32_t mFlags;
+    uint32_t mSize;
+    int64_t mTimestamp;
+    AccessUnitInfo(uint32_t flags, uint32_t size, int64_t ptsUs)
+            :mFlags(flags), mSize(size), mTimestamp(ptsUs) {
+    }
+    ~AccessUnitInfo() {}
+};
+
+struct CodecCryptoInfo {
+    size_t mNumSubSamples{0};
+    CryptoPlugin::SubSample *mSubSamples{nullptr};
+    uint8_t *mIv{nullptr};
+    uint8_t *mKey{nullptr};
+    enum CryptoPlugin::Mode mMode;
+    CryptoPlugin::Pattern mPattern;
+
+    virtual ~CodecCryptoInfo() {}
+protected:
+    CodecCryptoInfo():
+            mNumSubSamples(0),
+            mSubSamples(nullptr),
+            mIv(nullptr),
+            mKey(nullptr),
+            mMode{CryptoPlugin::kMode_Unencrypted},
+            mPattern{0, 0} {
+    }
+};
+
 struct CodecParameterDescriptor {
     std::string name;
     AMessage::Type type;
@@ -362,6 +392,30 @@
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
             AString *errorDetailMsg) = 0;
+
+    /**
+     * Queue a secure input buffer with multiple access units into the buffer channel.
+     *
+     * @param buffer The buffer to queue. The access unit delimiters and crypto
+     *               subsample information are included in the buffer metadata.
+     * @param secure Whether the buffer is secure.
+     * @param errorDetailMsg The error message to be set in case of error.
+     * @return OK if successful;
+     *         -ENOENT if the buffer is not known;
+     *         -ENOSYS if mCrypto is not set so that decryption is not
+     *         possible;
+     *         other errors if decryption failed.
+     */
+    virtual status_t queueSecureInputBuffers(
+            const sp<MediaCodecBuffer> &buffer,
+            bool secure,
+            AString *errorDetailMsg) {
+        (void)buffer;
+        (void)secure;
+        (void)errorDetailMsg;
+        return -ENOSYS;
+    }
+
     /**
      * Attach a Codec 2.0 buffer to MediaCodecBuffer.
      *
@@ -408,6 +462,34 @@
         (void)errorDetailMsg;
         return -ENOSYS;
     }
+
+    /**
+     * Attach an encrypted HidlMemory buffer containing multiple access units to an index
+     *
+     * @param memory The memory to attach.
+     * @param offset The offset of the encrypted data within the memory.
+     * @param buffer The MediaCodecBuffer to attach the memory to. The access
+     *               unit delimiters and crypto subsample information are included
+     *               in the buffer metadata.
+     * @param secure Whether the buffer is secure.
+     * @param errorDetailMsg The error message to be set if an error occurs.
+     * @return    OK if successful;
+     *            -ENOENT if index is not recognized;
+     *            -ENOSYS if attaching the buffer is not possible or not supported.
+     */
+    virtual status_t attachEncryptedBuffers(
+            const sp<hardware::HidlMemory> &memory,
+            size_t offset,
+            const sp<MediaCodecBuffer> &buffer,
+            bool secure,
+            AString* errorDetailMsg) {
+        (void)memory;
+        (void)offset;
+        (void)buffer;
+        (void)secure;
+        (void)errorDetailMsg;
+        return -ENOSYS;
+    }
     /**
      * Request buffer rendering at specified time.
      *
@@ -435,6 +517,15 @@
     };
 
     /**
+     * Notify that a buffer has been attached to the output surface.
+     *
+     * @param     generation    MediaCodec's surface specifier
+     */
+    virtual void onBufferAttachedToOutputSurface(uint32_t /*generation*/) {
+        // default: no-op
+    };
+
+    /**
      * Discard a buffer to the underlying CodecBase object.
      *
      * TODO: remove once this operation can be handled by just clearing the
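The new virtuals above (queueSecureInputBuffers, attachEncryptedBuffers, onBufferAttachedToOutputSurface) default to -ENOSYS or a no-op so existing buffer channels keep compiling; only channels that understand multi-access-unit buffers override them. A hypothetical override sketch follows; MyBufferChannel and the error text are purely illustrative, and a real channel also implements the remaining pure virtuals of BufferChannelBase.

```
struct MyBufferChannel : public BufferChannelBase {
    // Only the new override is sketched; everything else is omitted for brevity.
    status_t queueSecureInputBuffers(
            const sp<MediaCodecBuffer> &buffer,
            bool secure,
            AString *errorDetailMsg) override {
        (void)secure;
        sp<RefBase> obj;
        if (!buffer->meta()->findObject("cryptoInfos", &obj)) {
            if (errorDetailMsg != nullptr) {
                errorDetailMsg->setTo("no cryptoInfos attached to the input buffer");
            }
            return BAD_VALUE;
        }
        // ... decrypt each CodecCryptoInfo entry and queue the contained access units ...
        return OK;
    }
};
```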
diff --git a/media/libstagefright/include/media/stagefright/CryptoAsync.h b/media/libstagefright/include/media/stagefright/CryptoAsync.h
index b675518..acb3dae 100644
--- a/media/libstagefright/include/media/stagefright/CryptoAsync.h
+++ b/media/libstagefright/include/media/stagefright/CryptoAsync.h
@@ -85,6 +85,18 @@
         kActionDecrypt                 = (1 <<  0),
         kActionAttachEncryptedBuffer   = (1 <<  1)
     };
+
+    // This struct is meant to copy the mapped contents from the original info.
+    struct CryptoAsyncInfo : public CodecCryptoInfo {
+        public:
+            explicit CryptoAsyncInfo(const std::unique_ptr<CodecCryptoInfo> &info);
+            virtual ~CryptoAsyncInfo() = default;
+        protected:
+            // all backup buffers for the base object.
+            sp<ABuffer> mKeyBuffer;
+            sp<ABuffer> mIvBuffer;
+            sp<ABuffer> mSubSamplesBuffer;
+    };
 protected:
 
     // Message types for the looper
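CryptoAsyncInfo's stated purpose is to deep-copy the mapped key/iv/subsample contents so the base-class pointers stay valid after the caller's mappings go away. The constructor itself lives in CryptoAsync.cpp and is not part of this header change; the sketch below is only an assumption about what such a copy could look like using the usual ABuffer idiom, not the actual implementation.

```
// Assumed sketch only -- the real constructor is defined in CryptoAsync.cpp.
CryptoAsync::CryptoAsyncInfo::CryptoAsyncInfo(const std::unique_ptr<CodecCryptoInfo> &info) {
    if (info == nullptr) {
        return;
    }
    if (info->mKey != nullptr) {
        mKeyBuffer = ABuffer::CreateAsCopy(info->mKey, 16);
        mKey = mKeyBuffer->data();
    }
    if (info->mIv != nullptr) {
        mIvBuffer = ABuffer::CreateAsCopy(info->mIv, 16);
        mIv = mIvBuffer->data();
    }
    if (info->mSubSamples != nullptr && info->mNumSubSamples > 0) {
        mSubSamplesBuffer = ABuffer::CreateAsCopy(
                info->mSubSamples, info->mNumSubSamples * sizeof(CryptoPlugin::SubSample));
        mSubSamples = reinterpret_cast<CryptoPlugin::SubSample *>(mSubSamplesBuffer->data());
    }
    mNumSubSamples = info->mNumSubSamples;
    mMode = info->mMode;
    mPattern = info->mPattern;
}
```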
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 054a4b8..ee75129 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -144,6 +144,7 @@
     std::mutex mFallocMutex;
     bool mPreAllocFirstTime; // Pre-allocate space for file and track headers only once per file.
     uint64_t mPrevAllTracksTotalMetaDataSizeEstimate;
+    Condition mFdCond;
 
     List<Track *> mTracks;
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 2f94e5e..7169b1e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -28,6 +28,7 @@
 #include <media/MediaMetrics.h>
 #include <media/MediaProfiles.h>
 #include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/CodecErrorLog.h>
 #include <media/stagefright/FrameRenderTracker.h>
 #include <media/stagefright/MediaHistogram.h>
@@ -56,7 +57,9 @@
 struct AString;
 struct BatteryChecker;
 class BufferChannelBase;
+struct AccessUnitInfo;
 struct CodecBase;
+struct CodecCryptoInfo;
 struct CodecParameterDescriptor;
 class IBatteryStats;
 struct ICrypto;
@@ -78,6 +81,9 @@
 using aidl::android::media::MediaResourceParcel;
 using aidl::android::media::ClientConfigParcel;
 
+typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+typedef WrapperObject<std::vector<std::unique_ptr<CodecCryptoInfo>>> CryptoInfosWrapper;
+
 struct MediaCodec : public AHandler {
     enum Domain {
         DOMAIN_UNKNOWN = 0,
@@ -90,6 +96,7 @@
         CONFIGURE_FLAG_ENCODE           = 1,
         CONFIGURE_FLAG_USE_BLOCK_MODEL  = 2,
         CONFIGURE_FLAG_USE_CRYPTO_ASYNC = 4,
+        CONFIGURE_FLAG_DETACHED_SURFACE = 8,
     };
 
     enum BufferFlags {
@@ -115,6 +122,7 @@
         CB_OUTPUT_FORMAT_CHANGED = 4,
         CB_RESOURCE_RECLAIMED = 5,
         CB_CRYPTO_ERROR = 6,
+        CB_LARGE_FRAME_OUTPUT_AVAILABLE = 7,
     };
 
     static const pid_t kNoPid = -1;
@@ -185,6 +193,13 @@
             uint32_t flags,
             AString *errorDetailMsg = NULL);
 
+    status_t queueInputBuffers(
+            size_t index,
+            size_t offset,
+            size_t size,
+            const sp<BufferInfosWrapper> &accessUnitInfo,
+            AString *errorDetailMsg = NULL);
+
     status_t queueSecureInputBuffer(
             size_t index,
             size_t offset,
@@ -198,11 +213,18 @@
             uint32_t flags,
             AString *errorDetailMsg = NULL);
 
+    status_t queueSecureInputBuffers(
+            size_t index,
+            size_t offset,
+            size_t size,
+            const sp<BufferInfosWrapper> &accessUnitInfo,
+            const sp<CryptoInfosWrapper> &cryptoInfos,
+            AString *errorDetailMsg = NULL);
+
     status_t queueBuffer(
             size_t index,
             const std::shared_ptr<C2Buffer> &buffer,
-            int64_t presentationTimeUs,
-            uint32_t flags,
+            const sp<BufferInfosWrapper> &bufferInfos,
             const sp<AMessage> &tunings,
             AString *errorDetailMsg = NULL);
 
@@ -210,14 +232,9 @@
             size_t index,
             const sp<hardware::HidlMemory> &memory,
             size_t offset,
-            const CryptoPlugin::SubSample *subSamples,
-            size_t numSubSamples,
-            const uint8_t key[16],
-            const uint8_t iv[16],
-            CryptoPlugin::Mode mode,
-            const CryptoPlugin::Pattern &pattern,
-            int64_t presentationTimeUs,
-            uint32_t flags,
+            size_t size,
+            const sp<BufferInfosWrapper> &bufferInfos,
+            const sp<CryptoInfosWrapper> &cryptoInfos,
             const sp<AMessage> &tunings,
             AString *errorDetailMsg = NULL);
 
@@ -258,6 +275,8 @@
 
     status_t setSurface(const sp<Surface> &nativeWindow);
 
+    status_t detachOutputSurface();
+
     status_t requestIDRFrame();
 
     // Notification will be posted once there "is something to do", i.e.
@@ -321,7 +340,10 @@
     friend struct ResourceManagerClient;
 
     // to create the metrics associated with this codec.
-    mediametrics_handle_t createMediaMetrics(const sp<AMessage>& format, uint32_t flags);
+    // Any error in this function will be captured by the output argument err.
+    mediametrics_handle_t createMediaMetrics(const sp<AMessage>& format,
+                                             uint32_t flags,
+                                             status_t* err);
 
 private:
     enum State {
@@ -349,6 +371,7 @@
         kWhatInit                           = 'init',
         kWhatConfigure                      = 'conf',
         kWhatSetSurface                     = 'sSur',
+        kWhatDetachSurface                  = 'dSur',
         kWhatCreateInputSurface             = 'cisf',
         kWhatSetInputSurface                = 'sisf',
         kWhatStart                          = 'strt',
@@ -455,6 +478,10 @@
     uint32_t mSurfaceGeneration = 0;
     SoftwareRenderer *mSoftRenderer;
 
+    // Get the detached BufferQueue surface for a video decoder, and create it
+    // if it did not yet exist.
+    sp<Surface> getOrCreateDetachedSurface();
+
     Mutex mMetricsLock;
     mediametrics_handle_t mMetricsHandle = 0;
     bool mMetricsToUpload = false;
@@ -623,6 +650,13 @@
     status_t queueCSDInputBuffer(size_t bufferIndex);
 
     status_t handleSetSurface(const sp<Surface> &surface);
+
+    // Common implementation for changing the output surface.
+    // Handles setting a null surface, which is used during configure and init.
+    // Set |callCodec| to true if the codec needs to be notified (e.g. in the executing state).
+    // Setting |onShutdown| to true avoids extra work when this is used for detaching on
+    // delayed release.
+    status_t handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutdown = false);
     status_t connectToSurface(const sp<Surface> &surface, uint32_t *generation);
     status_t disconnectFromSurface();
 
@@ -695,7 +729,7 @@
     sp<AMessage> mMsgPollForRenderedBuffers;
 
     class ReleaseSurface;
-    std::unique_ptr<ReleaseSurface> mReleaseSurface;
+    std::unique_ptr<ReleaseSurface> mDetachedSurface;
 
     std::list<sp<AMessage>> mLeftover;
     status_t handleLeftover(size_t index);
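The new queueInputBuffers/queueSecureInputBuffers entry points take a BufferInfosWrapper describing each access unit packed into a single input buffer, instead of one timestamp/flags pair per call. A hedged usage fragment is below; `codec`, `index`, the sizes and the timestamps are placeholders, and only the wrapper typedef and the AccessUnitInfo constructor introduced in this change are assumed.

```
// Illustrative fragment; access-unit sizes, timestamps and `index` are placeholders.
sp<BufferInfosWrapper> infos = new BufferInfosWrapper(std::vector<AccessUnitInfo>());
infos->value.emplace_back(0 /* flags */, firstAuSize, firstPtsUs);
infos->value.emplace_back(MediaCodec::BUFFER_FLAG_EOS, secondAuSize, secondPtsUs);

AString errorDetailMsg;
status_t err = codec->queueInputBuffers(
        index, 0 /* offset */, firstAuSize + secondAuSize, infos, &errorDetailMsg);
```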
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index f4c40e1..b1cf665 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -562,6 +562,30 @@
     }
 }
 
+inline constexpr int32_t DTS_HDProfileHRA = 0x1;
+inline constexpr int32_t DTS_HDProfileLBR = 0x2;
+inline constexpr int32_t DTS_HDProfileMA = 0x4;
+
+inline static const char *asString_Dts_HDProfile(int32_t i, const char *def = "??") {
+    switch (i) {
+        case DTS_HDProfileHRA:  return "HRA";
+        case DTS_HDProfileLBR:  return "LBR";
+        case DTS_HDProfileMA:   return "MA";
+        default:                return def;
+    }
+}
+
+inline constexpr int32_t DTS_UHDProfileP1 = 0x1;
+inline constexpr int32_t DTS_UHDProfileP2 = 0x2;
+
+inline static const char *asString_Dts_UHDProfile(int32_t i, const char *def = "??") {
+    switch (i) {
+        case DTS_UHDProfileP1:  return "P1";
+        case DTS_UHDProfileP2:  return "P2";
+        default:                return def;
+    }
+}
+
 inline constexpr int32_t BITRATE_MODE_CBR = 2;
 inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
 inline constexpr int32_t BITRATE_MODE_CQ = 0;
@@ -697,6 +721,7 @@
 inline constexpr char FEATURE_AdaptivePlayback[]       = "adaptive-playback";
 inline constexpr char FEATURE_EncodingStatistics[]     = "encoding-statistics";
 inline constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
+inline constexpr char FEATURE_MultipleFrames[] = "multiple-frames";
 inline constexpr char FEATURE_PartialFrame[] = "partial-frame";
 inline constexpr char FEATURE_QpBounds[] = "qp-bounds";
 inline constexpr char FEATURE_SecurePlayback[]         = "secure-playback";
@@ -728,8 +753,13 @@
 inline constexpr char MIMETYPE_AUDIO_FLAC[] = "audio/flac";
 inline constexpr char MIMETYPE_AUDIO_MSGSM[] = "audio/gsm";
 inline constexpr char MIMETYPE_AUDIO_AC3[] = "audio/ac3";
+inline constexpr char MIMETYPE_AUDIO_AC4[] = "audio/ac4";
 inline constexpr char MIMETYPE_AUDIO_EAC3[] = "audio/eac3";
+inline constexpr char MIMETYPE_AUDIO_EAC3_JOC[] = "audio/eac3-joc";
 inline constexpr char MIMETYPE_AUDIO_SCRAMBLED[] = "audio/scrambled";
+inline constexpr char MIMETYPE_AUDIO_DTS[] = "audio/vnd.dts";
+inline constexpr char MIMETYPE_AUDIO_DTS_HD[] = "audio/vnd.dts.hd";
+inline constexpr char MIMETYPE_AUDIO_DTS_UHD[] = "audio/vnd.dts.uhd";
 
 inline constexpr char MIMETYPE_IMAGE_ANDROID_HEIC[] = "image/vnd.android.heic";
 
@@ -881,7 +911,6 @@
 inline constexpr int32_t CRYPTO_MODE_AES_CBC     = 2;
 inline constexpr int32_t CRYPTO_MODE_AES_CTR     = 1;
 inline constexpr int32_t CRYPTO_MODE_UNENCRYPTED = 0;
-inline constexpr int32_t INFO_OUTPUT_BUFFERS_CHANGED = -3;
 inline constexpr int32_t INFO_OUTPUT_FORMAT_CHANGED  = -2;
 inline constexpr int32_t INFO_TRY_AGAIN_LATER        = -1;
 inline constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT               = 1;
@@ -892,6 +921,8 @@
 inline constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
 inline constexpr char PARAMETER_KEY_TUNNEL_PEEK[] =  "tunnel-peek";
 inline constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
+inline constexpr char PARAMETER_KEY_QP_OFFSET_MAP[] = "qp-offset-map";
+inline constexpr char PARAMETER_KEY_QP_OFFSET_RECTS[] = "qp-offset-rects";
 
 }
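The DTS additions are plain int32_t profile constants with matching pretty-printers; a short illustrative fragment (assuming this header is included):

```
int32_t profile = DTS_HDProfileMA;
printf("dts-hd profile: %s\n", asString_Dts_HDProfile(profile));   // "MA"
printf("dts-uhd profile: %s\n", asString_Dts_UHDProfile(0x10));    // unknown value -> "??"
```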
 
diff --git a/media/libstagefright/include/media/stagefright/PersistentSurface.h b/media/libstagefright/include/media/stagefright/PersistentSurface.h
index f4943c3..554ee43 100644
--- a/media/libstagefright/include/media/stagefright/PersistentSurface.h
+++ b/media/libstagefright/include/media/stagefright/PersistentSurface.h
@@ -18,6 +18,8 @@
 
 #define PERSISTENT_SURFACE_H_
 
+#include <android/binder_auto_utils.h>
+#include <android/binder_libbinder.h>
 #include <binder/Parcel.h>
 #include <hidl/HidlSupport.h>
 #include <hidl/HybridInterface.h>
@@ -29,24 +31,43 @@
 struct PersistentSurface : public RefBase {
     PersistentSurface() {}
 
-    // create a persistent surface
+    // create a persistent surface in HIDL
     PersistentSurface(
             const sp<IGraphicBufferProducer>& bufferProducer,
             const sp<hidl::base::V1_0::IBase>& hidlTarget) :
         mBufferProducer(bufferProducer),
-        mHidlTarget(hidlTarget) { }
+        mHidlTarget(hidlTarget),
+        mAidlTarget(nullptr),
+        mAidl(false) { }
+
+    // create a persistent surface in AIDL
+    PersistentSurface(
+            const sp<IGraphicBufferProducer>& bufferProducer,
+            const ::ndk::SpAIBinder& aidlTarget) :
+        mBufferProducer(bufferProducer),
+        mHidlTarget(nullptr),
+        mAidlTarget(aidlTarget),
+        mAidl(true) { }
 
     sp<IGraphicBufferProducer> getBufferProducer() const {
         return mBufferProducer;
     }
 
+    bool isTargetAidl() const {
+        return mAidl;
+    }
+
     sp<hidl::base::V1_0::IBase> getHidlTarget() const {
-        return mHidlTarget;
+        return mAidl ? nullptr : mHidlTarget;
+    }
+
+    ::ndk::SpAIBinder getAidlTarget() const {
+        return mAidl ? mAidlTarget : nullptr;
     }
 
     status_t writeToParcel(Parcel *parcel) const {
         parcel->writeStrongBinder(IInterface::asBinder(mBufferProducer));
-        // write hidl target
+        // write hidl target if available
         if (mHidlTarget != nullptr) {
             HalToken token;
             bool result = createHalToken(mHidlTarget, &token);
@@ -57,6 +78,22 @@
         } else {
             parcel->writeBool(false);
         }
+        // write aidl target if available
+        if (mAidl) {
+            AIBinder *binder = mAidlTarget.get();
+            if (binder != nullptr) {
+                ::android::sp<::android::IBinder> intf =
+                        AIBinder_toPlatformBinder(binder);
+                if (intf) {
+                    parcel->writeBool(true);
+                    parcel->writeStrongBinder(intf);
+                } else {
+                    parcel->writeBool(false);
+                }
+            } else {
+                parcel->writeBool(false);
+            }
+        }
         return NO_ERROR;
     }
 
@@ -65,21 +102,43 @@
                 parcel->readStrongBinder());
         // read hidl target
         bool haveHidlTarget = parcel->readBool();
+        mAidl = false;
         if (haveHidlTarget) {
             std::vector<uint8_t> tokenVector;
             parcel->readByteVector(&tokenVector);
             HalToken token = HalToken(tokenVector);
             mHidlTarget = retrieveHalInterface(token);
             deleteHalToken(token);
+            return NO_ERROR;
         } else {
             mHidlTarget.clear();
         }
+
+        // read aidl target
+        bool haveAidlTarget = false;
+        if (parcel->readBool(&haveAidlTarget) != NO_ERROR) {
+            return NO_ERROR;
+        }
+        mAidl = true;
+        if (haveAidlTarget) {
+            ::android::sp<::android::IBinder> intf = parcel->readStrongBinder();
+            AIBinder *ndkBinder = AIBinder_fromPlatformBinder(intf);
+            if (ndkBinder) {
+                mAidlTarget.set(ndkBinder);
+            } else {
+                mAidlTarget.set(nullptr);
+            }
+        } else {
+            mAidlTarget.set(nullptr);
+        }
         return NO_ERROR;
     }
 
 private:
     sp<IGraphicBufferProducer> mBufferProducer;
     sp<hidl::base::V1_0::IBase> mHidlTarget;
+    ::ndk::SpAIBinder mAidlTarget;
+    bool mAidl;
 
     DISALLOW_EVIL_CONSTRUCTORS(PersistentSurface);
 };
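With the AIDL target added, a PersistentSurface now carries exactly one of a HIDL base interface or an ndk binder, and isTargetAidl() tells the consumer which path to take. A hedged fragment follows; `producer` and `aidlBinder` are placeholders for an sp<IGraphicBufferProducer> and an ::ndk::SpAIBinder obtained elsewhere.

```
// Illustrative fragment only.
sp<PersistentSurface> persistent = new PersistentSurface(producer, aidlBinder);
if (persistent->isTargetAidl()) {
    ::ndk::SpAIBinder target = persistent->getAidlTarget();
    // hand the binder to the AIDL codec path
} else {
    sp<hidl::base::V1_0::IBase> hidlTarget = persistent->getHidlTarget();
    // legacy HIDL path
}
```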
diff --git a/media/libstagefright/include/media/stagefright/SkipCutBuffer.h b/media/libstagefright/include/media/stagefright/SkipCutBuffer.h
index 0fb5690..66f0bb1 100644
--- a/media/libstagefright/include/media/stagefright/SkipCutBuffer.h
+++ b/media/libstagefright/include/media/stagefright/SkipCutBuffer.h
@@ -59,6 +59,12 @@
     int32_t mReadHead;
     int32_t mCapacity;
     char* mCutBuffer;
+
+    /*
+     * Added to support access-unit skip/cut handling in the Codec2 framework.
+     */
+    friend class MultiAccessUnitSkipCutBuffer;
+
     DISALLOW_EVIL_CONSTRUCTORS(SkipCutBuffer);
 };
 
diff --git a/media/libstagefright/include/media/stagefright/SurfaceUtils.h b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
index eccb413..882a5ab 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceUtils.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
@@ -27,7 +27,7 @@
 namespace android {
 
 struct HDRStaticInfo;
-class IProducerListener;
+class SurfaceListener;
 
 /**
  * Configures |nativeWindow| for given |width|x|height|, pixel |format|, |rotation| and |usage|.
@@ -45,7 +45,7 @@
 status_t nativeWindowConnect(ANativeWindow *surface, const char *reason);
 status_t nativeWindowDisconnect(ANativeWindow *surface, const char *reason);
 status_t surfaceConnectWithListener(const sp<Surface> &surface,
-        sp<IProducerListener> listener, const char *reason);
+        sp<SurfaceListener> listener, const char *reason);
 
 /**
  * Disable buffer dropping behavior of BufferQueue if target sdk of application
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 79ab009..630817c 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -20,9 +20,6 @@
 cc_library_shared {
     name: "libstagefright_omx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
@@ -218,9 +215,6 @@
 cc_library_shared {
     name: "libstagefright_omx_utils",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
     srcs: ["OMXUtils.cpp"],
     export_include_dirs: [
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 959f43e..458ac9c 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -626,6 +626,10 @@
             // ACodec is waiting for all buffers to be returned, do NOT
             // submit any more buffers to the codec.
             bufferSource->onOmxIdle();
+        } else if (param == OMX_StateExecuting) {
+            // Initiating transition from Idle -> Executing
+            // Start submitting buffers to codec.
+            bufferSource->onOmxExecuting();
         } else if (param == OMX_StateLoaded) {
             // Initiating transition from Idle/Executing -> Loaded
             // Buffers are about to be freed.
@@ -2404,13 +2408,6 @@
             asString(event), event, arg1String, arg1, arg2String, arg2);
     const sp<IOMXBufferSource> bufferSource(getBufferSource());
 
-    if (bufferSource != NULL
-            && event == OMX_EventCmdComplete
-            && arg1 == OMX_CommandStateSet
-            && arg2 == OMX_StateExecuting) {
-        bufferSource->onOmxExecuting();
-    }
-
     // allow configuration if we return to the loaded state
     if (event == OMX_EventCmdComplete
             && arg1 == OMX_CommandStateSet
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 4183023..4ab5d10 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -619,6 +619,13 @@
                 if (!isValidOMXParam(outParams)) {
                     return OMX_ErrorBadParameter;
                 }
+                if (offsetof(DescribeHDR10PlusInfoParams, nValue) + outParams->nParamSize >
+                    outParams->nSize) {
+                    ALOGE("b/329641908: too large param size; nParamSize=%u nSize=%u",
+                          outParams->nParamSize, outParams->nSize);
+                    android_errorWriteLog(0x534e4554, "329641908");
+                    return OMX_ErrorBadParameter;
+                }
 
                 outParams->nParamSizeUsed = info->size();
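The added check guards against a caller-supplied nParamSize that would place the variable-length payload past the end of the structure the caller actually allocated (nSize). The standalone sketch below shows the general shape of that validation with illustrative field names, not the OMX type definitions.

```
#include <cstddef>
#include <cstdint>

// A variable-length parameter is only safe to fill if its payload, starting at the
// flexible field, fits inside the caller-declared total size.
struct VarParam {
    uint32_t nSize;        // total size of the structure as declared by the caller
    uint32_t nParamSize;   // number of payload bytes the caller claims to provide
    uint8_t  nValue[1];    // flexible payload starts here
};

static bool payloadFits(const VarParam *p) {
    return offsetof(VarParam, nValue) + p->nParamSize <= p->nSize;
}
```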
 
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 3598e8d..bb850ca 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -84,9 +84,6 @@
     name: "librenderfright",
     defaults: ["librenderfright_defaults"],
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     cflags: [
@@ -118,3 +115,11 @@
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
+
+cc_library_headers {
+    name: "librenderfright_gl_headers",
+    export_include_dirs: ["gl"],
+    visibility: [
+        "//frameworks/av/media/libstagefright/renderfright/fuzzer:__subpackages__",
+    ],
+}
diff --git a/media/libstagefright/renderfright/fuzzer/Android.bp b/media/libstagefright/renderfright/fuzzer/Android.bp
new file mode 100644
index 0000000..574e49f
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/Android.bp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    default_team: "trendy_team_android_media_codec_framework",
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
+cc_fuzz {
+    name: "libstagefright_renderfright_fuzzer",
+    srcs: [
+        "libstagefright_renderfright_fuzzer.cpp",
+    ],
+    static_libs: [
+        "librenderfright",
+    ],
+    header_libs: [
+        "librenderfright_gl_headers",
+    ],
+    shared_libs: [
+        "libcutils",
+        "libgui",
+        "liblog",
+        "libutils",
+        "libEGL",
+        "libGLESv1_CM",
+        "libGLESv2",
+        "libGLESv3",
+        "libui",
+        "libbase",
+        "libprocessgroup",
+        "libsync",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of librenderfright",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
+}
diff --git a/media/libstagefright/renderfright/fuzzer/README.md b/media/libstagefright/renderfright/fuzzer/README.md
new file mode 100644
index 0000000..742bfdc
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/README.md
@@ -0,0 +1,33 @@
+# Fuzzer for libstagefright_renderfright
+
+RenderFright supports the following parameters:
+1. SetContextPriority (parameter name: "kSetContextPriority")
+2. SetRenderEngineType (parameter name: "kSetRenderEngineType")
+3. CleanupMode (parameter name: "kCleanupMode")
+4. DataSpace (parameter name: "kDataSpace")
+5. ReadBufferUsage (parameter name: "kReadBufferUsage")
+6. WriteBufferUsage (parameter name: "kWriteBufferUsage")
+7. RenderBufferUsage (parameter name: "kRenderBufferUsage")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kSetContextPriority`| 0. `RenderEngine::ContextPriority::LOW`<br/>1. `RenderEngine::ContextPriority::MEDIUM`<br/>2. `RenderEngine::ContextPriority::HIGH` |Value obtained from FuzzedDataProvider|
+|`kSetRenderEngineType`| 0. `RenderEngine::RenderEngineType::GLES`<br/>1. `RenderEngine::RenderEngineType::THREADED`|Value obtained from FuzzedDataProvider|
+|`kCleanupMode`| 0. `RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES`<br/>1. `RenderEngine::CleanupMode::CLEAN_ALL`|Value obtained from FuzzedDataProvider|
+|`kDataSpace`| 0. `ui::Dataspace::UNKNOWN`<br/>1. `ui::Dataspace::ARBITRARY`<br/>2. `ui::Dataspace::STANDARD_SHIFT`<br/>3. `ui::Dataspace::STANDARD_MASK`<br/>4. `ui::Dataspace::STANDARD_UNSPECIFIED`<br/>5. `ui::Dataspace::STANDARD_BT709`<br/>6. `ui::Dataspace::STANDARD_BT601_625`<br/>7. `ui::Dataspace::STANDARD_BT601_625_UNADJUSTED`<br/>8. `ui::Dataspace::STANDARD_BT601_525`<br/>9. `ui::Dataspace::STANDARD_BT601_525_UNADJUSTED`<br/>10. `ui::Dataspace::STANDARD_BT2020`<br/>11. `ui::Dataspace::STANDARD_BT2020_CONSTANT_LUMINANCE`<br/>12. `ui::Dataspace::STANDARD_BT470M`<br/>13. `ui::Dataspace::STANDARD_FILM`<br/>14. `ui::Dataspace::STANDARD_DCI_P3`<br/>15. `ui::Dataspace::STANDARD_ADOBE_RGB`<br/>16. `ui::Dataspace::TRANSFER_SHIFT`<br/>17. `ui::Dataspace::TRANSFER_MASK`<br/>18. `ui::Dataspace::TRANSFER_UNSPECIFIED`<br/>19. `ui::Dataspace::TRANSFER_LINEAR`<br/>20. `ui::Dataspace::TRANSFER_SRGB`<br/>21. `ui::Dataspace::TRANSFER_SMPTE_170M`<br/>22. `ui::Dataspace::TRANSFER_GAMMA2_2`<br/>23. `ui::Dataspace::TRANSFER_GAMMA2_6`<br/>24. `ui::Dataspace::TRANSFER_GAMMA2_8`<br/>25. `ui::Dataspace::TRANSFER_ST2084`<br/>26. `ui::Dataspace::TRANSFER_HLG`<br/>27. `ui::Dataspace::RANGE_SHIFT`<br/>28. `ui::Dataspace::RANGE_MASK`<br/>29. `ui::Dataspace::RANGE_UNSPECIFIED`<br/>30. `ui::Dataspace::RANGE_FULL`<br/>31. `ui::Dataspace::RANGE_LIMITED`<br/>32. `ui::Dataspace::RANGE_EXTENDED`<br/>33. `ui::Dataspace::SRGB_LINEAR`<br/>34. `ui::Dataspace::V0_SRGB_LINEAR`<br/>35. `ui::Dataspace::V0_SCRGB_LINEAR`<br/>36. `ui::Dataspace::SRGB`<br/>37. `ui::Dataspace::V0_SRGB`<br/>38. `ui::Dataspace::V0_SCRGB`<br/>39. `ui::Dataspace::JFIF`<br/>40. `ui::Dataspace::V0_JFIF`<br/>41. `ui::Dataspace::BT601_625`<br/>42. `ui::Dataspace::V0_BT601_625`<br/>43. `ui::Dataspace::BT601_525`<br/>44. `ui::Dataspace::V0_BT601_525`<br/>45. `ui::Dataspace::BT709`<br/>46. `ui::Dataspace::V0_BT709`<br/>47. `ui::Dataspace::DCI_P3_LINEAR`<br/>48. `ui::Dataspace::DCI_P3`<br/>49. `ui::Dataspace::DISPLAY_P3_LINEAR`<br/>50. `ui::Dataspace::DISPLAY_P3`<br/>51. `ui::Dataspace::ADOBE_RGB`<br/>52. `ui::Dataspace::BT2020_LINEAR`<br/>53. `ui::Dataspace::BT2020`<br/>54. `ui::Dataspace::BT2020_PQ`<br/>55. `ui::Dataspace::DEPTH`<br/>56. `ui::Dataspace::SENSOR`<br/>57. `ui::Dataspace::BT2020_ITU`<br/>58. `ui::Dataspace::BT2020_ITU_PQ`<br/>59. `ui::Dataspace::BT2020_ITU_HLG`<br/>60. `ui::Dataspace::BT2020_HLG`<br/>61. `ui::Dataspace::DISPLAY_BT2020`<br/>62. `ui::Dataspace::DYNAMIC_DEPTH`<br/>63. `ui::Dataspace::JPEG_APP_SEGMENTS`<br/>64. `ui::Dataspace::HEIF`|Value obtained from FuzzedDataProvider|
+|`kReadBufferUsage`| 0. `GRALLOC_USAGE_SW_READ_NEVER`<br/>1. `GRALLOC_USAGE_SW_READ_RARELY`<br/>2. `GRALLOC_USAGE_SW_READ_OFTEN`<br/>3. `GRALLOC_USAGE_SW_READ_MASK`|Value obtained from FuzzedDataProvider|
+|`kWriteBufferUsage`| 0. `GRALLOC_USAGE_SW_WRITE_NEVER`<br/>1. `GRALLOC_USAGE_SW_WRITE_RARELY`<br/>2. `GRALLOC_USAGE_SW_WRITE_OFTEN`<br/>3. `GRALLOC_USAGE_SW_WRITE_MASK`|Value obtained from FuzzedDataProvider|
+|`kRenderBufferUsage`| 0. `GRALLOC_USAGE_HW_TEXTURE`<br/>1. `GRALLOC_USAGE_HW_RENDER`<br/>2. `GRALLOC_USAGE_HW_2D`<br/>3. `GRALLOC_USAGE_HW_COMPOSER`<br/>4. `GRALLOC_USAGE_HW_FB`<br/>5. `GRALLOC_USAGE_EXTERNAL_DISP`<br/>6. `GRALLOC_USAGE_PROTECTED`<br/>7. `GRALLOC_USAGE_CURSOR`<br/>8. `GRALLOC_USAGE_HW_VIDEO_ENCODER`<br/>9. `GRALLOC_USAGE_HW_CAMERA_WRITE`<br/>10. `GRALLOC_USAGE_HW_CAMERA_READ`<br/>11. `GRALLOC_USAGE_HW_CAMERA_ZSL`<br/>12. `GRALLOC_USAGE_HW_CAMERA_MASK`<br/>13. `GRALLOC_USAGE_HW_MASK`<br/>14. `GRALLOC_USAGE_RENDERSCRIPT`<br/>15. `GRALLOC_USAGE_FOREIGN_BUFFERS`<br/>16. `GRALLOC_USAGE_HW_IMAGE_ENCODER`|Value obtained from FuzzedDataProvider|
+
+
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) libstagefright_renderfright_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/libstagefright_renderfright_fuzzer/libstagefright_renderfright_fuzzer
+```
diff --git a/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp b/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp
new file mode 100644
index 0000000..b0721e0
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp
@@ -0,0 +1,297 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <GLESRenderEngine.h>
+#include <GLFramebuffer.h>
+#include <GLImage.h>
+#include <Program.h>
+#include <ProgramCache.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <renderengine/RenderEngine.h>
+
+using namespace android::renderengine;
+using namespace android;
+
+static constexpr int32_t kMinRenderAPI = 0;
+static constexpr int32_t kMaxRenderAPI = 8;
+static constexpr int32_t kMaxTextureCount = 100;
+static constexpr int32_t KMaxDisplayWidth = 3840;
+static constexpr int32_t KMaxDisplayHeight = 2160;
+static constexpr int32_t kMinPixelFormat = 1;
+static constexpr int32_t kMaxPixelFormat = 55;
+static constexpr int32_t kMaxRenderLayer = 5;
+
+static constexpr ui::Dataspace kDataSpace[] = {
+        ui::Dataspace::UNKNOWN,
+        ui::Dataspace::ARBITRARY,
+        ui::Dataspace::STANDARD_SHIFT,
+        ui::Dataspace::STANDARD_MASK,
+        ui::Dataspace::STANDARD_UNSPECIFIED,
+        ui::Dataspace::STANDARD_BT709,
+        ui::Dataspace::STANDARD_BT601_625,
+        ui::Dataspace::STANDARD_BT601_625_UNADJUSTED,
+        ui::Dataspace::STANDARD_BT601_525,
+        ui::Dataspace::STANDARD_BT601_525_UNADJUSTED,
+        ui::Dataspace::STANDARD_BT2020,
+        ui::Dataspace::STANDARD_BT2020_CONSTANT_LUMINANCE,
+        ui::Dataspace::STANDARD_BT470M,
+        ui::Dataspace::STANDARD_FILM,
+        ui::Dataspace::STANDARD_DCI_P3,
+        ui::Dataspace::STANDARD_ADOBE_RGB,
+        ui::Dataspace::TRANSFER_SHIFT,
+        ui::Dataspace::TRANSFER_MASK,
+        ui::Dataspace::TRANSFER_UNSPECIFIED,
+        ui::Dataspace::TRANSFER_LINEAR,
+        ui::Dataspace::TRANSFER_SRGB,
+        ui::Dataspace::TRANSFER_SMPTE_170M,
+        ui::Dataspace::TRANSFER_GAMMA2_2,
+        ui::Dataspace::TRANSFER_GAMMA2_6,
+        ui::Dataspace::TRANSFER_GAMMA2_8,
+        ui::Dataspace::TRANSFER_ST2084,
+        ui::Dataspace::TRANSFER_HLG,
+        ui::Dataspace::RANGE_SHIFT,
+        ui::Dataspace::RANGE_MASK,
+        ui::Dataspace::RANGE_UNSPECIFIED,
+        ui::Dataspace::RANGE_FULL,
+        ui::Dataspace::RANGE_LIMITED,
+        ui::Dataspace::RANGE_EXTENDED,
+        ui::Dataspace::SRGB_LINEAR,
+        ui::Dataspace::V0_SRGB_LINEAR,
+        ui::Dataspace::V0_SCRGB_LINEAR,
+        ui::Dataspace::SRGB,
+        ui::Dataspace::V0_SRGB,
+        ui::Dataspace::V0_SCRGB,
+        ui::Dataspace::JFIF,
+        ui::Dataspace::V0_JFIF,
+        ui::Dataspace::BT601_625,
+        ui::Dataspace::V0_BT601_625,
+        ui::Dataspace::BT601_525,
+        ui::Dataspace::V0_BT601_525,
+        ui::Dataspace::BT709,
+        ui::Dataspace::V0_BT709,
+        ui::Dataspace::DCI_P3_LINEAR,
+        ui::Dataspace::DCI_P3,
+        ui::Dataspace::DISPLAY_P3_LINEAR,
+        ui::Dataspace::DISPLAY_P3,
+        ui::Dataspace::ADOBE_RGB,
+        ui::Dataspace::BT2020_LINEAR,
+        ui::Dataspace::BT2020,
+        ui::Dataspace::BT2020_PQ,
+        ui::Dataspace::DEPTH,
+        ui::Dataspace::SENSOR,
+        ui::Dataspace::BT2020_ITU,
+        ui::Dataspace::BT2020_ITU_PQ,
+        ui::Dataspace::BT2020_ITU_HLG,
+        ui::Dataspace::BT2020_HLG,
+        ui::Dataspace::DISPLAY_BT2020,
+        ui::Dataspace::DYNAMIC_DEPTH,
+        ui::Dataspace::JPEG_APP_SEGMENTS,
+        ui::Dataspace::HEIF,
+};
+
+static constexpr int32_t kReadBufferUsage[] = {
+        GRALLOC_USAGE_SW_READ_NEVER, GRALLOC_USAGE_SW_READ_RARELY, GRALLOC_USAGE_SW_READ_OFTEN,
+        GRALLOC_USAGE_SW_READ_MASK};
+
+static constexpr int32_t kWriteBufferUsage[] = {
+        GRALLOC_USAGE_SW_WRITE_NEVER, GRALLOC_USAGE_SW_WRITE_RARELY, GRALLOC_USAGE_SW_WRITE_OFTEN,
+        GRALLOC_USAGE_SW_WRITE_MASK};
+
+static constexpr int32_t kRenderBufferUsage[] = {
+        GRALLOC_USAGE_HW_TEXTURE,
+        GRALLOC_USAGE_HW_RENDER,
+        GRALLOC_USAGE_HW_2D,
+        GRALLOC_USAGE_HW_COMPOSER,
+        GRALLOC_USAGE_HW_FB,
+        GRALLOC_USAGE_EXTERNAL_DISP,
+        GRALLOC_USAGE_PROTECTED,
+        GRALLOC_USAGE_CURSOR,
+        GRALLOC_USAGE_HW_VIDEO_ENCODER,
+        GRALLOC_USAGE_HW_CAMERA_WRITE,
+        GRALLOC_USAGE_HW_CAMERA_READ,
+        GRALLOC_USAGE_HW_CAMERA_ZSL,
+        GRALLOC_USAGE_HW_CAMERA_MASK,
+        GRALLOC_USAGE_HW_MASK,
+        GRALLOC_USAGE_RENDERSCRIPT,
+        GRALLOC_USAGE_FOREIGN_BUFFERS,
+        GRALLOC_USAGE_HW_IMAGE_ENCODER,
+};
+
+static constexpr RenderEngine::ContextPriority kSetContextPriority[] = {
+        RenderEngine::ContextPriority::LOW, RenderEngine::ContextPriority::MEDIUM,
+        RenderEngine::ContextPriority::HIGH};
+
+static constexpr RenderEngine::RenderEngineType kSetRenderEngineType[] = {
+        RenderEngine::RenderEngineType::GLES, RenderEngine::RenderEngineType::THREADED};
+
+static constexpr RenderEngine::CleanupMode kCleanupMode[] = {
+        RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES, RenderEngine::CleanupMode::CLEAN_ALL};
+
+class RenderFrightFuzzer {
+  public:
+    RenderFrightFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {}
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+    void getLayerSetting(renderengine::LayerSettings& layerSetting, sp<GraphicBuffer> buffer,
+                         const Rect& sourceCrop, uint32_t textureName);
+};
+
+void RenderFrightFuzzer::getLayerSetting(renderengine::LayerSettings& layerSetting,
+                                         sp<GraphicBuffer> buffer, const Rect& sourceCrop,
+                                         uint32_t textureName) {
+    layerSetting.geometry.boundaries = sourceCrop.toFloatRect();
+    layerSetting.geometry.roundedCornersRadius = mFdp.ConsumeFloatingPoint<float>();
+    layerSetting.geometry.roundedCornersCrop = sourceCrop.toFloatRect();
+
+    layerSetting.alpha = mFdp.ConsumeFloatingPoint<float>();
+    layerSetting.sourceDataspace = mFdp.PickValueInArray(kDataSpace);
+    layerSetting.backgroundBlurRadius = mFdp.ConsumeFloatingPoint<float>();
+    layerSetting.source.buffer.buffer = buffer;
+    layerSetting.source.buffer.isOpaque = mFdp.ConsumeBool();
+    layerSetting.source.buffer.fence = Fence::NO_FENCE;
+    layerSetting.source.buffer.textureName = textureName;
+    layerSetting.source.buffer.usePremultipliedAlpha = mFdp.ConsumeBool();
+    layerSetting.source.buffer.isY410BT2020 =
+            (layerSetting.sourceDataspace == ui::Dataspace::BT2020_ITU_PQ ||
+             layerSetting.sourceDataspace == ui::Dataspace::BT2020_ITU_HLG);
+    layerSetting.source.buffer.maxMasteringLuminance = mFdp.ConsumeFloatingPoint<float>();
+    layerSetting.source.buffer.maxContentLuminance = mFdp.ConsumeFloatingPoint<float>();
+
+    layerSetting.shadow.lightPos =
+            vec3(mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(), 0);
+    layerSetting.shadow.ambientColor = {
+            mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(),
+            mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>()};
+    layerSetting.shadow.spotColor = {
+            mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(),
+            mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>()};
+    layerSetting.shadow.length = mFdp.ConsumeFloatingPoint<float>();
+    layerSetting.shadow.casterIsTranslucent = mFdp.ConsumeBool();
+}
+
+void RenderFrightFuzzer::process() {
+    auto args = RenderEngineCreationArgs::Builder()
+                        .setPixelFormat(mFdp.ConsumeIntegralInRange<int32_t>(kMinPixelFormat,
+                                                                             kMaxPixelFormat))
+                        .setImageCacheSize(mFdp.ConsumeIntegral<uint32_t>())
+                        .setUseColorManagerment(mFdp.ConsumeBool())
+                        .setEnableProtectedContext(mFdp.ConsumeBool())
+                        .setPrecacheToneMapperShaderOnly(mFdp.ConsumeBool())
+                        .setSupportsBackgroundBlur(mFdp.ConsumeBool())
+                        .setContextPriority(mFdp.PickValueInArray(kSetContextPriority))
+                        .setRenderEngineType(mFdp.PickValueInArray(kSetRenderEngineType))
+                        .build();
+    std::unique_ptr<RenderEngine> renderEngine = RenderEngine::create(args);
+
+    std::vector<uint32_t> textures;
+    int32_t maxCount = mFdp.ConsumeIntegralInRange<size_t>(0, kMaxTextureCount);
+    for (size_t i = 0; i < maxCount; ++i) {
+        textures.push_back(mFdp.ConsumeIntegral<uint32_t>());
+    }
+
+    while (mFdp.remaining_bytes()) {
+        int32_t renderFrightAPIs =
+                mFdp.ConsumeIntegralInRange<int32_t>(kMinRenderAPI, kMaxRenderAPI);
+        switch (renderFrightAPIs) {
+            case 0: {
+                renderEngine->genTextures(textures.size(), textures.data());
+                break;
+            }
+            case 1: {
+                renderEngine->deleteTextures(textures.size(), textures.data());
+                break;
+            }
+            case 2: {
+                renderEngine->useProtectedContext(mFdp.ConsumeBool());
+                break;
+            }
+            case 3: {
+                renderEngine->cleanupPostRender(mFdp.PickValueInArray(kCleanupMode));
+                break;
+            }
+            case 4: {
+                renderEngine->unbindExternalTextureBuffer(mFdp.ConsumeIntegral<uint64_t>());
+                break;
+            }
+            case 5: {
+                renderEngine->primeCache();
+                break;
+            }
+            case 6: {
+                sp<Fence> fence = sp<Fence>::make();
+                sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
+                renderEngine->bindExternalTextureBuffer(mFdp.ConsumeIntegral<uint32_t>(), buffer,
+                                                        fence);
+                break;
+            }
+            case 7: {
+                sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
+                renderEngine->cacheExternalTextureBuffer(buffer);
+                break;
+            }
+            case 8: {
+                std::vector<const renderengine::LayerSettings*> layers;
+                renderengine::LayerSettings layerSetting;
+                int32_t width = mFdp.ConsumeIntegralInRange<int32_t>(0, kMaxDisplayWidth);
+                int32_t height = mFdp.ConsumeIntegralInRange<int32_t>(0, kMaxDisplayHeight);
+                Rect sourceCrop(mFdp.ConsumeIntegralInRange<int32_t>(0, width),
+                                mFdp.ConsumeIntegralInRange<int32_t>(0, height));
+                uint32_t textureName = 0;
+                /* Get a single texture name to pass to layers */
+                renderEngine->genTextures(1 /*numTextures*/, &textureName);
+                sp<GraphicBuffer> buffer;
+                const uint32_t usage = (mFdp.PickValueInArray(kReadBufferUsage) |
+                                        mFdp.PickValueInArray(kWriteBufferUsage) |
+                                        mFdp.PickValueInArray(kRenderBufferUsage));
+
+                for (int i = 0; i < kMaxRenderLayer; ++i) {
+                    buffer = new GraphicBuffer(
+                            width, height,
+                            mFdp.ConsumeIntegralInRange<int32_t>(PIXEL_FORMAT_RGBA_8888,
+                                                                 PIXEL_FORMAT_RGBA_4444),
+                            usage, "input");
+                    getLayerSetting(layerSetting, buffer, sourceCrop, textureName);
+                    layers.push_back(&layerSetting);
+                }
+
+                DisplaySettings settings;
+                settings.physicalDisplay = sourceCrop;
+                settings.clip = sourceCrop;
+                settings.outputDataspace = mFdp.PickValueInArray(kDataSpace);
+                settings.maxLuminance = mFdp.ConsumeFloatingPoint<float>();
+
+                sp<GraphicBuffer> dstBuffer =
+                        new GraphicBuffer(width, height,
+                                          mFdp.ConsumeIntegralInRange<int32_t>(
+                                                  PIXEL_FORMAT_RGBA_8888, PIXEL_FORMAT_RGBA_4444),
+                                          usage, "output");
+                base::unique_fd bufferFence;
+                base::unique_fd drawFence;
+
+                renderEngine->drawLayers(settings, layers, dstBuffer, mFdp.ConsumeBool(),
+                                         std::move(bufferFence),
+                                         (mFdp.ConsumeBool() ? nullptr : &drawFence));
+            }
+        }
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    RenderFrightFuzzer renderFrightFuzzer(data, size);
+    renderFrightFuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.cpp b/media/libstagefright/renderfright/gl/ProgramCache.cpp
index 350f0b7..ad6dd03 100644
--- a/media/libstagefright/renderfright/gl/ProgramCache.cpp
+++ b/media/libstagefright/renderfright/gl/ProgramCache.cpp
@@ -683,7 +683,7 @@
             fs << "uniform mat4 inputTransformMatrix;";
             fs << R"__SHADER__(
                 highp vec3 InputTransform(const highp vec3 color) {
-                    return clamp(vec3(inputTransformMatrix * vec4(color, 1.0)), 0.0, 1.0);
+                    return vec3(inputTransformMatrix * vec4(color, 1.0));
                 }
             )__SHADER__";
         } else {
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
index a2791ba..ff64af5 100644
--- a/media/libstagefright/rtsp/fuzzer/Android.bp
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -29,11 +29,19 @@
     header_libs: [
         "libstagefright_rtsp_headers",
     ],
-    fuzz_config:{
+    fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "This fuzzer targets the APIs of libstagefright_rtsp",
+        vector: "local_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -44,7 +52,7 @@
     ],
     defaults: [
         "libstagefright_rtsp_fuzzer_defaults",
-    ]
+    ],
 }
 
 cc_fuzz {
@@ -55,7 +63,7 @@
     defaults: [
         "libstagefright_rtsp_fuzzer_defaults",
     ],
-    shared_libs:[
+    shared_libs: [
         "libandroid_net",
         "libbase",
         "libstagefright",
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 2bcfd67..43542c5 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -32,6 +32,15 @@
         "liblog",
         "media_permission-aidl-cpp",
     ],
+    fuzz_config: {
+        componentid: 42195,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libstagefright",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
 }
 
 cc_fuzz {
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
index 4218d2d..3f850c2 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
@@ -24,61 +24,64 @@
 
 namespace android {
 
-#define MAX_MEDIA_BUFFER_SIZE 2048
+static const android_pixel_format_t kColorFormats[] = {
+        HAL_PIXEL_FORMAT_RGBA_8888,
+        HAL_PIXEL_FORMAT_RGB_565,
+        HAL_PIXEL_FORMAT_BGRA_8888,
+        HAL_PIXEL_FORMAT_RGBA_1010102,
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, /* To cover the default case */
+};
 
-// Fuzzer entry point.
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-    // Init our wrapper
+static const MediaSource::ReadOptions::SeekMode kSeekModes[] = {
+        MediaSource::ReadOptions::SeekMode::SEEK_PREVIOUS_SYNC,
+        MediaSource::ReadOptions::SeekMode::SEEK_NEXT_SYNC,
+        MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST_SYNC,
+        MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST,
+        MediaSource::ReadOptions::SeekMode::SEEK_FRAME_INDEX,
+};
+
+static const std::string kComponentNames[] = {
+        "c2.android.avc.decoder",  "c2.android.hevc.decoder", "c2.android.vp8.decoder",
+        "c2.android.vp9.decoder",  "c2.android.av1.decoder",  "c2.android.mpeg4.decoder",
+        "c2.android.h263.decoder",
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
+    std::string component = fdp.PickValueInArray(kComponentNames);
+    AString componentName(component.c_str());
+    sp<MetaData> trackMeta = generateMetaData(&fdp, component);
+    sp<IMediaSource> source = sp<IMediaSourceFuzzImpl>::make(&fdp, gMaxMediaBufferSize);
 
-    std::string name = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
-    AString componentName(name.c_str());
-    sp<MetaData> trackMeta = generateMetaData(&fdp);
-    sp<IMediaSource> source = new IMediaSourceFuzzImpl(&fdp, MAX_MEDIA_BUFFER_SIZE);
-
-    // Image or video Decoder?
-    sp<FrameDecoder> decoder;
-    bool isVideoDecoder = fdp.ConsumeBool();
-    if (isVideoDecoder) {
-        decoder = new VideoFrameDecoder(componentName, trackMeta, source);
+    sp<FrameDecoder> decoder = nullptr;
+    if (fdp.ConsumeBool()) {
+        decoder = sp<MediaImageDecoder>::make(componentName, trackMeta, source);
     } else {
-        decoder = new MediaImageDecoder(componentName, trackMeta, source);
+        decoder = sp<VideoFrameDecoder>::make(componentName, trackMeta, source);
     }
 
-    while (fdp.remaining_bytes()) {
-        uint8_t switchCase = fdp.ConsumeIntegralInRange<uint8_t>(0, 3);
-        switch (switchCase) {
-            case 0: {
-                int64_t frameTimeUs = fdp.ConsumeIntegral<int64_t>();
-                int option = fdp.ConsumeIntegral<int>();
-                int colorFormat = fdp.ConsumeIntegral<int>();
-                decoder->init(frameTimeUs, option, colorFormat);
-                break;
-            }
-            case 1:
-                decoder->extractFrame();
-                break;
-            case 2: {
-                FrameRect rect;
-                rect.left = fdp.ConsumeIntegral<int32_t>();
-                rect.top = fdp.ConsumeIntegral<int32_t>();
-                rect.right = fdp.ConsumeIntegral<int32_t>();
-                rect.bottom = fdp.ConsumeIntegral<int32_t>();
-                decoder->extractFrame(&rect);
-                break;
-            }
-            case 3: {
-                sp<MetaData> trackMeta = generateMetaData(&fdp);
-                decoder->getMetadataOnly(trackMeta,
-                                         /*colorFormat*/ fdp.ConsumeIntegral<int>(),
-                                         /*thumbnail*/ fdp.ConsumeBool());
-                break;
-            }
-        }
+    if (decoder.get() &&
+        decoder->init(fdp.ConsumeIntegral<uint64_t>() /* frameTimeUs */,
+                      fdp.PickValueInArray(kSeekModes) /* option */,
+                      fdp.PickValueInArray(kColorFormats) /* colorFormat */) == OK) {
+        auto frameDecoderAPI = fdp.PickValueInArray<const std::function<void()>>({
+                [&]() { decoder->extractFrame(); },
+                [&]() {
+                    FrameRect rect(fdp.ConsumeIntegral<int32_t>() /* left */,
+                                   fdp.ConsumeIntegral<int32_t>() /* top */,
+                                   fdp.ConsumeIntegral<int32_t>() /* right */,
+                                   fdp.ConsumeIntegral<int32_t>() /* bottom */
+                    );
+                    decoder->extractFrame(&rect);
+                },
+                [&]() {
+                    FrameDecoder::getMetadataOnly(
+                            trackMeta, fdp.PickValueInArray(kColorFormats) /* colorFormat */,
+                            fdp.ConsumeBool() /* thumbnail */);
+                },
+        });
+        frameDecoderAPI();
     }
-
-    generated_mime_types.clear();
-
     return 0;
 }
 
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
index 228c04a..5430530 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
@@ -20,69 +20,100 @@
 #include <media/stagefright/MetaData.h>
 #include "MediaMimeTypes.h"
 
-#define MAX_METADATA_BUF_SIZE 512
-
 namespace android {
 
 std::vector<std::shared_ptr<char>> generated_mime_types;
+constexpr uint8_t kMinKeyHeight = 32;
+constexpr uint8_t kMinKeyWidth = 32;
+constexpr uint16_t kMaxKeyHeight = 2160;
+constexpr uint16_t kMaxKeyWidth = 3840;
+size_t gMaxMediaBufferSize = 0;
 
-sp<MetaData> generateMetaData(FuzzedDataProvider *fdp) {
-    sp<MetaData> newMeta = new MetaData();
+sp<MetaData> generateMetaData(FuzzedDataProvider* fdp, std::string componentName = std::string()) {
+    sp<MetaData> newMeta = sp<MetaData>::make();
 
-    // random MIME Type
-    const char *mime_type;
-    size_t index = fdp->ConsumeIntegralInRange<size_t>(0, kMimeTypes.size());
-    // Let there be a chance of a true random string
-    if (index == kMimeTypes.size()) {
-        std::string mime_str = fdp->ConsumeRandomLengthString(64);
-        std::shared_ptr<char> mime_cstr(new char[mime_str.length()+1]);
-        generated_mime_types.push_back(mime_cstr);
-        strncpy(mime_cstr.get(), mime_str.c_str(), mime_str.length()+1);
-        mime_type = mime_cstr.get();
-    } else {
-        mime_type = kMimeTypes[index];
+    const char* mime;
+    if (!componentName.empty())
+    {
+        auto it = decoderToMediaType.find(componentName);
+        mime = it->second;
     }
-    newMeta->setCString(kKeyMIMEType, mime_type);
+    else {
+        size_t index = fdp->ConsumeIntegralInRange<size_t>(0, kMimeTypes.size());
+        // Let there be a chance of a true random string
+        if (index == kMimeTypes.size()) {
+            std::string mime_str = fdp->ConsumeRandomLengthString(64);
+            std::shared_ptr<char> mime_cstr(new char[mime_str.length()+1]);
+            generated_mime_types.push_back(mime_cstr);
+            strncpy(mime_cstr.get(), mime_str.c_str(), mime_str.length()+1);
+            mime = mime_cstr.get();
+        } else {
+            mime = kMimeTypes[index];
+        }
+    }
+    newMeta->setCString(kKeyMIMEType, mime);
 
-    // Thumbnail time
-    newMeta->setInt64(kKeyThumbnailTime, fdp->ConsumeIntegral<int64_t>());
+    auto height = fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyHeight, kMaxKeyHeight);
+    auto width = fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyWidth, kMaxKeyWidth);
+    newMeta->setInt32(kKeyHeight, height);
+    newMeta->setInt32(kKeyWidth, width);
 
-    // Values used by allocVideoFrame
-    newMeta->setInt32(kKeyRotation, fdp->ConsumeIntegral<int32_t>());
-    size_t profile_size =
-        fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
-    std::vector<uint8_t> profile_bytes =
-        fdp->ConsumeBytes<uint8_t>(profile_size);
-    newMeta->setData(kKeyIccProfile,
-                     fdp->ConsumeIntegral<int32_t>(),
-                     profile_bytes.empty() ? nullptr : profile_bytes.data(),
-                     profile_bytes.size());
-    newMeta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyDisplayWidth, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyDisplayHeight, fdp->ConsumeIntegral<int32_t>());
+    gMaxMediaBufferSize = height * width;
 
-    // Values used by findThumbnailInfo
-    newMeta->setInt32(kKeyThumbnailWidth, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyThumbnailHeight, fdp->ConsumeIntegral<int32_t>());
-    size_t thumbnail_size =
-        fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
-    std::vector<uint8_t> thumb_bytes =
-        fdp->ConsumeBytes<uint8_t>(thumbnail_size);
-    newMeta->setData(kKeyThumbnailHVCC,
-                     fdp->ConsumeIntegral<int32_t>(),
-                     thumb_bytes.empty() ? nullptr : thumb_bytes.data(),
-                     thumb_bytes.size());
+    if (fdp->ConsumeBool()) {
+        newMeta->setInt32(kKeyTileHeight,
+                          fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyHeight, height));
+        newMeta->setInt32(kKeyTileWidth,
+                          fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyWidth, width));
+        newMeta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<uint8_t>());
+        newMeta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<uint8_t>());
+    }
 
-    // Values used by findGridInfo
-    newMeta->setInt32(kKeyTileWidth, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyTileHeight, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<int32_t>());
+    if (fdp->ConsumeBool()) {
+        newMeta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<uint8_t>());
+        newMeta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<uint8_t>());
+    }
 
-    // A few functions perform a CHECK() that height/width are set
-    newMeta->setInt32(kKeyHeight, fdp->ConsumeIntegral<int32_t>());
-    newMeta->setInt32(kKeyWidth, fdp->ConsumeIntegral<int32_t>());
+    if (fdp->ConsumeBool()) {
+        newMeta->setInt32(kKeyDisplayHeight,
+                          fdp->ConsumeIntegralInRange<uint16_t>(height, UINT16_MAX));
+        newMeta->setInt32(kKeyDisplayWidth,
+                          fdp->ConsumeIntegralInRange<uint16_t>(width, UINT16_MAX));
+    }
+
+    if (fdp->ConsumeBool()) {
+        newMeta->setRect(kKeyCropRect, fdp->ConsumeIntegral<int32_t>() /* left */,
+                         fdp->ConsumeIntegral<int32_t>() /* top */,
+                         fdp->ConsumeIntegral<int32_t>() /* right */,
+                         fdp->ConsumeIntegral<int32_t>() /* bottom */);
+    }
+
+    if (fdp->ConsumeBool()) {
+        newMeta->setInt32(kKeyRotation, fdp->ConsumeIntegralInRange<uint8_t>(0, 3) * 90);
+    }
+
+    if (fdp->ConsumeBool()) {
+        newMeta->setInt64(kKeyThumbnailTime, fdp->ConsumeIntegral<uint64_t>());
+        newMeta->setInt32(kKeyThumbnailHeight, fdp->ConsumeIntegral<uint8_t>());
+        newMeta->setInt32(kKeyThumbnailWidth, fdp->ConsumeIntegral<uint8_t>());
+
+        size_t thumbnailSize = fdp->ConsumeIntegral<size_t>();
+        std::vector<uint8_t> thumbnailData = fdp->ConsumeBytes<uint8_t>(thumbnailSize);
+        if (mime == MEDIA_MIMETYPE_VIDEO_AV1) {
+            newMeta->setData(kKeyThumbnailAV1C, fdp->ConsumeIntegral<int32_t>() /* type */,
+                             thumbnailData.data(), thumbnailData.size());
+        } else {
+            newMeta->setData(kKeyThumbnailHVCC, fdp->ConsumeIntegral<int32_t>() /* type */,
+                             thumbnailData.data(), thumbnailData.size());
+        }
+    }
+
+    if (fdp->ConsumeBool()) {
+        size_t profileSize = fdp->ConsumeIntegral<size_t>();
+        std::vector<uint8_t> profileData = fdp->ConsumeBytes<uint8_t>(profileSize);
+        newMeta->setData(kKeyIccProfile, fdp->ConsumeIntegral<int32_t>() /* type */,
+                         profileData.data(), profileData.size());
+    }
 
     return newMeta;
 }
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
index 9f46a74..b29429a 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
@@ -21,105 +21,256 @@
 #include <media/stagefright/MPEG2TSWriter.h>
 #include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/OggWriter.h>
-
-#include "MediaMimeTypes.h"
-
 #include <webm/WebmWriter.h>
 
 namespace android {
-std::string genMimeType(FuzzedDataProvider *dataProvider) {
-    uint8_t idx = dataProvider->ConsumeIntegralInRange<uint8_t>(0, kMimeTypes.size() - 1);
-    return std::string(kMimeTypes[idx]);
-}
 
-sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, std::string mimeType,
-                                      uint16_t maxDataAmount) {
-    uint32_t dataBlobSize = dataProvider->ConsumeIntegralInRange<uint16_t>(0, maxDataAmount);
-    std::vector<uint8_t> data = dataProvider->ConsumeBytes<uint8_t>(dataBlobSize);
-    // data:[<mediatype>][;base64],<data>
-    std::string uri("data:");
-    uri += mimeType;
-    // Currently libstagefright only accepts base64 uris
-    uri += ";base64,";
-    android::AString out;
-    android::encodeBase64(data.data(), data.size(), &out);
-    uri += out.c_str();
-
-    sp<DataSource> source =
-        DataSourceFactory::getInstance()->CreateFromURI(NULL /* httpService */, uri.c_str());
-
-    if (source == NULL) {
-        return NULL;
-    }
-
-    return MediaExtractorFactory::Create(source);
-}
-
-sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize) {
-    std::string mime = genMimeType(dataProvider);
-    sp<IMediaExtractor> extractor = genMediaExtractor(dataProvider, mime, maxMediaBlobSize);
-
-    if (extractor == NULL) {
-        return NULL;
-    }
-
-    for (size_t i = 0; i < extractor->countTracks(); ++i) {
-        sp<MetaData> meta = extractor->getTrackMetaData(i);
-
-        std::string trackMime = dataProvider->PickValueInArray(kTestedMimeTypes);
-        if (!strcasecmp(mime.c_str(), trackMime.c_str())) {
-            sp<IMediaSource> track = extractor->getTrack(i);
-            if (track == NULL) {
-                return NULL;
-            }
-            return new CallbackMediaSource(track);
-        }
-    }
-
-    return NULL;
-}
-
-sp<MediaWriter> createWriter(int fd, StandardWriters writerType, sp<MetaData> fileMeta) {
+sp<MediaWriter> createWriter(int fd, StandardWriters writerType, sp<MetaData> writerMeta,
+                             FuzzedDataProvider* fdp) {
     sp<MediaWriter> writer;
+
+    if (fdp->ConsumeBool()) {
+        writerMeta->setInt32(kKeyRealTimeRecording, fdp->ConsumeBool());
+    }
+
     switch (writerType) {
-        case OGG:
-            writer = new OggWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
-            break;
         case AAC:
-            writer = new AACWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADIF);
+            writer = sp<AACWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADIF);
+            }
             break;
         case AAC_ADTS:
-            writer = new AACWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADTS);
-            break;
-        case WEBM:
-            writer = new WebmWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_WEBM);
-            break;
-        case MPEG4:
-            writer = new MPEG4Writer(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG_4);
+            writer = sp<AACWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADTS);
+            }
             break;
         case AMR_NB:
-            writer = new AMRWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_NB);
+            writer = sp<AMRWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_NB);
+            }
             break;
         case AMR_WB:
-            writer = new AMRWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_WB);
+            writer = sp<AMRWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_WB);
+            }
             break;
         case MPEG2TS:
-            writer = new MPEG2TSWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG2TS);
+            writer = sp<MPEG2TSWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG2TS);
+            }
             break;
-        default:
-            return nullptr;
+        case MPEG4:
+            writer = sp<MPEG4Writer>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG_4);
+            } else if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_HEIF);
+            } else if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_THREE_GPP);
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKey2ByteNalLength, fdp->ConsumeBool());
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyTimeScale,
+                                     fdp->ConsumeIntegralInRange<int32_t>(600, 96000));
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKey4BitTrackIds, fdp->ConsumeBool());
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt64(kKeyTrackTimeStatus, fdp->ConsumeIntegral<int64_t>());
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyRotation, fdp->ConsumeIntegralInRange<uint8_t>(0, 3) * 90);
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt64(kKeyTime, fdp->ConsumeIntegral<int64_t>());
+            }
+            break;
+        case OGG:
+            writer = sp<OggWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
+            }
+            break;
+        case WEBM:
+            writer = sp<WebmWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_WEBM);
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyTimeScale,
+                                     fdp->ConsumeIntegralInRange<int32_t>(600, 96000));
+            }
+            break;
     }
-    if (writer != nullptr) {
-        fileMeta->setInt32(kKeyRealTimeRecording, false);
-    }
+
     return writer;
 }
+
+sp<FuzzSource> createSource(StandardWriters writerType, FuzzedDataProvider* fdp) {
+    sp<MetaData> meta = sp<MetaData>::make();
+
+    switch (writerType) {
+        case AAC:
+        case AAC_ADTS:
+            meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
+            meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegralInRange<uint8_t>(1, 7));
+            meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyAACProfile, fdp->ConsumeIntegral<int32_t>());
+            }
+            break;
+        case AMR_NB:
+            meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_NB);
+            meta->setInt32(kKeyChannelCount, 1);
+            meta->setInt32(kKeySampleRate, 8000);
+            break;
+        case AMR_WB:
+            meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_WB);
+            meta->setInt32(kKeyChannelCount, 1);
+            meta->setInt32(kKeySampleRate, 16000);
+            break;
+        case MPEG2TS:
+            if (fdp->ConsumeBool()) {
+                meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
+                meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+                meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+            } else {
+                meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+                // The +1s ensure a minimum height and width of 1.
+                meta->setInt32(kKeyWidth, fdp->ConsumeIntegral<uint16_t>() + 1);
+                meta->setInt32(kKeyHeight, fdp->ConsumeIntegral<uint16_t>() + 1);
+            }
+            break;
+        case MPEG4: {
+            auto mime = fdp->PickValueInArray<std::string>(kMpeg4MimeTypes);
+            meta->setCString(kKeyMIMEType, mime.c_str());
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyBackgroundMode, fdp->ConsumeBool());
+            }
+
+            if (!strncasecmp(mime.c_str(), "audio/", 6)) {
+                meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+                meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+
+            } else {
+                // The +1s ensure a minimum height and width of 1.
+                meta->setInt32(kKeyWidth, fdp->ConsumeIntegral<uint16_t>() + 1);
+                meta->setInt32(kKeyHeight, fdp->ConsumeIntegral<uint16_t>() + 1);
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyDisplayWidth, fdp->ConsumeIntegral<uint16_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyDisplayHeight, fdp->ConsumeIntegral<uint16_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyTileWidth, fdp->ConsumeIntegral<uint16_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyTileHeight, fdp->ConsumeIntegral<uint16_t>());
+                }
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<uint8_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<uint8_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyTemporalLayerCount, fdp->ConsumeIntegral<int32_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<uint16_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<uint16_t>());
+                }
+            }
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyBitRate, fdp->ConsumeIntegral<int32_t>());
+            }
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyMaxBitRate, fdp->ConsumeIntegral<int32_t>());
+            }
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyTrackIsDefault, fdp->ConsumeBool());
+            }
+            break;
+        }
+        case OGG:
+            meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_OPUS);
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+            }
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+            }
+            break;
+        case WEBM:
+            if (fdp->ConsumeBool()) {
+                if (fdp->ConsumeBool()) {
+                    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VP8);
+                } else {
+                    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VP9);
+                }
+
+                if (fdp->ConsumeBool()) {
+                    // The +1s ensure a minimum height and width of 1.
+                    meta->setInt32(kKeyWidth, fdp->ConsumeIntegral<uint16_t>() + 1);
+                    meta->setInt32(kKeyHeight, fdp->ConsumeIntegral<uint16_t>() + 1);
+                }
+            } else {
+                if (fdp->ConsumeBool()) {
+                    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS);
+                } else {
+                    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_OPUS);
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+                }
+                meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+            }
+
+            break;
+    }
+
+    return sp<FuzzSource>::make(meta, fdp);
+}
 }  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
index 6856ac0..ad1218b 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
@@ -15,20 +15,52 @@
  */
 
 #pragma once
-#include <datasource/DataSourceFactory.h>
+
 #include <fuzzer/FuzzedDataProvider.h>
-#include <android/IMediaExtractor.h>
-#include <media/IMediaHTTPService.h>
-#include <media/mediarecorder.h>
-#include <media/stagefright/CallbackMediaSource.h>
+
+#include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/MediaWriter.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/foundation/base64.h>
-#include <utils/StrongPointer.h>
 
 namespace android {
+class FuzzSource : public MediaSource {
+  public:
+    FuzzSource(sp<MetaData> meta, FuzzedDataProvider* fdp) : mMetaData(meta), mFdp(fdp) {}
+
+    status_t start(MetaData*) { return OK; }
+
+    virtual status_t stop() { return OK; }
+
+    status_t read(MediaBufferBase** buffer, const ReadOptions*) {
+        // Ensuring that mBuffer has at least two bytes to avoid check failure
+        // in MPEG2TSWriter::SourceInfo::onMessageReceived().
+        if (mFdp->remaining_bytes() > 2) {
+            auto size = mFdp->ConsumeIntegralInRange<uint8_t>(2, INT8_MAX);
+            mBuffer = mFdp->ConsumeBytes<uint8_t>(size);
+            MediaBufferBase* mbb = new MediaBuffer(mBuffer.data(), mBuffer.size());
+
+            size_t length = mFdp->ConsumeIntegralInRange<size_t>(2, mbb->size());
+            size_t offset = mFdp->ConsumeIntegralInRange<size_t>(0, mbb->size() - length);
+            mbb->set_range(offset, length);
+
+            mbb->meta_data().setInt32(kKeyIsEndOfStream, mFdp->ConsumeBool());
+            mbb->meta_data().setInt64(kKeyTime, mFdp->ConsumeIntegral<uint32_t>() / 2);
+            *buffer = mbb;
+
+            return OK;
+        }
+
+        return ERROR_END_OF_STREAM;
+    }
+
+    sp<MetaData> getFormat() { return mMetaData; }
+
+  private:
+    sp<MetaData> mMetaData = nullptr;
+    FuzzedDataProvider* mFdp = nullptr;
+    std::vector<uint8_t> mBuffer;
+};
+
 enum StandardWriters {
     OGG,
     AAC,
@@ -42,54 +74,22 @@
     kMaxValue = MPEG2TS,
 };
 
-static std::string kTestedMimeTypes[] = {"audio/3gpp",
-                                         "audio/amr-wb",
-                                         "audio/vorbis",
-                                         "audio/opus",
-                                         "audio/mp4a-latm",
-                                         "audio/mpeg",
-                                         "audio/mpeg-L1",
-                                         "audio/mpeg-L2",
-                                         "audio/midi",
-                                         "audio/qcelp",
-                                         "audio/g711-alaw",
-                                         "audio/g711-mlaw",
-                                         "audio/flac",
-                                         "audio/aac-adts",
-                                         "audio/gsm",
-                                         "audio/ac3",
-                                         "audio/eac3",
-                                         "audio/eac3-joc",
-                                         "audio/ac4",
-                                         "audio/scrambled",
-                                         "audio/alac",
-                                         "audio/x-ms-wma",
-                                         "audio/x-adpcm-ms",
-                                         "audio/x-adpcm-dvi-ima",
-                                         "video/avc",
-                                         "video/hevc",
-                                         "video/mp4v-es",
-                                         "video/3gpp",
-                                         "video/x-vnd.on2.vp8",
-                                         "video/x-vnd.on2.vp9",
-                                         "video/av01",
-                                         "video/mpeg2",
-                                         "video/dolby-vision",
-                                         "video/scrambled",
-                                         "video/divx",
-                                         "video/divx3",
-                                         "video/xvid",
-                                         "video/x-motion-jpeg",
-                                         "text/3gpp-tt",
-                                         "application/x-subrip",
-                                         "text/vtt",
-                                         "text/cea-608",
-                                         "text/cea-708",
-                                         "application/x-id3v4"};
+static const uint32_t kSampleRateTable[] = {
+        8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000, 64000, 88200, 96000,
+};
+static const std::string kMpeg4MimeTypes[] = {
+        MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, MEDIA_MIMETYPE_IMAGE_AVIF,
 
-std::string genMimeType(FuzzedDataProvider *dataProvider);
-sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, uint16_t dataAmount);
-sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize);
+        MEDIA_MIMETYPE_VIDEO_AV1,          MEDIA_MIMETYPE_VIDEO_AVC,
+        MEDIA_MIMETYPE_VIDEO_HEVC,         MEDIA_MIMETYPE_VIDEO_MPEG4,
+        MEDIA_MIMETYPE_VIDEO_H263,         MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
 
-sp<MediaWriter> createWriter(int32_t fd, StandardWriters writerType, sp<MetaData> fileMeta);
+        MEDIA_MIMETYPE_AUDIO_AMR_NB,       MEDIA_MIMETYPE_AUDIO_AMR_WB,
+        MEDIA_MIMETYPE_AUDIO_AAC,
+};
+
+sp<MediaWriter> createWriter(int32_t fd, StandardWriters writerType, sp<MetaData> writerMeta,
+                             FuzzedDataProvider* fdp);
+
+sp<FuzzSource> createSource(StandardWriters writerType, FuzzedDataProvider* fdp);
 }  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
index e769950..7e6f662 100644
--- a/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
+++ b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
@@ -19,31 +19,33 @@
 
 #include <media/stagefright/MediaSource.h>
 
+#define MAX_FRAMES 5
+
 namespace android {
 
 class IMediaSourceFuzzImpl : public IMediaSource {
  public:
-    IMediaSourceFuzzImpl(FuzzedDataProvider *_fdp, size_t _max_buffer_size) :
-        fdp(_fdp),
-        max_buffer_size(_max_buffer_size) {}
-    status_t start(MetaData*) override { return 0; }
-    status_t stop() override { return 0; }
-    sp<MetaData> getFormat() override { return nullptr; }
-    status_t read(MediaBufferBase**,
-        const MediaSource::ReadOptions*) override;
-    status_t readMultiple(Vector<MediaBufferBase*>*, uint32_t,
-        const MediaSource::ReadOptions*) override;
-    bool supportReadMultiple() override { return true; }
-    bool supportNonblockingRead() override { return true; }
-    status_t pause() override { return 0; }
+   IMediaSourceFuzzImpl(FuzzedDataProvider* _fdp, size_t _max_buffer_size)
+       : frames_read(0), fdp(_fdp), min_buffer_size(32 * 32), max_buffer_size(_max_buffer_size) {}
+   status_t start(MetaData*) override { return 0; }
+   status_t stop() override { return 0; }
+   sp<MetaData> getFormat() override { return nullptr; }
+   status_t read(MediaBufferBase**, const MediaSource::ReadOptions*) override;
+   status_t readMultiple(Vector<MediaBufferBase*>*, uint32_t,
+                         const MediaSource::ReadOptions*) override;
+   bool supportReadMultiple() override { return true; }
+   bool supportNonblockingRead() override { return true; }
+   status_t pause() override { return 0; }
 
  protected:
     IBinder* onAsBinder() { return nullptr; }
 
  private:
-    FuzzedDataProvider *fdp;
-    std::vector<std::shared_ptr<MediaBufferBase>> buffer_bases;
-    const size_t max_buffer_size;
+   uint8_t frames_read;
+   FuzzedDataProvider* fdp;
+   const size_t min_buffer_size;
+   const size_t max_buffer_size;
+   std::vector<uint8_t> buf;
 };
 
 // This class is simply to expose the destructor
@@ -53,32 +55,41 @@
     ~MediaBufferFuzzImpl() {}
 };
 
-status_t IMediaSourceFuzzImpl::read(MediaBufferBase **buffer,
-        const MediaSource::ReadOptions *options) {
+status_t IMediaSourceFuzzImpl::read(MediaBufferBase** buffer, const MediaSource::ReadOptions*) {
     Vector<MediaBufferBase*> buffers;
-    status_t ret = readMultiple(&buffers, 1, options);
+    status_t ret = readMultiple(&buffers, 1, nullptr);
     *buffer = buffers.empty() ? nullptr : buffers[0];
 
     return ret;
 }
 
-status_t IMediaSourceFuzzImpl::readMultiple(Vector<MediaBufferBase*>* buffers,
-        uint32_t maxNumBuffers, const MediaSource::ReadOptions*) {
-    uint32_t num_buffers =
-        fdp->ConsumeIntegralInRange<uint32_t>(0, maxNumBuffers);
-    for(uint32_t i = 0; i < num_buffers; i++) {
-        std::vector<uint8_t> buf = fdp->ConsumeBytes<uint8_t>(
-            fdp->ConsumeIntegralInRange<size_t>(0, max_buffer_size));
+status_t IMediaSourceFuzzImpl::readMultiple(Vector<MediaBufferBase*>* buffers, uint32_t,
+                                            const MediaSource::ReadOptions*) {
+    if (++frames_read == MAX_FRAMES) {
+        auto size = fdp->ConsumeIntegralInRange<size_t>(min_buffer_size, max_buffer_size);
+        buf = fdp->ConsumeBytes<uint8_t>(size);
+        if (buf.size() < size) {
+            buf.resize(size, 0);
+        }
 
-        std::shared_ptr<MediaBufferBase> mbb(
-            new MediaBufferFuzzImpl(buf.data(), buf.size()));
+        MediaBufferBase* mbb = new MediaBufferFuzzImpl(buf.data(), buf.size());
+        mbb->meta_data().setInt64(kKeyTime, fdp->ConsumeIntegral<uint64_t>());
+        buffers->push_back(mbb);
 
-        buffer_bases.push_back(mbb);
-        buffers->push_back(mbb.get());
+        return ERROR_END_OF_STREAM;
     }
 
-    // STATUS_OK
-    return 0;
+    auto size = fdp->ConsumeIntegralInRange<size_t>(min_buffer_size, max_buffer_size);
+    buf = fdp->ConsumeBytes<uint8_t>(size);
+    if (buf.size() < size) {
+        buf.resize(size, 0);
+    }
+
+    MediaBufferBase* mbb = new MediaBufferFuzzImpl(buf.data(), buf.size());
+    mbb->meta_data().setInt64(kKeyTime, fdp->ConsumeIntegral<uint64_t>());
+    buffers->push_back(mbb);
+
+    return OK;
 }
 
 } // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMimeTypes.h b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
index 9f337ac..de7814e 100644
--- a/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
+++ b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
@@ -18,6 +18,7 @@
 #define FUZZER_MEDIAMIMETYPES_H_
 
 #include <media/stagefright/foundation/MediaDefs.h>
+#include <unordered_map>
 
 namespace android {
 
@@ -80,6 +81,15 @@
     MEDIA_MIMETYPE_DATA_TIMED_ID3
 };
 
+static const std::unordered_map<std::string, const char*> decoderToMediaType = {
+        {"c2.android.vp8.decoder", MEDIA_MIMETYPE_VIDEO_VP8},
+        {"c2.android.vp9.decoder", MEDIA_MIMETYPE_VIDEO_VP9},
+        {"c2.android.av1.decoder", MEDIA_MIMETYPE_VIDEO_AV1},
+        {"c2.android.avc.decoder", MEDIA_MIMETYPE_VIDEO_AVC},
+        {"c2.android.hevc.decoder", MEDIA_MIMETYPE_VIDEO_HEVC},
+        {"c2.android.mpeg4.decoder", MEDIA_MIMETYPE_VIDEO_MPEG4},
+        {"c2.android.h263.decoder", MEDIA_MIMETYPE_VIDEO_H263}};
+
 }  // namespace android
 
 #endif  // FUZZER_MEDIAMIMETYPES_H_
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
index 70d73c8..5ac2a54 100644
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -13,94 +13,221 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-// Authors: corbin.souffrant@leviathansecurity.com
-//          dylan.katz@leviathansecurity.com
 
-#include <MediaMuxerFuzzer.h>
-#include <cutils/ashmem.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/stagefright/MediaMuxer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 
 namespace android {
+const uint8_t kMinSize = 0;
+const uint8_t kMinTrackCount = 0;
 
-// Can't seem to get setBuffer or setString working. It always segfaults on a
-// null pointer read or memleaks. So that functionality is missing.
-void createMessage(AMessage *msg, FuzzedDataProvider *fdp) {
-  size_t count = fdp->ConsumeIntegralInRange<size_t>(0, 32);
-  while (fdp->remaining_bytes() > 0 && count > 0) {
-    uint8_t function_id =
-        fdp->ConsumeIntegralInRange<uint8_t>(0, amessage_setvals.size() - 1);
-    amessage_setvals[function_id](msg, fdp);
-    count--;
-  }
+enum kBufferFlags { BUFFER_FLAG_SYNCFRAME = 1, BUFFER_FLAG_CODECCONFIG = 2, BUFFER_FLAG_EOS = 4 };
+
+constexpr char kMuxerFile[] = "MediaMuxer";
+
+const std::string kAudioMimeTypes[] = {
+        MEDIA_MIMETYPE_AUDIO_AMR_NB,
+        MEDIA_MIMETYPE_AUDIO_AMR_WB,
+        MEDIA_MIMETYPE_AUDIO_MPEG,
+        MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+        MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+        MEDIA_MIMETYPE_AUDIO_MIDI,
+        MEDIA_MIMETYPE_AUDIO_AAC,
+        MEDIA_MIMETYPE_AUDIO_QCELP,
+        MEDIA_MIMETYPE_AUDIO_VORBIS,
+        MEDIA_MIMETYPE_AUDIO_OPUS,
+        MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+        MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+        MEDIA_MIMETYPE_AUDIO_RAW,
+        MEDIA_MIMETYPE_AUDIO_FLAC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS,
+        MEDIA_MIMETYPE_AUDIO_MSGSM,
+        MEDIA_MIMETYPE_AUDIO_AC3,
+        MEDIA_MIMETYPE_AUDIO_EAC3,
+        MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+        MEDIA_MIMETYPE_AUDIO_AC4,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4,
+        MEDIA_MIMETYPE_AUDIO_SCRAMBLED,
+        MEDIA_MIMETYPE_AUDIO_ALAC,
+        MEDIA_MIMETYPE_AUDIO_WMA,
+        MEDIA_MIMETYPE_AUDIO_MS_ADPCM,
+        MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM,
+        MEDIA_MIMETYPE_AUDIO_DTS,
+        MEDIA_MIMETYPE_AUDIO_DTS_HD,
+        MEDIA_MIMETYPE_AUDIO_DTS_HD_MA,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2,
+        MEDIA_MIMETYPE_AUDIO_EVRC,
+        MEDIA_MIMETYPE_AUDIO_EVRCB,
+        MEDIA_MIMETYPE_AUDIO_EVRCWB,
+        MEDIA_MIMETYPE_AUDIO_EVRCNW,
+        MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS,
+        MEDIA_MIMETYPE_AUDIO_APTX,
+        MEDIA_MIMETYPE_AUDIO_DRA,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD,
+        MEDIA_MIMETYPE_AUDIO_AAC_MP4,
+        MEDIA_MIMETYPE_AUDIO_AAC_MAIN,
+        MEDIA_MIMETYPE_AUDIO_AAC_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_SSR,
+        MEDIA_MIMETYPE_AUDIO_AAC_LTP,
+        MEDIA_MIMETYPE_AUDIO_AAC_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ERLC,
+        MEDIA_MIMETYPE_AUDIO_AAC_LD,
+        MEDIA_MIMETYPE_AUDIO_AAC_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_AAC_ELD,
+        MEDIA_MIMETYPE_AUDIO_AAC_XHE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADIF,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_IEC61937,
+        MEDIA_MIMETYPE_AUDIO_IEC60958,
+};
+
+const std::string kVideoMimeTypes[] = {
+        MEDIA_MIMETYPE_VIDEO_VP8,       MEDIA_MIMETYPE_VIDEO_VP9,
+        MEDIA_MIMETYPE_VIDEO_AV1,       MEDIA_MIMETYPE_VIDEO_AVC,
+        MEDIA_MIMETYPE_VIDEO_HEVC,      MEDIA_MIMETYPE_VIDEO_MPEG4,
+        MEDIA_MIMETYPE_VIDEO_H263,      MEDIA_MIMETYPE_VIDEO_MPEG2,
+        MEDIA_MIMETYPE_VIDEO_RAW,       MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+        MEDIA_MIMETYPE_VIDEO_SCRAMBLED, MEDIA_MIMETYPE_VIDEO_DIVX,
+        MEDIA_MIMETYPE_VIDEO_DIVX3,     MEDIA_MIMETYPE_VIDEO_XVID,
+        MEDIA_MIMETYPE_VIDEO_MJPEG,
+};
+
+void getSampleAudioFormat(FuzzedDataProvider& fdp, AMessage* format) {
+    std::string mimeType = fdp.PickValueInArray(kAudioMimeTypes);
+    format->setString("mime", mimeType.c_str(), mimeType.length());
+    format->setInt32("sample-rate", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("channel-count", fdp.ConsumeIntegral<int32_t>());
+}
+
+void getSampleVideoFormat(FuzzedDataProvider& fdp, AMessage* format) {
+    std::string mimeType = fdp.PickValueInArray(kVideoMimeTypes);
+    format->setString("mime", mimeType.c_str(), mimeType.length());
+    format->setInt32("height", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("width", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("time-lapse-fps", fdp.ConsumeIntegral<int32_t>());
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    FuzzedDataProvider fdp(data, size);
 
-  size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
-  int fd = ashmem_create_region("mediamuxer_fuzz_region", data_size);
-  if (fd < 0)
+    // memfd_create() creates an anonymous file and returns a file
+    // descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+    // operations on this file.
+    int32_t fd = memfd_create(kMuxerFile, MFD_ALLOW_SEALING);
+    if (fd == -1) {
+        ALOGE("memfd_create failed: %s", strerror(errno));
+        return 0;
+    }
+
+    auto outputFormat = (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(
+            MediaMuxer::OutputFormat::OUTPUT_FORMAT_MPEG_4,
+            MediaMuxer::OutputFormat::OUTPUT_FORMAT_LIST_END);
+
+    sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, outputFormat);
+    if (mMuxer == nullptr) {
+        close(fd);
+        return 0;
+    }
+
+    // Cap each buffer payload passed to writeSampleData() at 80% of the input size.
+    // This keeps a single call from exhausting the data completely, leaving roughly
+    // 20% of it for fuzzing the other APIs.
+    const size_t kMaxSize = (size * 80) / 100;
+    while (fdp.remaining_bytes()) {
+        auto invokeMediaMuxerAPI = fdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    // Bail out with 'return' for the OGG output format because of a
+                    // timeout bug in OggWriter.cpp (b/310316183).
+                    if (outputFormat == MediaMuxer::OutputFormat::OUTPUT_FORMAT_OGG) {
+                        return;
+                    }
+
+                    sp<AMessage> format = sp<AMessage>::make();
+                    fdp.ConsumeBool() ? getSampleAudioFormat(fdp, format.get())
+                                      : getSampleVideoFormat(fdp, format.get());
+
+                    mMuxer->addTrack(fdp.ConsumeBool() ? format : nullptr);
+                },
+                [&]() {
+                    mMuxer->setLocation(fdp.ConsumeIntegral<int32_t>() /* latitude */,
+                                        fdp.ConsumeIntegral<int32_t>() /* longitude */);
+                },
+                [&]() { mMuxer->setOrientationHint(fdp.ConsumeIntegral<int32_t>() /* degrees */); },
+                [&]() { mMuxer->start(); },
+                [&]() {
+                    std::vector<uint8_t> sample = fdp.ConsumeBytes<uint8_t>(
+                            fdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+                    sp<ABuffer> buffer = sp<ABuffer>::make(sample.data(), sample.size());
+
+                    size_t offset = fdp.ConsumeIntegralInRange<size_t>(kMinSize, sample.size());
+                    size_t length =
+                            fdp.ConsumeIntegralInRange<size_t>(kMinSize, buffer->size() - offset);
+                    buffer->setRange(offset, length);
+
+                    sp<AMessage> meta = buffer->meta();
+                    meta->setInt64("sample-file-offset", fdp.ConsumeIntegral<int64_t>());
+                    meta->setInt64("last-sample-index-in-chunk", fdp.ConsumeIntegral<int64_t>());
+
+                    uint32_t flags = 0;
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_SYNCFRAME;
+                    }
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_CODECCONFIG;
+                    }
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_EOS;
+                    }
+
+                    size_t trackIndex = fdp.ConsumeBool()
+                                                ? fdp.ConsumeIntegralInRange<size_t>(
+                                                          kMinTrackCount, mMuxer->getTrackCount())
+                                                : fdp.ConsumeIntegral<size_t>();
+                    int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
+                    mMuxer->writeSampleData(fdp.ConsumeBool() ? buffer : nullptr, trackIndex,
+                                            timeUs, flags);
+                },
+                [&]() {
+                    mMuxer->getTrackFormat(
+                            fdp.ConsumeBool() ? fdp.ConsumeIntegralInRange<size_t>(
+                                                        kMinTrackCount, mMuxer->getTrackCount())
+                                              : fdp.ConsumeIntegral<size_t>() /* idx */);
+                },
+                [&]() { mMuxer->stop(); },
+        });
+
+        invokeMediaMuxerAPI();
+    }
+
+    close(fd);
     return 0;
-
-  uint8_t *sh_data = static_cast<uint8_t *>(
-      mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
-  if (sh_data == MAP_FAILED)
-    return 0;
-
-  MediaMuxer::OutputFormat format =
-      (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
-  sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, format);
-  if (mMuxer == nullptr) {
-    return 0;
-  }
-
-  while (fdp.remaining_bytes() > 1) {
-    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
-    case 0: {
-      // For some reason it only likes mp4s here...
-      if (format == 1 || format == 4)
-        break;
-
-      sp<AMessage> a_format(new AMessage);
-      createMessage(a_format.get(), &fdp);
-      mMuxer->addTrack(a_format);
-      break;
-    }
-    case 1: {
-      mMuxer->start();
-      break;
-    }
-    case 2: {
-      int degrees = fdp.ConsumeIntegral<int>();
-      mMuxer->setOrientationHint(degrees);
-      break;
-    }
-    case 3: {
-      int latitude = fdp.ConsumeIntegral<int>();
-      int longitude = fdp.ConsumeIntegral<int>();
-      mMuxer->setLocation(latitude, longitude);
-      break;
-    }
-    case 4: {
-      size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, data_size);
-      sp<ABuffer> a_buffer(new ABuffer(buf_size));
-
-      size_t trackIndex = fdp.ConsumeIntegral<size_t>();
-      int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
-      uint32_t flags = fdp.ConsumeIntegral<uint32_t>();
-      mMuxer->writeSampleData(a_buffer, trackIndex, timeUs, flags);
-    }
-    }
-  }
-
-  if (fdp.ConsumeBool())
-    mMuxer->stop();
-
-  munmap(sh_data, data_size);
-  close(fd);
-  return 0;
 }
 } // namespace android
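
The MediaMuxer fuzzer above (and the WriterFuzzer below) replaces the earlier ashmem/TemporaryFile plumbing with memfd_create(). For readers unfamiliar with the call, here is a minimal standalone sketch of the anonymous-file-plus-sealing pattern that MFD_ALLOW_SEALING enables; it is not part of the patch, and the file name, payload, and seal flags are illustrative only:

    // Linux / bionic sketch: create an anonymous fd-backed file, write to it,
    // then seal it against growing or shrinking.
    #include <fcntl.h>
    #include <sys/mman.h>
    #include <unistd.h>
    #include <cstdio>

    int main() {
        int fd = memfd_create("demo_region", MFD_ALLOW_SEALING);
        if (fd == -1) {
            perror("memfd_create");
            return 1;
        }
        const char payload[] = "sample";
        if (write(fd, payload, sizeof(payload)) < 0) {
            perror("write");
        }
        // Sealing is what MFD_ALLOW_SEALING makes possible; without it,
        // F_ADD_SEALS fails with EPERM.
        if (fcntl(fd, F_ADD_SEALS, F_SEAL_GROW | F_SEAL_SHRINK) == -1) {
            perror("fcntl(F_ADD_SEALS)");
        }
        close(fd);
        return 0;
    }
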
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
deleted file mode 100644
index 7d4421d..0000000
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-// Authors: corbin.souffrant@leviathansecurity.com
-//          dylan.katz@leviathansecurity.com
-
-#pragma once
-
-#include <fuzzer/FuzzedDataProvider.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-namespace android {
-
-// Mappings vectors are the list of attributes that the MediaMuxer
-// class looks for in the message.
-static std::vector<const char *> floatMappings{
-    "capture-rate",
-    "time-lapse-fps",
-    "frame-rate",
-};
-
-static std::vector<const char *> int64Mappings{
-    "exif-offset",    "exif-size", "target-time",
-    "thumbnail-time", "timeUs",    "durationUs",
-};
-
-static std::vector<const char *> int32Mappings{"loop",
-                                               "time-scale",
-                                               "crypto-mode",
-                                               "crypto-default-iv-size",
-                                               "crypto-encrypted-byte-block",
-                                               "crypto-skip-byte-block",
-                                               "frame-count",
-                                               "max-bitrate",
-                                               "pcm-big-endian",
-                                               "temporal-layer-count",
-                                               "temporal-layer-id",
-                                               "thumbnail-width",
-                                               "thumbnail-height",
-                                               "track-id",
-                                               "valid-samples",
-                                               "color-format",
-                                               "ca-system-id",
-                                               "is-sync-frame",
-                                               "bitrate",
-                                               "max-bitrate",
-                                               "width",
-                                               "height",
-                                               "sar-width",
-                                               "sar-height",
-                                               "display-width",
-                                               "display-height",
-                                               "is-default",
-                                               "tile-width",
-                                               "tile-height",
-                                               "grid-rows",
-                                               "grid-cols",
-                                               "rotation-degrees",
-                                               "channel-count",
-                                               "sample-rate",
-                                               "bits-per-sample",
-                                               "channel-mask",
-                                               "encoder-delay",
-                                               "encoder-padding",
-                                               "is-adts",
-                                               "frame-rate",
-                                               "max-height",
-                                               "max-width",
-                                               "max-input-size",
-                                               "haptic-channel-count",
-                                               "pcm-encoding",
-                                               "aac-profile"};
-
-static const std::vector<std::function<void(AMessage *, FuzzedDataProvider *)>>
-    amessage_setvals = {
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setRect("crop", fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setFloat(floatMappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, floatMappings.size() - 1)],
-                        fdp->ConsumeFloatingPoint<float>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setInt64(int64Mappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, int64Mappings.size() - 1)],
-                        fdp->ConsumeIntegral<int64_t>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setInt32(int32Mappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, int32Mappings.size() - 1)],
-                        fdp->ConsumeIntegral<int32_t>());
-        }};
-} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
index 97d1160..cd0a866 100644
--- a/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
@@ -13,216 +13,49 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-// Authors: corbin.souffrant@leviathansecurity.com
-//          dylan.katz@leviathansecurity.com
-
-#include <android-base/file.h>
-#include <android/content/AttributionSourceState.h>
-#include <ctype.h>
-#include <media/mediarecorder.h>
-#include <media/stagefright/MPEG4Writer.h>
-#include <media/stagefright/MediaDefs.h>
-#include <stdlib.h>
-#include <utils/StrongPointer.h>
-#include <utils/Vector.h>
-
-#include <functional>
-#include <string>
 
 #include "FuzzerMediaUtility.h"
-#include "fuzzer/FuzzedDataProvider.h"
-
-static constexpr uint16_t kMaxOperations = 5000;
-static constexpr uint8_t kMaxPackageNameLen = 50;
-// For other strings in mpeg we want a higher limit.
-static constexpr uint16_t kMaxMPEGStrLen = 1000;
-static constexpr uint16_t kMaxMediaBlobSize = 1000;
 
 namespace android {
 
-using android::content::AttributionSourceState;
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
 
-std::string getFourCC(FuzzedDataProvider *fdp) {
-    std::string fourCC = fdp->ConsumeRandomLengthString(4);
-    // Replace any existing nulls
-    for (size_t pos = 0; pos < fourCC.length(); pos++) {
-        if (fourCC.at(pos) == '\0') {
-            fourCC.replace(pos, 1, "a");
-        }
+    // memfd_create() creates an anonymous file and returns a file
+    // descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+    // operations on this file.
+    int32_t fd = memfd_create("WriterFuzzer", MFD_ALLOW_SEALING);
+    if (fd == -1) {
+        ALOGE("memfd_create() failed: %s", strerror(errno));
+        return 0;
     }
 
-    // If our string is too short, fill the remainder with "a"s.
-    while (fourCC.length() < 4) {
-        fourCC += 'a';
-    }
-    return fourCC;
-}
+    StandardWriters writerType = fdp.ConsumeEnum<StandardWriters>();
+    sp<MetaData> writerMeta = sp<MetaData>::make();
 
-typedef std::vector<std::function<void(FuzzedDataProvider*,
-                                    sp<MediaWriter>, sp<MetaData>, int tmpFileFd)>> OperationVec;
-typedef std::vector<std::function<void(FuzzedDataProvider*, MPEG4Writer*)>> MPEG4OperationVec;
-static const OperationVec operations = {
-    [](FuzzedDataProvider*, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->pause();
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int tmpFd) {
-        bool valid_fd = dataProvider->ConsumeBool();
-        int fd = -1;
-        if (valid_fd) {
-            fd = tmpFd;
-        }
-        // Args don't seem to be used
-        Vector<String16> args;
-        mediaWriter->dump(fd, args);
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int tmpFd) {
-        bool valid_fd = dataProvider->ConsumeBool();
-        int fd = -1;
-        if (valid_fd) {
-            fd = tmpFd;
-        }
-        mediaWriter->setNextFd(fd);
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->setCaptureRate(dataProvider->ConsumeFloatingPoint<float>());
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->setStartTimeOffsetMs(dataProvider->ConsumeIntegral<int>());
-
-        // Likely won't do much, but might as well as do a quick check
-        // while we're here.
-        mediaWriter->getStartTimeOffsetMs();
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
-    },
-};
-
-static const MPEG4OperationVec mpeg4Operations = {
-    [](FuzzedDataProvider*, MPEG4Writer *mediaWriter) { mediaWriter->notifyApproachingLimit(); },
-    // Lower level write methods.
-    // High-level startBox/endBox/etc are all called elsewhere,
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint8_t val = dataProvider->ConsumeIntegral<uint8_t>();
-        mediaWriter->writeInt8(val);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint16_t val = dataProvider->ConsumeIntegral<uint16_t>();
-        mediaWriter->writeInt16(val);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint32_t val = dataProvider->ConsumeIntegral<uint32_t>();
-        mediaWriter->writeInt32(val);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint64_t val = dataProvider->ConsumeIntegral<uint64_t>();
-        mediaWriter->writeInt64(val);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        std::string strVal = dataProvider->ConsumeRandomLengthString(kMaxMPEGStrLen);
-        mediaWriter->writeCString(strVal.c_str());
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        std::string fourCC = getFourCC(dataProvider);
-        mediaWriter->writeFourcc(fourCC.c_str());
-    },
-
-    // Misc setters
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint32_t layers = dataProvider->ConsumeIntegral<uint32_t>();
-        mediaWriter->setTemporalLayerCount(layers);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint32_t duration = dataProvider->ConsumeIntegral<uint32_t>();
-        mediaWriter->setInterleaveDuration(duration);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        int lat = dataProvider->ConsumeIntegral<int>();
-        int lon = dataProvider->ConsumeIntegral<int>();
-        mediaWriter->setGeoData(lat, lon);
-    },
-};
-
-// Not all writers can always add new sources, so we'll need additional checks.
-void addSource(FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter) {
-    sp<MediaSource> mediaSource = genMediaSource(dataProvider, kMaxMediaBlobSize);
-    if (mediaSource == NULL) {
-        // There's a static check preventing NULLs in addSource.
-        return;
-    }
-    mediaWriter->addSource(mediaSource);
-}
-
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-    FuzzedDataProvider dataProvider(data, size);
-    TemporaryFile tf;
-    sp<MetaData> fileMeta = new MetaData;
-    StandardWriters writerType = dataProvider.ConsumeEnum<StandardWriters>();
-    sp<MediaWriter> writer = createWriter(tf.fd, writerType, fileMeta);
-
-    AttributionSourceState attributionSource;
-    attributionSource.packageName = dataProvider.ConsumeRandomLengthString(kMaxPackageNameLen);
-    attributionSource.uid = dataProvider.ConsumeIntegral<int32_t>();
-    attributionSource.pid = dataProvider.ConsumeIntegral<int32_t>();
-    attributionSource.token = sp<BBinder>::make();
-    sp<MediaRecorder> mr = new MediaRecorder(attributionSource);
-    writer->setListener(mr);
-
-    uint8_t baseOpLen = operations.size();
-    uint8_t totalLen = baseOpLen;
-    uint8_t maxSources;
-    // Different writers support different amounts of sources.
-    switch (writerType) {
-        case StandardWriters::AAC:
-        case StandardWriters::AAC_ADTS:
-        case StandardWriters::AMR_NB:
-        case StandardWriters::AMR_WB:
-        case StandardWriters::OGG:
-            maxSources = 1;
-            break;
-        case StandardWriters::WEBM:
-            maxSources = 2;
-            break;
-        default:
-            maxSources = UINT8_MAX;
-            break;
-    }
-    // Initialize some number of sources and add them to our writer.
-    uint8_t sourceCount = dataProvider.ConsumeIntegralInRange<uint8_t>(0, maxSources);
-    for (uint8_t i = 0; i < sourceCount; i++) {
-        addSource(&dataProvider, writer);
+    sp<MediaWriter> writer = createWriter(fd, writerType, writerMeta, &fdp);
+    if (writer == nullptr) {
+        close(fd);
+        return 0;
     }
 
-    // Increase our range if additional operations are implemented.
-    // Currently only MPEG4 has additiona public operations on their writer.
-    if (writerType == StandardWriters::MPEG4) {
-        totalLen += mpeg4Operations.size();
+    if (writerType == StandardWriters::WEBM) {
+        // This range is set to avoid a CHECK failure in WebmWriter::reset() -> EbmlVoid::EbmlVoid().
+        writer->setMaxFileSize(fdp.ConsumeIntegralInRange<int64_t>(5 * 1024 * 1024, INT64_MAX));
+    } else {
+        writer->setMaxFileSize(fdp.ConsumeIntegral<int64_t>());
     }
+    writer->setMaxFileDuration(fdp.ConsumeIntegral<int64_t>());
+    writer->setCaptureRate(fdp.ConsumeFloatingPoint<float>());
 
-    // Many operations require the writer to be started.
-    writer->start(fileMeta.get());
-    for (size_t ops_run = 0; dataProvider.remaining_bytes() > 0 && ops_run < kMaxOperations - 1;
-            ops_run++) {
-        uint8_t op = dataProvider.ConsumeIntegralInRange<uint8_t>(0, totalLen - 1);
-        if (op < baseOpLen) {
-            operations[op](&dataProvider, writer, fileMeta, tf.fd);
-        } else if (writerType == StandardWriters::MPEG4) {
-            mpeg4Operations[op - baseOpLen](&dataProvider, (MPEG4Writer*)writer.get());
-        } else {
-            // Here just in case, will error out.
-            operations[op](&dataProvider, writer, fileMeta, tf.fd);
-        }
-    }
+    sp<MediaSource> source = createSource(writerType, &fdp);
+    writer->addSource(source);
+    writer->start(writerMeta.get());
+    writer->pause();
     writer->stop();
 
-    writer.clear();
-    writer = nullptr;
+    close(fd);
+
     return 0;
 }
 }  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9 b/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9
new file mode 100644
index 0000000..652581f
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc b/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc
new file mode 100644
index 0000000..60ca169
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e b/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e
new file mode 100644
index 0000000..c03bcad
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0 b/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0
new file mode 100644
index 0000000..52f2d5a
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca b/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca
new file mode 100644
index 0000000..83c522f
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456 b/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456
new file mode 100644
index 0000000..62d259b
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774 b/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774
new file mode 100644
index 0000000..db78b75
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774
Binary files differ
diff --git a/media/libstagefright/timedtext/TEST_MAPPING b/media/libstagefright/timedtext/TEST_MAPPING
index 35a5b11..f011d04 100644
--- a/media/libstagefright/timedtext/TEST_MAPPING
+++ b/media/libstagefright/timedtext/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/libstagefright/timedtext
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "TimedTextUnitTest" }
   ]
 }
diff --git a/media/libstagefright/timedtext/test/fuzzer/Android.bp b/media/libstagefright/timedtext/test/fuzzer/Android.bp
index 6590ebb..8724d51 100644
--- a/media/libstagefright/timedtext/test/fuzzer/Android.bp
+++ b/media/libstagefright/timedtext/test/fuzzer/Android.bp
@@ -48,8 +48,16 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback@google.com",
         ],
-        componentid: 155276,
+        componentid: 42195,
+        hotlists: [
+            "4593311",
+        ],
+        description: "This fuzzer targets the APIs of libstagefright_timedtext",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/libstagefright/webm/Android.bp b/media/libstagefright/webm/Android.bp
index 6ed3e0e..723131d 100644
--- a/media/libstagefright/webm/Android.bp
+++ b/media/libstagefright/webm/Android.bp
@@ -10,8 +10,6 @@
 cc_library_static {
     name: "libstagefright_webm",
 
-    cppflags: ["-D__STDINT_LIMITS"],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index 7d1442b..e20a08d 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -354,6 +354,17 @@
         }
 
         MetaDataBase &md = buffer->meta_data();
+
+        if (mType == kVideoType) {
+            int32_t isCodecConfig = 0;
+            if (md.findInt32(kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig) {
+                ALOGI("ignoring CSD for video track");
+                buffer->release();
+                buffer = NULL;
+                continue;
+            }
+        }
+
         CHECK(md.findInt64(kKeyTime, &timestampUs));
         if (mStartTimeUs == kUninitialized) {
             mStartTimeUs = timestampUs;
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index ca862b0..151ce7c 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -290,7 +290,7 @@
     // Max file duration limit is set
     if (mMaxFileDurationLimitUs != 0) {
         if (bitRate > 0) {
-            int64_t size2 = ((mMaxFileDurationLimitUs * bitRate * 6) / 1000 / 8000000);
+            int64_t size2 = ((mMaxFileDurationLimitUs / 1000) * bitRate * 6) / 8000000;
             if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
                 // When both file size and duration limits are set,
                 // we use the smaller limit of the two.
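
The one-line WebmWriter change above only reorders the arithmetic: dividing the microsecond duration by 1000 before multiplying by the bit rate keeps the intermediate product within int64_t range. A small sketch with hypothetical numbers (4 hours of content at 100 Mbps, neither value taken from the patch) shows the difference:

    #include <cstdint>
    #include <cstdio>

    int main() {
        int64_t durationUs = 14'400'000'000LL;  // 4 hours in microseconds
        int64_t bitRate = 100'000'000LL;        // 100 Mbps
        // Old order: durationUs * bitRate * 6 ~= 8.6e18, close to INT64_MAX (~9.2e18),
        // so slightly longer durations or higher bit rates overflow.
        // New order: (durationUs / 1000) * bitRate * 6 ~= 8.6e15, comfortably in range.
        int64_t sizeBytes = ((durationUs / 1000) * bitRate * 6) / 8000000;
        printf("estimated size limit: %lld bytes\n", (long long)sizeBytes);  // ~1.08e9
        return 0;
    }
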
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 2f204f9..2c5e81a 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -16,9 +16,6 @@
 cc_library_shared {
     name: "libstagefright_xmlparser",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 19f9549..6ea40e3 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -1,4 +1,3 @@
-
 package {
     default_applicable_licenses: ["frameworks_av_media_mediaserver_license"],
 }
diff --git a/media/mediaserver/manifest_media_c2_software.xml b/media/mediaserver/manifest_media_c2_software.xml
index d7fb1a0..31dfafb 100644
--- a/media/mediaserver/manifest_media_c2_software.xml
+++ b/media/mediaserver/manifest_media_c2_software.xml
@@ -1,5 +1,5 @@
 <manifest version="1.0" type="framework">
-    <hal>
+    <hal format="hidl" max-level="8">
         <name>android.hardware.media.c2</name>
         <transport>hwbinder</transport>
         <version>1.2</version>
diff --git a/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp b/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp
new file mode 100644
index 0000000..c208666
--- /dev/null
+++ b/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+#define LOG_TAG "AidlGraphicBufferSource"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <media/stagefright/bqhelper/ComponentWrapper.h>
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+
+namespace android::media {
+
+namespace {
+
+class AidlComponentWrapper : public ComponentWrapper {
+public:
+    explicit AidlComponentWrapper(const sp<IAidlNodeWrapper> &node)
+        : mAidlNode(node) {}
+    virtual ~AidlComponentWrapper() = default;
+
+    status_t submitBuffer(
+            int32_t bufferId, const sp<GraphicBuffer> &buffer,
+            int64_t timestamp, int fenceFd) override {
+        return mAidlNode->submitBuffer(
+                bufferId, BUFFERFLAG_ENDOFFRAME, buffer, timestamp, fenceFd);
+    }
+
+    status_t submitEos(int32_t bufferId) override {
+        return mAidlNode->submitBuffer(
+            bufferId, BUFFERFLAG_ENDOFFRAME | BUFFERFLAG_EOS);
+    }
+
+    void dispatchDataSpaceChanged(
+            int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+        mAidlNode->dispatchDataSpaceChanged(dataSpace, aspects, pixelFormat);
+    }
+
+private:
+    sp<IAidlNodeWrapper> mAidlNode;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AidlComponentWrapper);
+};
+
+}  // namespace
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onStart() {
+    status_t err = start();
+    return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+            ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onStop() {
+    status_t err = stop();
+    return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+            ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onRelease(){
+    status_t err = release();
+    return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+            ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+status_t AidlGraphicBufferSource::configure(
+        const sp<IAidlNodeWrapper>& aidlNode,
+        int32_t dataSpace,
+        int32_t bufferCount,
+        uint32_t frameWidth,
+        uint32_t frameHeight,
+        uint64_t consumerUsage) {
+    if (aidlNode == NULL) {
+        return BAD_VALUE;
+    }
+
+    return GraphicBufferSource::configure(
+            new AidlComponentWrapper(aidlNode), dataSpace, bufferCount,
+            frameWidth, frameHeight, consumerUsage);
+}
+
+}  // namespace android::media
diff --git a/media/module/aidlpersistentsurface/Android.bp b/media/module/aidlpersistentsurface/Android.bp
new file mode 100644
index 0000000..5c1a010
--- /dev/null
+++ b/media/module/aidlpersistentsurface/Android.bp
@@ -0,0 +1,69 @@
+aidl_interface {
+    name: "graphicbuffersource-aidl",
+    unstable: true,
+    local_include_dir: "aidl",
+    min_sdk_version: "29",
+    srcs: [
+        "aidl/android/media/AidlColorAspects.aidl",
+        "aidl/android/media/IAidlGraphicBufferSource.aidl",
+        "aidl/android/media/IAidlBufferSource.aidl",
+        "aidl/android/media/IAidlNode.aidl",
+    ],
+    headers: [
+        "HardwareBuffer_aidl",
+    ],
+    imports: [
+        "android.hardware.graphics.common-V5",
+    ],
+    include_dirs: [
+        "frameworks/native/aidl/gui",
+    ],
+    backend: {
+        cpp: {
+            enabled: false,
+        },
+        java: {
+            enabled: false,
+        },
+        ndk: {
+            enabled: true,
+            additional_shared_libraries: [
+                "libnativewindow",
+            ],
+        },
+        rust: {
+            // No users, and no rust implementation of android.os.Surface yet
+            enabled: false,
+        },
+    },
+}
+
+cc_library_shared {
+    name: "libstagefright_graphicbuffersource_aidl",
+    min_sdk_version: "29",
+    srcs: [
+        "AidlGraphicBufferSource.cpp",
+        "wrapper/WAidlGraphicBufferSource.cpp",
+    ],
+    export_include_dirs: [
+        "include",
+    ],
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    export_header_lib_headers: [
+        "media_plugin_headers",
+    ],
+    shared_libs: [
+        "android.hardware.graphics.common-V5-ndk",
+        "graphicbuffersource-aidl-ndk",
+        "libbinder_ndk",
+        "libcutils",
+        "libgui",
+        "liblog",
+        "libnativewindow",
+        "libstagefright_bufferqueue_helper",
+        "libutils",
+    ],
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl b/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl
new file mode 100644
index 0000000..4edd6ce
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Ref: frameworks/native/include/media/hardware/VideoAPI.h
+ *
+ * Framework defined color aspects. These are based mainly on ISO 23001-8 spec. As this standard
+ * continues to evolve, new values may be defined in the future. Use OTHER for these future values
+ * as well as for values not listed here, as those are not supported by the framework.
+ */
+parcelable AidlColorAspects {
+    @Backing(type="int")
+    enum Range {
+        UNSPECIFIED,  // Unspecified
+        FULL,         // Full range
+        LIMITED,      // Limited range (if defined), or not full range
+
+        OTHER = 0xff, // Not one of the above values
+    }
+
+    // Color primaries
+    @Backing(type="int")
+    enum Primaries {
+        UNSPECIFIED,  // Unspecified
+        BT709_5,      // Rec.ITU-R BT.709-5 or equivalent
+        BT470_6M,     // Rec.ITU-R BT.470-6 System M or equivalent
+        BT601_6_625,  // Rec.ITU-R BT.601-6 625 or equivalent
+        BT601_6_525,  // Rec.ITU-R BT.601-6 525 or equivalent
+        GENERIC_FILM, // Generic Film
+        BT2020,       // Rec.ITU-R BT.2020 or equivalent
+
+        OTHER = 0xff, // Not one of the above values
+    }
+
+    // Transfer characteristics
+    @Backing(type="int")
+    enum Transfer {
+        UNSPECIFIED,  // Unspecified
+        LINEAR,       // Linear transfer characteristics
+        SRGB,         // sRGB or equivalent
+        SMPTE170M,    // SMPTE 170M or equivalent (e.g. BT.601/709/2020)
+        GAMMA22,      // Assumed display gamma 2.2
+        GAMMA28,      // Assumed display gamma 2.8
+        ST2084,       // SMPTE ST 2084 for 10/12/14/16 bit systems
+        HLG,          // ARIB STD-B67 hybrid-log-gamma
+
+        // values unlikely to be required by Android follow here
+        SMPTE240M = 0x40, // SMPTE 240M
+        XVYCC,        // IEC 61966-2-4
+        BT1361,       // Rec.ITU-R BT.1361 extended gamut
+        ST428,        // SMPTE ST 428-1
+
+        OTHER = 0xff, // Not one of the above values
+    }
+
+    // YUV <-> RGB conversion
+    @Backing(type="int")
+    enum MatrixCoeffs {
+        UNSPECIFIED,    // Unspecified
+        BT709_5,        // Rec.ITU-R BT.709-5 or equivalent
+        BT470_6M,       // KR=0.30, KB=0.11 or equivalent
+        BT601_6,        // Rec.ITU-R BT.601-6 625 or equivalent
+        SMPTE240M,      // SMPTE 240M or equivalent
+        BT2020,         // Rec.ITU-R BT.2020 non-constant luminance
+        BT2020CONSTANT, // Rec.ITU-R BT.2020 constant luminance
+
+        OTHER = 0xff,   // Not one of the above values
+    }
+
+    Range range;
+    Primaries primaries;
+    Transfer transfer;
+    MatrixCoeffs matrixCoeffs;
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
new file mode 100644
index 0000000..d428e99
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.ParcelFileDescriptor;
+
+/**
+ * Binder interface for controlling and handling IAidlGraphicBufferSource
+ * from the process which owns IAidlNode.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder
+ */
+interface IAidlBufferSource {
+    /**
+     * This is called when IAidlGraphicBufferSource can start handling buffers.
+     * If we already have buffers of data sitting in the BufferQueue,
+     * this will send them to the codec.
+     */
+    void onStart();
+
+    /**
+     * This is called when IAidlGraphicBufferSource indicates that
+     * the codec is meant to return all buffers back to the client for them
+     * to be freed. Do NOT submit any more buffers to the component.
+     */
+    void onStop();
+
+    /**
+     * This is called when IAidlGraphicBufferSource indicates that
+     * we are shutting down.
+     */
+    void onRelease();
+
+    /**
+     * A "codec buffer", i.e. a buffer that can be used to pass data into
+     * the encoder, has been allocated.
+     */
+    void onInputBufferAdded(int bufferID);
+
+    /**
+     * If we have a BQ buffer available,
+     * fill it with a new frame of data; otherwise, just mark it as available.
+     *
+     * fence contains the fence's fd that the callee should wait on before
+     * using the buffer (or pass on to the user of the buffer, if the user supports
+     * fences). Callee takes ownership of the fence fd even if it fails.
+     */
+    void onInputBufferEmptied(int bufferID, in @nullable ParcelFileDescriptor fence);
+}
+
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl
new file mode 100644
index 0000000..6642e89
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.graphics.common.Dataspace;
+import android.media.AidlColorAspects;
+import android.media.IAidlNode;
+
+/**
+ * Binder interface for configuring/controlling a Codec2 AIDL encoder instance
+ * on behalf of a Surface which will produce input buffers.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder.
+ */
+interface IAidlGraphicBufferSource {
+    void configure(IAidlNode node, Dataspace dataSpace);
+    void setSuspend(boolean suspend, long suspendTimeUs);
+    void setRepeatPreviousFrameDelayUs(long repeatAfterUs);
+    void setMaxFps(float maxFps);
+    void setTimeLapseConfig(double fps, double captureFps);
+    void setStartTimeUs(long startTimeUs);
+    void setStopTimeUs(long stopTimeUs);
+    long getStopTimeOffsetUs();
+    void setColorAspects(in AidlColorAspects aspects);
+    void setTimeOffsetUs(long timeOffsetUs);
+    void signalEndOfInputStream();
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
new file mode 100644
index 0000000..fe3caf3
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.HardwareBuffer;
+import android.media.IAidlBufferSource;
+import android.os.ParcelFileDescriptor;
+
+/**
+ * Binder interface abstraction for codec2 encoder instance.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder.
+ */
+interface IAidlNode {
+
+    /**
+     * Input buffer parameters retrieved from the Node.
+     */
+    parcelable InputBufferParams {
+        int bufferCountActual;
+        int frameWidth;
+        int frameHeight;
+    }
+
+    void freeNode();
+    long getConsumerUsage();
+    InputBufferParams getInputBufferParams();
+    void setConsumerUsage(long usage);
+    void setAdjustTimestampGapUs(int gapUs);
+    void setInputSurface(IAidlBufferSource bufferSource);
+    void submitBuffer(
+            int buffer,
+            in @nullable HardwareBuffer hBuffer,
+            int flags,
+            long timestampUs,
+            in @nullable ParcelFileDescriptor fence);
+    void onDataSpaceChanged(int dataSpace, int aspects, int pixelFormat);
+}
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
new file mode 100644
index 0000000..85de688
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <media/stagefright/foundation/ABase.h>
+
+#include <media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h>
+
+#include <utils/Errors.h>
+
+#include <aidl/android/media/BnAidlBufferSource.h>
+
+namespace android::media {
+
+/*
+ * This class is used to feed codec encoders from a Surface via BufferQueue or
+ * HW producer using AIDL binder interfaces.
+ *
+ * See media/stagefright/bqhelper/GraphicBufferSource.h for documentation.
+ */
+class AidlGraphicBufferSource : public GraphicBufferSource {
+public:
+    AidlGraphicBufferSource() = default;
+    virtual ~AidlGraphicBufferSource() = default;
+
+    // For IAidlBufferSource interface
+    // ------------------------------
+
+    // When we can start handling buffers.  If we already have buffers of data
+    // sitting in the BufferQueue, this will send them to the codec.
+    ::ndk::ScopedAStatus onStart();
+
+    // When the codec is meant to return all buffers back to the client for
+    // them to be freed. Do NOT submit any more buffers to the component.
+    ::ndk::ScopedAStatus onStop();
+
+    // When we are shutting down.
+    ::ndk::ScopedAStatus onRelease();
+
+    // Rest of the interface in GraphicBufferSource.
+
+    // IAidlGraphicBufferSource interface
+    // ------------------------------
+
+    // Configure the buffer source to be used with a codec2 aidl node given
+    // parameters.
+    status_t configure(
+        const sp<IAidlNodeWrapper> &aidlNode,
+        int32_t dataSpace,
+        int32_t bufferCount,
+        uint32_t frameWidth,
+        uint32_t frameHeight,
+        uint64_t consumerUsage);
+
+    // Rest of the interface in GraphicBufferSource.
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(AidlGraphicBufferSource);
+};
+
+}  // namespace android::media
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
new file mode 100644
index 0000000..364efe2
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+namespace android::media {
+
+// Node definitions for aidl input surface.
+//
+// Copied from the non-AIDL implementation.
+// Definitions that the input surface implementation does not need are omitted.
+
+enum C2NodeBufferFlag : uint32_t {
+    BUFFERFLAG_EOS = 1,
+    BUFFERFLAG_ENDOFFRAME = (1 << 4)
+};
+
+}  // namespace android::media
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h
new file mode 100644
index 0000000..f23b5e4
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <utils/RefBase.h>
+#include <utils/StrongPointer.h>
+#include <ui/GraphicBuffer.h>
+
+#include <stdint.h>
+
+namespace android::media {
+
+struct IAidlNodeWrapper : public RefBase {
+    virtual status_t submitBuffer(
+            int32_t bufferId, uint32_t flags,
+            const sp<GraphicBuffer> &buffer = nullptr,
+            int64_t timestamp = 0, int fenceFd = -1) = 0;
+    virtual void dispatchDataSpaceChanged(
+            int32_t dataSpace, int32_t aspects, int32_t pixelFormat) = 0;
+};
+
+}  // namespace android::media
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h
new file mode 100644
index 0000000..dcb83f6
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <ui/GraphicBuffer.h>
+#include <utils/Errors.h>
+
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/hardware/graphics/common/PixelFormat.h>
+#include <aidl/android/media/AidlColorAspects.h>
+
+namespace android::media::aidl_conversion {
+
+inline status_t fromAidlStatus(const ::ndk::ScopedAStatus &status) {
+    if (!status.isOk()) {
+        if (status.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            return static_cast<status_t>(status.getServiceSpecificError());
+        } else {
+            return static_cast<status_t>(FAILED_TRANSACTION);
+        }
+    }
+    return NO_ERROR;
+}
+
+inline ::ndk::ScopedAStatus toAidlStatus(status_t status) {
+    if (status == NO_ERROR) {
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(status);
+}
+
+inline int32_t compactFromAidlColorAspects(::aidl::android::media::AidlColorAspects const& s) {
+    return static_cast<int32_t>(
+            (static_cast<uint32_t>(s.range) << 24) |
+            (static_cast<uint32_t>(s.primaries) << 16) |
+            (static_cast<uint32_t>(s.transfer)) |
+            (static_cast<uint32_t>(s.matrixCoeffs) << 8));
+}
+
+inline int32_t rawFromAidlDataspace(
+        ::aidl::android::hardware::graphics::common::Dataspace const& s) {
+    return static_cast<int32_t>(s);
+}
+
+}  // namespace android::media::aidl_conversion
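
The helpers above are small enough to verify by hand: fromAidlStatus() collapses a binder failure into FAILED_TRANSACTION unless a service-specific error code is present, and compactFromAidlColorAspects() packs the four enums into one int32_t with range in bits 24-31, primaries in bits 16-23, matrixCoeffs in bits 8-15, and transfer in the low byte. A standalone sketch of that packing, using enum values from AidlColorAspects.aidl above and mirroring the helper with plain integers for illustration:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // FULL range = 1, BT709_5 primaries = 1, SMPTE170M transfer = 3, BT709_5 matrix = 1.
        uint32_t range = 1, primaries = 1, transfer = 3, matrixCoeffs = 1;
        int32_t compact = static_cast<int32_t>(
                (range << 24) | (primaries << 16) | (matrixCoeffs << 8) | transfer);
        printf("compact aspects = 0x%08x\n", static_cast<uint32_t>(compact));  // 0x01010103
        return 0;
    }
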
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h
new file mode 100644
index 0000000..f4d7fe8
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <utils/RefBase.h>
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/media/IAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+#include <aidl/android/media/BnAidlGraphicBufferSource.h>
+
+namespace android::media {
+
+class AidlGraphicBufferSource;
+
+using ::android::sp;
+
+/**
+ * Aidl wrapper implementation for IAidlGraphicBufferSource
+ */
+class WAidlGraphicBufferSource : public ::aidl::android::media::BnAidlGraphicBufferSource {
+public:
+
+    struct WAidlNodeWrapper;
+    class WAidlBufferSource;
+
+    sp<AidlGraphicBufferSource> mBase;
+    std::shared_ptr<::aidl::android::media::IAidlBufferSource> mBufferSource;
+
+    WAidlGraphicBufferSource(sp<AidlGraphicBufferSource> const& base);
+    ::ndk::ScopedAStatus configure(
+            const std::shared_ptr<::aidl::android::media::IAidlNode>& node,
+            aidl::android::hardware::graphics::common::Dataspace dataspace) override;
+    ::ndk::ScopedAStatus setSuspend(bool suspend, int64_t timeUs) override;
+    ::ndk::ScopedAStatus setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) override;
+    ::ndk::ScopedAStatus setMaxFps(float maxFps) override;
+    ::ndk::ScopedAStatus setTimeLapseConfig(double fps, double captureFps) override;
+    ::ndk::ScopedAStatus setStartTimeUs(int64_t startTimeUs) override;
+    ::ndk::ScopedAStatus setStopTimeUs(int64_t stopTimeUs) override;
+    ::ndk::ScopedAStatus getStopTimeOffsetUs(int64_t *_aidl_return) override;
+    ::ndk::ScopedAStatus setColorAspects(
+            const ::aidl::android::media::AidlColorAspects& aspects) override;
+    ::ndk::ScopedAStatus setTimeOffsetUs(int64_t timeOffsetUs) override;
+    ::ndk::ScopedAStatus signalEndOfInputStream() override;
+};
+
+}  // namespace android::media
diff --git a/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
new file mode 100644
index 0000000..a5c72d6
--- /dev/null
+++ b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
@@ -0,0 +1,226 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "WAidlGraphicBufferSource"
+#include <android/hardware_buffer_aidl.h>
+#include <private/android/AHardwareBufferHelpers.h>
+#include <utils/Log.h>
+
+#include <aidl/android/media/BnAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+
+namespace android::media {
+using ::android::binder::unique_fd;
+using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::aidl::android::hardware::graphics::common::Dataspace;
+using ::aidl::android::media::AidlColorAspects;
+using ::aidl::android::media::IAidlNode;
+using ::aidl::android::media::BnAidlBufferSource;
+
+// Conversion
+using ::android::media::aidl_conversion::fromAidlStatus;
+using ::android::media::aidl_conversion::toAidlStatus;
+using ::android::media::aidl_conversion::compactFromAidlColorAspects;
+using ::android::media::aidl_conversion::rawFromAidlDataspace;
+
+struct WAidlGraphicBufferSource::WAidlNodeWrapper : public IAidlNodeWrapper {
+    std::shared_ptr<IAidlNode> mNode;
+
+    WAidlNodeWrapper(const std::shared_ptr<IAidlNode> &node): mNode(node) {
+    }
+
+    virtual status_t submitBuffer(
+            int32_t bufferId, uint32_t flags,
+            const sp<GraphicBuffer> &buffer,
+            int64_t timestamp, int fenceFd) override {
+        ::ndk::ScopedFileDescriptor fence(fenceFd);
+        if (buffer.get()) {
+            ::aidl::android::hardware::HardwareBuffer hBuffer;
+            AHardwareBuffer *ahwBuffer = AHardwareBuffer_from_GraphicBuffer(buffer.get());
+            AHardwareBuffer_acquire(ahwBuffer);
+            hBuffer.reset(ahwBuffer);
+
+            return fromAidlStatus(mNode->submitBuffer(
+                    bufferId,
+                    std::move(hBuffer),
+                    flags,
+                    timestamp,
+                    fence));
+        }
+
+        return fromAidlStatus(mNode->submitBuffer(
+              bufferId,
+              {},
+              flags,
+              timestamp,
+              fence));
+    }
+
+    virtual void dispatchDataSpaceChanged(
+            int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+        ::ndk::ScopedAStatus err = mNode->onDataSpaceChanged(
+                dataSpace, aspects, pixelFormat);
+        status_t status = fromAidlStatus(err);
+        if (status != NO_ERROR) {
+            ALOGE("WAidlNodeWrapper failed to change dataspace (%d): "
+                    "dataSpace = %ld, aspects = %ld, pixelFormat = %ld",
+                    static_cast<int>(status),
+                    static_cast<long>(dataSpace),
+                    static_cast<long>(aspects),
+                    static_cast<long>(pixelFormat));
+        }
+    }
+};
+
+class WAidlGraphicBufferSource::WAidlBufferSource : public BnAidlBufferSource {
+    sp<AidlGraphicBufferSource> mSource;
+
+public:
+    WAidlBufferSource(const sp<AidlGraphicBufferSource> &source): mSource(source) {
+    }
+
+    ::ndk::ScopedAStatus onStart() override {
+        mSource->onStart();
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    ::ndk::ScopedAStatus onStop() override {
+        mSource->onStop();
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    ::ndk::ScopedAStatus onRelease() override {
+        mSource->onRelease();
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    ::ndk::ScopedAStatus onInputBufferAdded(int32_t bufferId) override {
+        mSource->onInputBufferAdded(bufferId);
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    ::ndk::ScopedAStatus onInputBufferEmptied(
+            int32_t bufferId, const ::ndk::ScopedFileDescriptor& fence) override {
+        mSource->onInputBufferEmptied(bufferId, ::dup(fence.get()));
+        return ::ndk::ScopedAStatus::ok();
+    }
+};
+
+// WAidlGraphicBufferSource
+WAidlGraphicBufferSource::WAidlGraphicBufferSource(
+        sp<AidlGraphicBufferSource> const& base) :
+    mBase(base),
+    mBufferSource(::ndk::SharedRefBase::make<WAidlBufferSource>(base)) {
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::configure(
+        const std::shared_ptr<IAidlNode>& node, Dataspace dataspace) {
+    if (node == nullptr) {
+        return toAidlStatus(BAD_VALUE);
+    }
+
+    // Call setInputSurface() first; the node will try to enable metadata
+    // mode on input and perform the necessary error checking. If this
+    // fails, we can't use this input surface on the node.
+    ::ndk::ScopedAStatus err = node->setInputSurface(mBufferSource);
+    status_t fnStatus = fromAidlStatus(err);
+    if (fnStatus != NO_ERROR) {
+        ALOGE("Unable to set input surface: %d", fnStatus);
+        return err;
+    }
+
+    // Use consumer usage bits queried from the encoder, but always add
+    // HW_VIDEO_ENCODER for backward compatibility.
+    int64_t consumerUsage;
+    fnStatus = OK;
+    err = node->getConsumerUsage(&consumerUsage);
+    fnStatus = fromAidlStatus(err);
+    if (fnStatus != NO_ERROR) {
+        if (fnStatus == FAILED_TRANSACTION) {
+            return err;
+        }
+        consumerUsage = 0;
+    }
+
+    IAidlNode::InputBufferParams rDef;
+    err = node->getInputBufferParams(&rDef);
+    fnStatus = fromAidlStatus(err);
+    if (fnStatus != NO_ERROR) {
+        ALOGE("Failed to get port definition: %d", fnStatus);
+        return toAidlStatus(fnStatus);
+    }
+
+    return toAidlStatus(mBase->configure(
+            new WAidlNodeWrapper(node),
+            rawFromAidlDataspace(dataspace),
+            rDef.bufferCountActual,
+            rDef.frameWidth,
+            rDef.frameHeight,
+            static_cast<uint64_t>(consumerUsage)));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setSuspend(
+        bool suspend, int64_t timeUs) {
+    return toAidlStatus(mBase->setSuspend(suspend, timeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setRepeatPreviousFrameDelayUs(
+        int64_t repeatAfterUs) {
+    return toAidlStatus(mBase->setRepeatPreviousFrameDelayUs(repeatAfterUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setMaxFps(float maxFps) {
+    return toAidlStatus(mBase->setMaxFps(maxFps));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setTimeLapseConfig(
+        double fps, double captureFps) {
+    return toAidlStatus(mBase->setTimeLapseConfig(fps, captureFps));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setStartTimeUs(int64_t startTimeUs) {
+    return toAidlStatus(mBase->setStartTimeUs(startTimeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setStopTimeUs(int64_t stopTimeUs) {
+    return toAidlStatus(mBase->setStopTimeUs(stopTimeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::getStopTimeOffsetUs(int64_t* _aidl_return) {
+    status_t status = mBase->getStopTimeOffsetUs(_aidl_return);
+    return toAidlStatus(status);
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setColorAspects(
+        const AidlColorAspects& aspects) {
+    return toAidlStatus(mBase->setColorAspects(compactFromAidlColorAspects(aspects)));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setTimeOffsetUs(int64_t timeOffsetUs) {
+    return toAidlStatus(mBase->setTimeOffsetUs(timeOffsetUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::signalEndOfInputStream() {
+    return toAidlStatus(mBase->signalEndOfInputStream());
+}
+
+
+}  // namespace android::media
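
A hedged usage sketch, not part of the patch: WAidlGraphicBufferSource is an NDK binder object, so it is created through ::ndk::SharedRefBase::make (the same pattern the constructor uses for WAidlBufferSource) rather than with operator new. The function name below is illustrative only; `source` is assumed to be a valid AidlGraphicBufferSource.

#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
#include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>

// Illustrative only: wrap an existing AidlGraphicBufferSource so it can be
// handed across the AIDL boundary as an IAidlGraphicBufferSource.
std::shared_ptr<::aidl::android::media::IAidlGraphicBufferSource> makeWrapper(
        const ::android::sp<::android::media::AidlGraphicBufferSource>& source) {
    return ::ndk::SharedRefBase::make<::android::media::WAidlGraphicBufferSource>(source);
}

The returned object can then be configured with an IAidlNode and a Dataspace via its configure() method, as implemented above.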
diff --git a/media/module/bqhelper/Android.bp b/media/module/bqhelper/Android.bp
index c4dadd0..f9b7dea 100644
--- a/media/module/bqhelper/Android.bp
+++ b/media/module/bqhelper/Android.bp
@@ -69,9 +69,6 @@
     name: "libstagefright_bufferqueue_helper",
     defaults: ["libstagefright_bufferqueue-defaults"],
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     min_sdk_version: "29",
 
     shared_libs: [ "libgui" ],
diff --git a/media/module/bufferpool/1.0/vts/multi.cpp b/media/module/bufferpool/1.0/vts/multi.cpp
index d8cc285..21f47d3 100644
--- a/media/module/bufferpool/1.0/vts/multi.cpp
+++ b/media/module/bufferpool/1.0/vts/multi.cpp
@@ -24,6 +24,7 @@
 #include <hidl/HidlSupport.h>
 #include <hidl/HidlTransportSupport.h>
 #include <hidl/LegacySupport.h>
+#include <hidl/ServiceManagement.h>
 #include <hidl/Status.h>
 #include <signal.h>
 #include <sys/types.h>
@@ -36,6 +37,7 @@
 
 using android::hardware::configureRpcThreadpool;
 using android::hardware::hidl_handle;
+using android::hardware::isHidlSupported;
 using android::hardware::media::bufferpool::V1_0::IClientManager;
 using android::hardware::media::bufferpool::V1_0::ResultStatus;
 using android::hardware::media::bufferpool::V1_0::implementation::BufferId;
@@ -178,6 +180,7 @@
   ResultStatus status;
   PipeMessage message;
 
+  if (!isHidlSupported()) GTEST_SKIP() << "HIDL is not supported on this device";
   ASSERT_TRUE(receiveMessage(mResultPipeFds, &message));
 
   android::sp<IClientManager> receiver = IClientManager::getService();
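
The same guard can be applied to any HIDL-dependent test case. A hedged sketch, assuming only what the hunk above already uses (android::hardware::isHidlSupported from <hidl/ServiceManagement.h> and gtest's GTEST_SKIP); the test name is illustrative:

#include <gtest/gtest.h>
#include <hidl/ServiceManagement.h>

TEST(BufferpoolHidlGuardSketch, SkipsWhenHidlIsAbsent) {
    if (!android::hardware::isHidlSupported()) {
        GTEST_SKIP() << "HIDL is not supported on this device";
    }
    // HIDL-dependent setup and assertions would follow here.
}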
diff --git a/media/module/bufferpool/2.0/AccessorImpl.cpp b/media/module/bufferpool/2.0/AccessorImpl.cpp
index 1d2562e..3d7f0c7 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/module/bufferpool/2.0/AccessorImpl.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "BufferPoolAccessor2.0"
 //#define LOG_NDEBUG 0
 
+#include <android-base/no_destructor.h>
+
 #include <sys/types.h>
 #include <stdint.h>
 #include <time.h>
@@ -147,7 +149,25 @@
 #endif
 
 static constexpr uint32_t kSeqIdMax = 0x7fffffff;
-uint32_t Accessor::Impl::sSeqId = time(nullptr) & kSeqIdMax;
+
+Accessor::Impl::ConnectionIdGenerator::ConnectionIdGenerator() {
+    mSeqId = static_cast<uint32_t>(time(nullptr) & kSeqIdMax);
+    mPid = static_cast<int32_t>(getpid());
+}
+
+ConnectionId Accessor::Impl::ConnectionIdGenerator::getConnectionId() {
+    uint32_t seq;
+    {
+        std::lock_guard<std::mutex> l(mLock);
+        seq = mSeqId;
+        if (mSeqId == kSeqIdMax) {
+            mSeqId = 0;
+        } else {
+            ++mSeqId;
+        }
+    }
+    return (int64_t)mPid << 32 | seq | kSeqIdVndkBit;
+}
 
 Accessor::Impl::Impl(
         const std::shared_ptr<BufferPoolAllocator> &allocator)
@@ -163,13 +183,14 @@
         uint32_t *pMsgId,
         const StatusDescriptor** statusDescPtr,
         const InvalidationDescriptor** invDescPtr) {
+    static ::android::base::NoDestructor<ConnectionIdGenerator> sConIdGenerator;
     sp<Connection> newConnection = new Connection();
     ResultStatus status = ResultStatus::CRITICAL_ERROR;
     {
         std::lock_guard<std::mutex> lock(mBufferPool.mMutex);
         if (newConnection) {
             int32_t pid = getpid();
-            ConnectionId id = (int64_t)pid << 32 | sSeqId | kSeqIdVndkBit;
+            ConnectionId id = sConIdGenerator->getConnectionId();
             status = mBufferPool.mObserver.open(id, statusDescPtr);
             if (status == ResultStatus::OK) {
                 newConnection->initialize(accessor, id);
@@ -179,11 +200,6 @@
                 mBufferPool.mConnectionIds.insert(id);
                 mBufferPool.mInvalidationChannel.getDesc(invDescPtr);
                 mBufferPool.mInvalidation.onConnect(id, observer);
-                if (sSeqId == kSeqIdMax) {
-                   sSeqId = 0;
-                } else {
-                    ++sSeqId;
-                }
             }
 
         }
@@ -609,7 +625,7 @@
         }
         if (ret == false) {
             ALOGW("buffer status message processing failure - message : %d connection : %lld",
-                  message.newStatus, (long long)message.connectionId);
+                  (int)message.newStatus, (long long)message.connectionId);
         }
     }
     messages.clear();
@@ -907,7 +923,7 @@
         std::map<const std::weak_ptr<Accessor::Impl>, nsecs_t, std::owner_less<>> &accessors,
         std::mutex &mutex,
         std::condition_variable &cv) {
-    std::list<const std::weak_ptr<Accessor::Impl>> evictList;
+    std::list<std::weak_ptr<Accessor::Impl>> evictList;
     while (true) {
         int expired = 0;
         int evicted = 0;
diff --git a/media/module/bufferpool/2.0/AccessorImpl.h b/media/module/bufferpool/2.0/AccessorImpl.h
index 3d39941..2366177 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.h
+++ b/media/module/bufferpool/2.0/AccessorImpl.h
@@ -77,7 +77,14 @@
 private:
     // ConnectionId = pid : (timestamp_created + seqId)
     // in order to guarantee uniqueness for each connection
-    static uint32_t sSeqId;
+    struct ConnectionIdGenerator {
+        int32_t mPid;
+        uint32_t mSeqId;
+        std::mutex mLock;
+
+        ConnectionIdGenerator();
+        ConnectionId getConnectionId();
+    };
 
     const std::shared_ptr<BufferPoolAllocator> mAllocator;
 
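
For reference, a small sketch (not part of the patch; helper names are hypothetical) of the 64-bit ConnectionId layout produced by getConnectionId() above: the caller's pid occupies the upper 32 bits, and the per-process sequence number, with kSeqIdVndkBit set, occupies the lower 32 bits, so both halves can be recovered for logging.

#include <cstdint>

// Hypothetical helpers; ConnectionId is an int64_t as in the bufferpool code.
inline int32_t pidFromConnectionId(int64_t id) {
    return static_cast<int32_t>(id >> 32);
}
inline uint32_t lowBitsFromConnectionId(int64_t id) {
    // Contains the sequence counter plus kSeqIdVndkBit.
    return static_cast<uint32_t>(id & 0xffffffff);
}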
diff --git a/media/module/bufferpool/2.0/Android.bp b/media/module/bufferpool/2.0/Android.bp
index 930b026..c40603c 100644
--- a/media/module/bufferpool/2.0/Android.bp
+++ b/media/module/bufferpool/2.0/Android.bp
@@ -21,6 +21,9 @@
     export_include_dirs: [
         "include",
     ],
+    header_libs: [
+        "libbase_headers",
+    ],
     shared_libs: [
         "libcutils",
         "libfmq",
@@ -60,7 +63,4 @@
     vendor_available: true,
     // TODO: b/147147992
     double_loadable: true,
-    vndk: {
-        enabled: true,
-    },
 }
diff --git a/media/module/codecs/amrnb/TEST_MAPPING b/media/module/codecs/amrnb/TEST_MAPPING
index 343d08a..306921f 100644
--- a/media/module/codecs/amrnb/TEST_MAPPING
+++ b/media/module/codecs/amrnb/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrnb
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "AmrnbDecoderTest"},
     { "name": "AmrnbEncoderTest"}
   ]
diff --git a/media/module/codecs/amrnb/common/include/abs_s.h b/media/module/codecs/amrnb/common/include/abs_s.h
deleted file mode 100644
index e92eaf4..0000000
--- a/media/module/codecs/amrnb/common/include/abs_s.h
+++ /dev/null
@@ -1,113 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Pathname: ./gsm-amr/c/include/abs_s.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for abs_s function.
-
- Description: Updated template to make it build for Symbian.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the abs_s function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef ABS_S_H
-#define ABS_S_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word16 abs_s(Word16 var1);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif  /* ABS_S_H */
-
-
diff --git a/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h b/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
index 8f0867a..8817621 100644
--- a/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
+++ b/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
@@ -120,15 +120,11 @@
     {
         Word32 L_sum;
 
-        L_sum = L_var1 + L_var2;
-
-        if ((L_var1 ^ L_var2) >= 0)
+        if (__builtin_add_overflow(L_var1, L_var2, &L_sum))
         {
-            if ((L_sum ^ L_var1) < 0)
-            {
-                L_sum = (L_var1 < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
+            // saturating...
+            L_sum = (L_var1 < 0) ? MIN_32 : MAX_32;
+            *pOverflow = 1;
         }
 
         return (L_sum);
@@ -160,15 +156,11 @@
     {
         Word32 L_diff;
 
-        L_diff = L_var1 - L_var2;
-
-        if ((L_var1 ^ L_var2) < 0)
+        if (__builtin_sub_overflow(L_var1, L_var2, &L_diff))
         {
-            if ((L_diff ^ L_var1) & MIN_32)
-            {
-                L_diff = (L_var1 < 0L) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
+            // saturating...
+            L_diff = (L_var1 < 0L) ? MIN_32 : MAX_32;
+            *pOverflow = 1;
         }
 
         return (L_diff);
@@ -204,16 +196,12 @@
         result = (Word32) var1 * var2;
         if (result != (Word32) 0x40000000L)
         {
-            L_sum = (result << 1) + L_var3;
-
             /* Check if L_sum and L_var_3 share the same sign */
-            if ((L_var3 ^ result) > 0)
+            if (__builtin_add_overflow((result << 1), L_var3, &L_sum))
             {
-                if ((L_sum ^ L_var3) < 0)
-                {
-                    L_sum = (L_var3 < 0) ? MIN_32 : MAX_32;
-                    *pOverflow = 1;
-                }
+                // saturating...
+                L_sum = (L_var3 < 0) ? MIN_32 : MAX_32;
+                *pOverflow = 1;
             }
         }
         else
@@ -345,14 +333,10 @@
         product32 = ((Word32) L_var1_hi * L_var2_lo) >> 15;
 
         /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
+        if (__builtin_add_overflow(L_product, (product32 << 1), &L_sum))
         {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
+            // saturating...
+            L_sum = (L_product < 0) ? MIN_32 : MAX_32;
         }
 
         L_product = L_sum;
@@ -361,14 +345,10 @@
         product32 = ((Word32) L_var1_lo * L_var2_hi) >> 15;
 
         /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
+        if (__builtin_add_overflow(L_product, (product32 << 1), &L_sum))
         {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
+            // saturating...
+            L_sum = (L_product < 0) ? MIN_32 : MAX_32;
         }
         return (L_sum);
     }
@@ -416,15 +396,11 @@
 
         result = ((Word32)L_var1_lo * var2) >> 15;
 
-        L_sum  =  L_product + (result << 1);
-
-        if ((L_product ^ result) > 0)
+        if (__builtin_add_overflow(L_product, (result << 1), &L_sum))
         {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
+            // saturating...
+            L_sum = (L_product < 0) ? MIN_32 : MAX_32;
+            *pOverflow = 1;
         }
         return (L_sum);
 
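
The hunks above all follow the same pattern: __builtin_add_overflow (or __builtin_sub_overflow) reports signed 32-bit overflow directly, replacing the old sign-comparison checks, and on overflow the result saturates to MIN_32 or MAX_32 according to the sign of the accumulated operand. A standalone sketch of that pattern, with illustrative names that are not part of the codec headers:

#include <cstdint>
#include <limits>

// Illustrative saturating 32-bit add mirroring the updated L_add_c logic.
inline int32_t saturating_add32(int32_t a, int32_t b, int& overflow) {
    int32_t sum;
    if (__builtin_add_overflow(a, b, &sum)) {
        // Saturate toward the sign of the first operand, as the codec code does.
        sum = (a < 0) ? std::numeric_limits<int32_t>::min()
                      : std::numeric_limits<int32_t>::max();
        overflow = 1;
    }
    return sum;
}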
diff --git a/media/module/codecs/amrnb/common/include/l_add.h b/media/module/codecs/amrnb/common/include/l_add.h
deleted file mode 100644
index 136b914..0000000
--- a/media/module/codecs/amrnb/common/include/l_add.h
+++ /dev/null
@@ -1,171 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_add.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_add function.
-
- Description: Changed function prototype declaration. A pointer to the overflow
-              flag is being passed in as a parameter instead of using global
-              data.
-
- Description: Updated template. Changed paramter name from overflow to
-              pOverflow
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_add function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_ADD_H
-#define L_ADD_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow)
-    {
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-        __asm
-        {
-            QADD result, L_var1, L_var2
-        }
-        return(result);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var1;
-        register Word32 rb = L_var2;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("qadd %0, %1, %2"
-             : "=r"(result)
-                             : "r"(ra), "r"(rb)
-                            );
-        return (result);
-
-    }
-
-#else /* C EQUIVALENT */
-
-
-    static inline Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow)
-    {
-        Word32 L_sum;
-
-        L_sum = L_var1 + L_var2;
-
-        if ((L_var1 ^ L_var2) >= 0)
-        {
-            if ((L_sum ^ L_var1) < 0)
-            {
-                L_sum = (L_var1 < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
-        }
-
-        return (L_sum);
-    }
-
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_ADD_H_ */
diff --git a/media/module/codecs/amrnb/common/include/l_add_c.h b/media/module/codecs/amrnb/common/include/l_add_c.h
deleted file mode 100644
index 3585a3c..0000000
--- a/media/module/codecs/amrnb/common/include/l_add_c.h
+++ /dev/null
@@ -1,115 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_add_c.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_add_c function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag and carry flag is passed into the
-              function. Updated template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_add_c function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_ADD_C_H
-#define L_ADD_C_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word32 L_add_c(Word32 L_var1, Word32 L_var2, Flag *pOverflow, Flag *pCarry);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_ADD_C_H_ */
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_mac.h b/media/module/codecs/amrnb/common/include/l_mac.h
deleted file mode 100644
index b4af3aa..0000000
--- a/media/module/codecs/amrnb/common/include/l_mac.h
+++ /dev/null
@@ -1,183 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
- Filename: /audio/gsm_amr/c/include/l_mac.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_mac function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: 1. Updated the function to include ARM and Linux-ARM assembly
-                 instructions.
-              2. Added OSCL_UNUSED_ARG(pOverflow) to remove compiler warnings.
-
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_mac function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_MAC_H
-#define L_MAC_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_mac(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-        Word32 L_sum;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm {SMULBB result, var1, var2}
-        __asm {QDADD L_sum, L_var3, result}
-        return (L_sum);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    static inline Word32 L_mac(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var3;
-        register Word32 rb = var1;
-        register Word32 rc = var2;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(result)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(rc)
-                             : "r"(ra), "r"(result)
-                            );
-
-        return (rc);
-    }
-
-#else /* C_EQUIVALENT */
-
-    __inline Word32 L_mac(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-        Word32 L_sum;
-        result = (Word32) var1 * var2;
-        if (result != (Word32) 0x40000000L)
-        {
-            L_sum = (result << 1) + L_var3;
-
-            /* Check if L_sum and L_var_3 share the same sign */
-            if ((L_var3 ^ result) > 0)
-            {
-                if ((L_sum ^ L_var3) < 0)
-                {
-                    L_sum = (L_var3 < 0) ? MIN_32 : MAX_32;
-                    *pOverflow = 1;
-                }
-            }
-        }
-        else
-        {
-            *pOverflow = 1;
-            L_sum = MAX_32;
-        }
-        return (L_sum);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_MAC_H_ */
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_msu.h b/media/module/codecs/amrnb/common/include/l_msu.h
deleted file mode 100644
index 3bafb00..0000000
--- a/media/module/codecs/amrnb/common/include/l_msu.h
+++ /dev/null
@@ -1,171 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
- Filename: /audio/gsm_amr/c/include/l_msu.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_msu function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_msu function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_MSU_H
-#define L_MSU_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-#include    "l_mult.h"
-#include    "l_sub.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_msu(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 product;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            SMULBB product, var1, var2
-            QDSUB  result, L_var3, product
-        }
-
-        return (result);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_msu(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var3;
-        register Word32 rb = var1;
-        register Word32 rc = var2;
-        Word32 product;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("qdsub %0, %1, %2"
-             : "=r"(result)
-                             : "r"(ra), "r"(product)
-                            );
-
-        return (result);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word32 L_msu(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-
-        result = L_mult(var1, var2, pOverflow);
-        result = L_sub(L_var3, result, pOverflow);
-
-        return (result);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_MSU_H_ */
diff --git a/media/module/codecs/amrnb/common/include/l_mult.h b/media/module/codecs/amrnb/common/include/l_mult.h
deleted file mode 100644
index 061df60..0000000
--- a/media/module/codecs/amrnb/common/include/l_mult.h
+++ /dev/null
@@ -1,178 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_mult.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_mult function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_mult function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_MULT_H
-#define L_MULT_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-        Word32 product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            SMULBB product, var1, var2
-            QADD   result, product, product
-        }
-
-        return (result);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = var1;
-        register Word32 rb = var2;
-        Word32 result;
-        Word32 product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product)
-                             : "r"(ra), "r"(rb)
-                            );
-
-        asm volatile("qadd %0, %1, %2"
-             : "=r"(result)
-                             : "r"(product), "r"(product)
-                            );
-
-        return(result);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word32 L_mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 L_product;
-
-        L_product = (Word32) var1 * var2;
-
-        if (L_product != (Word32) 0x40000000L)
-        {
-            L_product <<= 1;          /* Multiply by 2 */
-        }
-        else
-        {
-            *pOverflow = 1;
-            L_product = MAX_32;
-        }
-
-        return (L_product);
-    }
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_MULT_H */
-
diff --git a/media/module/codecs/amrnb/common/include/l_shl.h b/media/module/codecs/amrnb/common/include/l_shl.h
deleted file mode 100644
index 7b9fdb1..0000000
--- a/media/module/codecs/amrnb/common/include/l_shl.h
+++ /dev/null
@@ -1,116 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_shl.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_shl function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_shl function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_SHL_H
-#define L_SHL_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word32 L_shl(Word32 L_var1, Word16 var2, Flag *pOverflow);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_SHL_H_ */
-
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_shr.h b/media/module/codecs/amrnb/common/include/l_shr.h
deleted file mode 100644
index ef22073..0000000
--- a/media/module/codecs/amrnb/common/include/l_shr.h
+++ /dev/null
@@ -1,115 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
- Filename: /audio/gsm_amr/c/include/l_shr.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_shr function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_shr function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_SHR_H
-#define L_SHR_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word32 L_shr(Word32 L_var1, Word16 var2, Flag *pOverflow);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_SHR_H_ */
-
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_sub.h b/media/module/codecs/amrnb/common/include/l_sub.h
deleted file mode 100644
index 97d7538..0000000
--- a/media/module/codecs/amrnb/common/include/l_sub.h
+++ /dev/null
@@ -1,173 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_sub.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_sub function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_sub function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_SUB_H
-#define L_SUB_H
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_sub(Word32 L_var1, Word32 L_var2, Flag *pOverflow)
-    {
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            QSUB result, L_var1, L_var2
-        }
-
-        return(result);
-
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_sub(Word32 L_var1, Word32 L_var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var1;
-        register Word32 rb = L_var2;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("qsub %0, %1, %2"
-             : "=r"(result)
-                             : "r"(ra), "r"(rb)
-                            );
-
-        return (result);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word32 L_sub(register Word32 L_var1, register Word32 L_var2,
-                               register Flag *pOverflow)
-    {
-        Word32 L_diff;
-
-        L_diff = L_var1 - L_var2;
-
-        if ((L_var1 ^ L_var2) < 0)
-        {
-            if ((L_diff ^ L_var1) & MIN_32)
-            {
-                L_diff = (L_var1 < 0L) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
-        }
-
-        return (L_diff);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_SUB_H_ */
-
-
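The C fallback of the deleted L_sub() detects signed-subtraction overflow with the classic sign test: overflow is only possible when the operands' signs differ, and it occurred when the result's sign differs from the minuend's. A small sketch (int32_t assumed for Word32) checking that the pattern agrees with the compiler intrinsic that other hunks in this change migrate to:

```cpp
#include <cassert>
#include <cstdint>

// Deleted-style check: overflow is possible only when the signs differ, and is
// detected when the result's sign flips relative to L_var1 (MIN_32 == sign bit).
static bool sub_overflows_signtest(int32_t a, int32_t b) {
    int32_t diff = (int32_t)((uint32_t)a - (uint32_t)b);  // two's-complement wraparound
    return ((a ^ b) < 0) && (((diff ^ a) & INT32_MIN) != 0);
}

int main() {
    int32_t scratch;
    int32_t cases[][2] = {
        {INT32_MIN, 1}, {INT32_MAX, -1}, {100, 42}, {-5, INT32_MAX},
    };
    for (auto& c : cases) {
        // The sign test agrees with the intrinsic used elsewhere in this change.
        assert(sub_overflows_signtest(c[0], c[1]) ==
               __builtin_sub_overflow(c[0], c[1], &scratch));
    }
    return 0;
}
```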
diff --git a/media/module/codecs/amrnb/common/include/mpy_32.h b/media/module/codecs/amrnb/common/include/mpy_32.h
deleted file mode 100644
index 03f36b2..0000000
--- a/media/module/codecs/amrnb/common/include/mpy_32.h
+++ /dev/null
@@ -1,272 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/mpy_32.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Updated the function to include ARM and Linux-ARM assembly
-              instructions.
-
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the Mpy_32 function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef MPY_32_H
-#define MPY_32_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 Mpy_32(Word16 L_var1_hi,
-    Word16 L_var1_lo,
-    Word16 L_var2_hi,
-    Word16 L_var2_lo,
-    Flag   *pOverflow)
-
-    {
-        /*----------------------------------------------------------------------------
-        ; Define all local variables
-        ----------------------------------------------------------------------------*/
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 product32;
-
-        OSCL_UNUSED_ARG(pOverflow);
-        /*----------------------------------------------------------------------------
-        ; Function body here
-        ----------------------------------------------------------------------------*/
-        /* L_product = L_mult (L_var1_hi, L_var2_hi, pOverflow);*/
-
-        __asm {SMULBB L_product, L_var1_hi, L_var2_hi}
-        __asm {QDADD L_product, 0, L_product}
-        __asm {SMULBB product32, L_var1_hi, L_var2_lo}
-        product32 >>= 15;
-        __asm {QDADD L_sum, L_product, product32}
-        L_product = L_sum;
-        __asm {SMULBB product32, L_var1_lo, L_var2_hi}
-        product32 >>= 15;
-        __asm {QDADD L_sum, L_product, product32}
-        return (L_sum);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    static inline Word32 Mpy_32(Word16 L_var1_hi,
-                                Word16 L_var1_lo,
-                                Word16 L_var2_hi,
-                                Word16 L_var2_lo,
-                                Flag   *pOverflow)
-    {
-        register Word32 product32;
-        register Word32 L_sum;
-        register Word32 L_product, result;
-        register Word32 ra = L_var1_hi;
-        register Word32 rb = L_var1_lo;
-        register Word32 rc = L_var2_hi;
-        register Word32 rd = L_var2_lo;
-
-
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(ra), "r"(rc)
-                            );
-        asm volatile("mov %0, #0"
-             : "=r"(result)
-                    );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_sum)
-                             : "r"(result), "r"(L_product)
-                            );
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product32)
-                             : "r"(ra), "r"(rd)
-                            );
-
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(ra)
-                             : "r"(product32)
-                            );
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(L_sum), "r"(ra)
-                            );
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product32)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(rb)
-                             : "r"(product32)
-                            );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_sum)
-                             : "r"(L_product), "r"(rb)
-                            );
-
-        return (L_sum);
-    }
-
-#else /* C_EQUIVALENT */
-
-    __inline Word32 Mpy_32(Word16 L_var1_hi,
-                           Word16 L_var1_lo,
-                           Word16 L_var2_hi,
-                           Word16 L_var2_lo,
-                           Flag   *pOverflow)
-    {
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 product32;
-
-        OSCL_UNUSED_ARG(pOverflow);
-        L_product = (Word32) L_var1_hi * L_var2_hi;
-
-        if (L_product != (Word32) 0x40000000L)
-        {
-            L_product <<= 1;
-        }
-        else
-        {
-            L_product = MAX_32;
-        }
-
-        /* result = mult (L_var1_hi, L_var2_lo, pOverflow); */
-        product32 = ((Word32) L_var1_hi * L_var2_lo) >> 15;
-
-        /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
-        {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
-        }
-
-        L_product = L_sum;
-
-        /* result = mult (L_var1_lo, L_var2_hi, pOverflow); */
-        product32 = ((Word32) L_var1_lo * L_var2_hi) >> 15;
-
-        /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
-        {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
-        }
-
-        /*----------------------------------------------------------------------------
-        ; Return nothing or data or data pointer
-        ----------------------------------------------------------------------------*/
-        return (L_sum);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _MPY_32_H_ */
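Mpy_32() multiplies two Q31 values that the codec keeps split into 16-bit hi/lo halves (the usual DSP double-precision trick), accumulating the hi*hi, hi*lo and lo*hi partial products and dropping lo*lo. A rough standalone sketch of the splitting and recombination, assuming int32_t/int16_t stand-ins and ignoring saturation and the exact rounding of the reference L_Extract():

```cpp
#include <cassert>
#include <cstdint>

// Split a Q31 value into the hi/lo halves used by the deleted Mpy_32().
static void extract_hi_lo(int32_t x, int16_t* hi, int16_t* lo) {
    *hi = (int16_t)(x >> 16);
    *lo = (int16_t)((x - ((int32_t)*hi << 16)) >> 1);  // keep 15 of the low 16 bits
}

// Unsaturated recombination: 2*(hi1*hi2) + 2*((hi1*lo2)>>15) + 2*((lo1*hi2)>>15).
static int32_t mpy_32_ref(int16_t hi1, int16_t lo1, int16_t hi2, int16_t lo2) {
    int32_t acc = ((int32_t)hi1 * hi2) * 2;
    acc += ((int32_t)hi1 * lo2 >> 15) * 2;
    acc += ((int32_t)lo1 * hi2 >> 15) * 2;
    return acc;
}

int main() {
    int16_t hi1, lo1, hi2, lo2;
    extract_hi_lo(0x40000000, &hi1, &lo1);  // 0.5 in Q31
    extract_hi_lo(0x40000000, &hi2, &lo2);
    // 0.5 * 0.5 comes out as 0.25 in Q31; exact here because the lo parts are zero.
    assert(mpy_32_ref(hi1, lo1, hi2, lo2) == 0x20000000);
    return 0;
}
```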
diff --git a/media/module/codecs/amrnb/common/include/mpy_32_16.h b/media/module/codecs/amrnb/common/include/mpy_32_16.h
deleted file mode 100644
index 7eaa741..0000000
--- a/media/module/codecs/amrnb/common/include/mpy_32_16.h
+++ /dev/null
@@ -1,206 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
-  Filename: /audio/gsm_amr/c/include/mpy_32_16.h
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the Mpy_32_16 function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef MPY_32_16_H
-#define MPY_32_16_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 Mpy_32_16(Word16 L_var1_hi,
-    Word16 L_var1_lo,
-    Word16 var2,
-    Flag *pOverflow)
-    {
-
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm {SMULBB L_product, L_var1_hi, var2}
-        __asm {QDADD L_product, 0, L_product}
-        __asm {SMULBB result, L_var1_lo, var2}
-        result >>= 15;
-        __asm {QDADD L_sum, L_product, result}
-        return (L_sum);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    static inline Word32 Mpy_32_16(Word16 L_var1_hi,
-                                   Word16 L_var1_lo,
-                                   Word16 var2,
-                                   Flag *pOverflow)
-    {
-
-        register Word32 ra = L_var1_hi;
-        register Word32 rb = L_var1_lo;
-        register Word32 rc = var2;
-        Word32 result, L_product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(ra), "r"(rc)
-                            );
-        asm volatile("mov %0, #0"
-             : "=r"(result)
-                    );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(result), "r"(L_product)
-                            );
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(result)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(ra)
-                             : "r"(result)
-                            );
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(result)
-                             : "r"(L_product), "r"(ra)
-                            );
-
-        return (result);
-    }
-
-#else /* C_EQUIVALENT */
-    __inline Word32 Mpy_32_16(Word16 L_var1_hi,
-                              Word16 L_var1_lo,
-                              Word16 var2,
-                              Flag *pOverflow)
-    {
-
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 result;
-        L_product = (Word32) L_var1_hi * var2;
-
-        if (L_product != (Word32) 0x40000000L)
-        {
-            L_product <<= 1;
-        }
-        else
-        {
-            *pOverflow = 1;
-            L_product = MAX_32;
-        }
-
-        result = ((Word32)L_var1_lo * var2) >> 15;
-
-        L_sum  =  L_product + (result << 1);
-
-        if ((L_product ^ result) > 0)
-        {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
-        }
-        return (L_sum);
-
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _MPY_32_16_H_ */
-
-
diff --git a/media/module/codecs/amrnb/common/include/mult.h b/media/module/codecs/amrnb/common/include/mult.h
deleted file mode 100644
index 6927eba..0000000
--- a/media/module/codecs/amrnb/common/include/mult.h
+++ /dev/null
@@ -1,190 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/mult.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for mult function.
-
- Description: Changed prototype of the mult() function. Instead of using global
-              a pointer to overflow flag is now passed into the function.
-
- Description: Updated copyright information.
-              Updated variable name from "overflow" to "pOverflow" to match
-              with original function declaration.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the mult function.
-
-------------------------------------------------------------------------------
-*/
-
-#ifndef MULT_H
-#define MULT_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5)
-
-    __inline Word16 mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            SMULBB product, var1, var2
-            MOV    product, product, ASR #15
-            CMP    product, 0x7FFF
-            MOVGE  product, 0x7FFF
-        }
-
-        return ((Word16) product);
-    }
-
-#elif defined(PV_ARM_GCC_V5)
-
-    __inline Word16 mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = var1;
-        register Word32 rb = var2;
-        Word32 product;
-        Word32 temp = 0x7FFF;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product)
-                             : "r"(ra), "r"(rb)
-                            );
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(product)
-                             : "r"(product)
-                            );
-        asm volatile("cmp %0, %1"
-             : "=r"(product)
-                             : "r"(temp)
-                            );
-        asm volatile("movge %0, %1"
-             : "=r"(product)
-                             : "r"(temp)
-                            );
-
-        return ((Word16) product);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word16 mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 product;
-
-        product = ((Word32) var1 * var2) >> 15;
-
-        /* Saturate result (if necessary). */
-        /* var1 * var2 >0x00007fff is the only case */
-        /* that saturation occurs. */
-
-        if (product > 0x00007fffL)
-        {
-            *pOverflow = 1;
-            product = (Word32) MAX_16;
-        }
-
-
-        /* Return the product as a 16 bit value by type casting Word32 to Word16 */
-
-        return ((Word16) product);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif  /* _MULT_H_ */
-
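The single-precision counterpart mult() computes (var1 * var2) >> 15 and only has to saturate toward MAX_16, because -32768 * -32768 is the only input pair whose shifted product leaves the Q15 range. A short sketch with plain fixed-width types:

```cpp
#include <cassert>
#include <cstdint>

// Stand-in for the deleted mult(): Q15 x Q15 -> Q15 with positive saturation only.
static int16_t mult_ref(int16_t var1, int16_t var2, int* overflow) {
    int32_t product = ((int32_t)var1 * var2) >> 15;
    if (product > 0x7FFF) {   // only -32768 * -32768 can land here
        *overflow = 1;
        product = 0x7FFF;     // MAX_16
    }
    return (int16_t)product;
}

int main() {
    int ovf = 0;
    assert(mult_ref(0x4000, 0x4000, &ovf) == 0x2000);   // 0.5 * 0.5 = 0.25 in Q15
    assert(mult_ref(-32768, -32768, &ovf) == 0x7FFF && ovf == 1);
    return 0;
}
```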
diff --git a/media/module/codecs/amrnb/common/include/n_proc.h b/media/module/codecs/amrnb/common/include/n_proc.h
deleted file mode 100644
index e5738c1..0000000
--- a/media/module/codecs/amrnb/common/include/n_proc.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/* $Id $ */
-
-void proc_head(char *mes);
diff --git a/media/module/codecs/amrnb/enc/src/g_pitch.cpp b/media/module/codecs/amrnb/enc/src/g_pitch.cpp
index 5b80e2a..6f686fa 100644
--- a/media/module/codecs/amrnb/enc/src/g_pitch.cpp
+++ b/media/module/codecs/amrnb/enc/src/g_pitch.cpp
@@ -376,15 +376,11 @@
     {
         L_temp = ((Word32) * (p_xn++) * *(p_y1++));
         s1 = s;
-        s = s1 + L_temp;
 
-        if ((s1 ^ L_temp) > 0)
+        if (__builtin_add_overflow(s1, L_temp, &s))
         {
-            if ((s1 ^ s) < 0)
-            {
-                *pOverflow = 1;
-                break;
-            }
+            *pOverflow = 1;
+            break;
         }
     }
 
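The rewritten loop above replaces the two-level sign test with __builtin_add_overflow, which stores the wrapped sum and reports signed overflow in one step. A reduced sketch of the same accumulate-and-bail pattern, assuming int32_t/int16_t in place of the codec's Word32/Word16:

```cpp
#include <cstdint>
#include <cstdio>

// Accumulate products, stop at the first signed-overflowing addition and raise
// the codec-style overflow flag (sketch; not the codec's exact loop or types).
static int32_t dot_with_overflow_flag(const int16_t* x, const int16_t* y,
                                      int n, int* pOverflow) {
    int32_t s = 0;
    for (int i = 0; i < n; i++) {
        int32_t term = (int32_t)x[i] * y[i];
        if (__builtin_add_overflow(s, term, &s)) {  // s still receives the wrapped sum
            *pOverflow = 1;
            break;
        }
    }
    return s;
}

int main() {
    int16_t x[4] = {1000, 2000, 3000, 4000};
    int16_t y[4] = {100, 200, 300, 400};
    int ovf = 0;
    printf("sum=%d overflow=%d\n", dot_with_overflow_flag(x, y, 4, &ovf), ovf);
    return 0;
}
```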
diff --git a/media/module/codecs/amrwb/TEST_MAPPING b/media/module/codecs/amrwb/TEST_MAPPING
new file mode 100644
index 0000000..3f05c90
--- /dev/null
+++ b/media/module/codecs/amrwb/TEST_MAPPING
@@ -0,0 +1,6 @@
+{
+  "postsubmit": [
+    { "name": "AmrwbDecoderTest"},
+    { "name": "AmrwbEncoderTest"}
+  ]
+}
diff --git a/media/module/codecs/amrwb/dec/TEST_MAPPING b/media/module/codecs/amrwb/dec/TEST_MAPPING
deleted file mode 100644
index 0278d26..0000000
--- a/media/module/codecs/amrwb/dec/TEST_MAPPING
+++ /dev/null
@@ -1,10 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrwb
-{
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
-    { "name": "AmrwbDecoderTest"}
-
-  ]
-}
diff --git a/media/module/codecs/amrwb/enc/Android.bp b/media/module/codecs/amrwb/enc/Android.bp
index 8780136..04f36b5 100644
--- a/media/module/codecs/amrwb/enc/Android.bp
+++ b/media/module/codecs/amrwb/enc/Android.bp
@@ -79,67 +79,31 @@
     arch: {
         arm: {
             srcs: [
-                "src/asm/ARMV5E/convolve_opt.s",
-                "src/asm/ARMV5E/cor_h_vec_opt.s",
-                "src/asm/ARMV5E/Deemph_32_opt.s",
-                "src/asm/ARMV5E/Dot_p_opt.s",
-                "src/asm/ARMV5E/Filt_6k_7k_opt.s",
-                "src/asm/ARMV5E/Norm_Corr_opt.s",
-                "src/asm/ARMV5E/pred_lt4_1_opt.s",
-                "src/asm/ARMV5E/residu_asm_opt.s",
-                "src/asm/ARMV5E/scale_sig_opt.s",
-                "src/asm/ARMV5E/Syn_filt_32_opt.s",
-                "src/asm/ARMV5E/syn_filt_opt.s",
+                "src/asm/ARMV7/convolve_neon.s",
+                "src/asm/ARMV7/cor_h_vec_neon.s",
+                "src/asm/ARMV7/Deemph_32_neon.s",
+                "src/asm/ARMV7/Dot_p_neon.s",
+                "src/asm/ARMV7/Filt_6k_7k_neon.s",
+                "src/asm/ARMV7/Norm_Corr_neon.s",
+                "src/asm/ARMV7/pred_lt4_1_neon.s",
+                "src/asm/ARMV7/residu_asm_neon.s",
+                "src/asm/ARMV7/scale_sig_neon.s",
+                "src/asm/ARMV7/Syn_filt_32_neon.s",
+                "src/asm/ARMV7/syn_filt_neon.s",
             ],
 
             cflags: [
                 "-DARM",
+                "-DARMV7",
                 "-DASM_OPT",
+                // don't actually generate neon instructions, see bug 26932980
+                "-mfpu=vfpv3",
             ],
-            local_include_dirs: ["src/asm/ARMV5E"],
+            local_include_dirs: [
+                "src/asm/ARMV7",
+            ],
 
             instruction_set: "arm",
-
-            neon: {
-                exclude_srcs: [
-                    "src/asm/ARMV5E/convolve_opt.s",
-                    "src/asm/ARMV5E/cor_h_vec_opt.s",
-                    "src/asm/ARMV5E/Deemph_32_opt.s",
-                    "src/asm/ARMV5E/Dot_p_opt.s",
-                    "src/asm/ARMV5E/Filt_6k_7k_opt.s",
-                    "src/asm/ARMV5E/Norm_Corr_opt.s",
-                    "src/asm/ARMV5E/pred_lt4_1_opt.s",
-                    "src/asm/ARMV5E/residu_asm_opt.s",
-                    "src/asm/ARMV5E/scale_sig_opt.s",
-                    "src/asm/ARMV5E/Syn_filt_32_opt.s",
-                    "src/asm/ARMV5E/syn_filt_opt.s",
-                ],
-
-                srcs: [
-                    "src/asm/ARMV7/convolve_neon.s",
-                    "src/asm/ARMV7/cor_h_vec_neon.s",
-                    "src/asm/ARMV7/Deemph_32_neon.s",
-                    "src/asm/ARMV7/Dot_p_neon.s",
-                    "src/asm/ARMV7/Filt_6k_7k_neon.s",
-                    "src/asm/ARMV7/Norm_Corr_neon.s",
-                    "src/asm/ARMV7/pred_lt4_1_neon.s",
-                    "src/asm/ARMV7/residu_asm_neon.s",
-                    "src/asm/ARMV7/scale_sig_neon.s",
-                    "src/asm/ARMV7/Syn_filt_32_neon.s",
-                    "src/asm/ARMV7/syn_filt_neon.s",
-                ],
-
-                // don't actually generate neon instructions, see bug 26932980
-                cflags: [
-                    "-DARMV7",
-                    "-mfpu=vfpv3",
-                ],
-                local_include_dirs: [
-                    "src/asm/ARMV5E",
-                    "src/asm/ARMV7",
-                ],
-            },
-
         },
     },
 
diff --git a/media/module/codecs/amrwb/enc/TEST_MAPPING b/media/module/codecs/amrwb/enc/TEST_MAPPING
deleted file mode 100644
index 045e8b3..0000000
--- a/media/module/codecs/amrwb/enc/TEST_MAPPING
+++ /dev/null
@@ -1,10 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrwbenc
-{
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
-    { "name": "AmrwbEncoderTest"}
-
-  ]
-}
diff --git a/media/module/codecs/amrwb/enc/inc/basic_op.h b/media/module/codecs/amrwb/enc/inc/basic_op.h
index 80ad7f1..8e740b4 100644
--- a/media/module/codecs/amrwb/enc/inc/basic_op.h
+++ b/media/module/codecs/amrwb/enc/inc/basic_op.h
@@ -569,13 +569,10 @@
 static_vo Word32 L_add (Word32 L_var1, Word32 L_var2)
 {
     Word32 L_var_out;
-    L_var_out = L_var1 + L_var2;
-    if (((L_var1 ^ L_var2) & MIN_32) == 0)
+    if (__builtin_add_overflow(L_var1, L_var2, &L_var_out))
     {
-        if ((L_var_out ^ L_var1) & MIN_32)
-        {
-            L_var_out = (L_var1 < 0) ? MIN_32 : MAX_32;
-        }
+        // overflow: saturate toward the sign of L_var1
+        L_var_out = (L_var1 < 0) ? MIN_32 : MAX_32;
     }
     return (L_var_out);
 }
@@ -616,13 +613,10 @@
 static_vo Word32 L_sub (Word32 L_var1, Word32 L_var2)
 {
     Word32 L_var_out;
-    L_var_out = L_var1 - L_var2;
-    if (((L_var1 ^ L_var2) & MIN_32) != 0)
+    if (__builtin_sub_overflow(L_var1, L_var2, &L_var_out))
     {
-        if ((L_var_out ^ L_var1) & MIN_32)
-        {
-            L_var_out = (L_var1 < 0L) ? MIN_32 : MAX_32;
-        }
+        // overflow: saturate toward the sign of L_var1
+        L_var_out = (L_var1 < 0L) ? MIN_32 : MAX_32;
     }
     return (L_var_out);
 }
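With the intrinsics, the saturating basic ops collapse to a single branch: when the builtin reports overflow, the result is clamped toward the sign of the first operand. A compact sketch of the rewritten L_add()/L_sub() behaviour, assuming int32_t for Word32 and INT32_MIN/INT32_MAX for MIN_32/MAX_32:

```cpp
#include <cassert>
#include <cstdint>

// Detect signed overflow with the GCC/Clang builtins and saturate toward the
// sign of the first operand, mirroring the rewritten L_add()/L_sub().
static int32_t l_add_sat(int32_t a, int32_t b) {
    int32_t out;
    if (__builtin_add_overflow(a, b, &out)) {
        out = (a < 0) ? INT32_MIN : INT32_MAX;
    }
    return out;
}

static int32_t l_sub_sat(int32_t a, int32_t b) {
    int32_t out;
    if (__builtin_sub_overflow(a, b, &out)) {
        out = (a < 0) ? INT32_MIN : INT32_MAX;
    }
    return out;
}

int main() {
    assert(l_add_sat(INT32_MAX, 1) == INT32_MAX);
    assert(l_add_sat(INT32_MIN, -1) == INT32_MIN);
    assert(l_sub_sat(INT32_MIN, 1) == INT32_MIN);
    assert(l_sub_sat(INT32_MAX, -1) == INT32_MAX);
    assert(l_sub_sat(7, 5) == 2);
    return 0;
}
```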
diff --git a/media/module/codecs/flac/TEST_MAPPING b/media/module/codecs/flac/TEST_MAPPING
new file mode 100644
index 0000000..725ea90
--- /dev/null
+++ b/media/module/codecs/flac/TEST_MAPPING
@@ -0,0 +1,5 @@
+{
+  "postsubmit": [
+    { "name": "FlacDecoderTest"}
+  ]
+}
diff --git a/media/module/codecs/flac/dec/test/Android.bp b/media/module/codecs/flac/dec/test/Android.bp
index a4c2735..8004c4a 100644
--- a/media/module/codecs/flac/dec/test/Android.bp
+++ b/media/module/codecs/flac/dec/test/Android.bp
@@ -28,6 +28,7 @@
 cc_test {
     name: "FlacDecoderTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "FlacDecoderTest.cpp",
diff --git a/media/module/codecs/m4v_h263/TEST_MAPPING b/media/module/codecs/m4v_h263/TEST_MAPPING
index ba3ff1c..8599fa5 100644
--- a/media/module/codecs/m4v_h263/TEST_MAPPING
+++ b/media/module/codecs/m4v_h263/TEST_MAPPING
@@ -1,18 +1,6 @@
-// mappings for frameworks/av/media/libstagefright/codecs/m4v_h263
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
-
-    // the decoder reports something bad about an unexpected newline in the *config file
-    // and the config file looks like the AndroidTest.xml file that we put in there.
-    // I don't get this from the Encoder -- and I don't see any substantive difference
-    // between decode and encode AndroidTest.xml files -- except that encode does NOT
-    // finish with a newline.
-    // strange.
+  "postsubmit": [
     { "name": "Mpeg4H263DecoderTest"},
     { "name": "Mpeg4H263EncoderTest"}
-
   ]
 }
diff --git a/media/module/codecs/mp3dec/TEST_MAPPING b/media/module/codecs/mp3dec/TEST_MAPPING
index 4ef4317..5faece6 100644
--- a/media/module/codecs/mp3dec/TEST_MAPPING
+++ b/media/module/codecs/mp3dec/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/libstagefright/codecs/mp3dec
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "Mp3DecoderTest"}
   ]
 }
diff --git a/media/module/codecserviceregistrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
index becb98a..56cd8b8 100644
--- a/media/module/codecserviceregistrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -55,6 +55,8 @@
         "com.android.media.swcodec",
     ],
 
+    export_include_dirs: ["include"],
+
     srcs: [
         "CodecServiceRegistrant.cpp",
     ],
diff --git a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
index f95fc4d..42fd94e 100644
--- a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
+++ b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
@@ -25,6 +25,7 @@
 #include <C2Component.h>
 #include <C2PlatformSupport.h>
 
+#include <android/hidl/manager/1.2/IServiceManager.h>
 #include <codec2/hidl/1.0/ComponentStore.h>
 #include <codec2/hidl/1.1/ComponentStore.h>
 #include <codec2/hidl/1.2/ComponentStore.h>
@@ -39,7 +40,7 @@
 #include <codec2/aidl/ComponentStore.h>
 #include <codec2/aidl/ParamTypes.h>
 
-#include <media/CodecServiceRegistrant.h>
+#include <codecserviceregistrant/CodecServiceRegistrant.h>
 
 namespace /* unnamed */ {
 
@@ -48,6 +49,7 @@
 using ::android::hardware::Return;
 using ::android::sp;
 using ::ndk::ScopedAStatus;
+namespace c2_hidl_V1_0 = ::android::hardware::media::c2::V1_0;
 namespace c2_hidl = ::android::hardware::media::c2::V1_2;
 namespace c2_aidl = ::aidl::android::hardware::media::c2;
 
@@ -733,13 +735,52 @@
 
 } // unnamed namespace
 
-extern "C" void RegisterCodecServices() {
-    const bool aidlSelected = c2_aidl::utils::IsSelected();
-    constexpr int kThreadCount = 64;
-    ABinderProcess_setThreadPoolMaxThreadCount(kThreadCount);
-    ABinderProcess_startThreadPool();
-    ::android::hardware::configureRpcThreadpool(kThreadCount, false);
+static android::sp<c2_hidl_V1_0::IComponentStore> getDeclaredHidlSwcodec(
+        const std::shared_ptr<C2ComponentStore>& store) {
+    using ::android::hidl::manager::V1_2::IServiceManager;
+    using namespace ::android::hardware::media::c2;
 
+    int platformVersion = android_get_device_api_level();
+    // STOPSHIP: Remove code name checking once platform version bumps up to 35.
+    std::string codeName = android::base::GetProperty("ro.build.version.codename", "");
+
+    if (codeName == "VanillaIceCream") {
+        platformVersion = __ANDROID_API_V__;
+    }
+    IServiceManager::Transport transport =
+            android::hardware::defaultServiceManager1_2()->getTransport(
+                    V1_2::IComponentStore::descriptor, "software");
+    if (transport == IServiceManager::Transport::HWBINDER) {
+        if (platformVersion < __ANDROID_API_S__) {
+            LOG(ERROR) << "We don't expect V1.2::IComponentStore to be declared on this device";
+        }
+        return ::android::sp<V1_2::utils::ComponentStore>::make(store);
+    }
+    transport = android::hardware::defaultServiceManager1_2()->getTransport(
+            V1_1::IComponentStore::descriptor, "software");
+    if (transport == IServiceManager::Transport::HWBINDER) {
+        if (platformVersion != __ANDROID_API_R__) {
+            LOG(ERROR) << "We don't expect V1.1::IComponentStore to be declared on this device";
+        }
+        return ::android::sp<V1_1::utils::ComponentStore>::make(store);
+    }
+    transport = android::hardware::defaultServiceManager1_2()->getTransport(
+            V1_0::IComponentStore::descriptor, "software");
+    if (transport == IServiceManager::Transport::HWBINDER) {
+        if (platformVersion != __ANDROID_API_Q__) {
+            LOG(ERROR) << "We don't expect V1.0::IComponentStore to be declared on this device";
+        }
+        return ::android::sp<V1_0::utils::ComponentStore>::make(store);
+    }
+    return nullptr;
+}
+
+/**
+ * This function encapsulates the core logic required to register codec services,
+ * separated from threadpool management to avoid timeouts when called by the fuzzer.
+ */
+static void RegisterCodecServicesWithExistingThreadpool() {
+    const bool aidlSelected = c2_aidl::utils::IsSelected();
     LOG(INFO) << "Creating software Codec2 service...";
     std::shared_ptr<C2ComponentStore> store =
         android::GetCodec2PlatformComponentStore();
@@ -750,33 +791,6 @@
 
     using namespace ::android::hardware::media::c2;
 
-    int platformVersion = android_get_device_api_level();
-    // STOPSHIP: Remove code name checking once platform version bumps up to 35.
-    std::string codeName =
-        android::base::GetProperty("ro.build.version.codename", "");
-    if (codeName == "VanillaIceCream") {
-        platformVersion = __ANDROID_API_V__;
-    }
-
-    android::sp<V1_0::IComponentStore> hidlStore;
-    std::shared_ptr<c2_aidl::IComponentStore> aidlStore;
-    const char *hidlVer = "(unknown)";
-    if (aidlSelected) {
-        aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(store);
-    } else if (platformVersion >= __ANDROID_API_S__) {
-        hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(store);
-        hidlVer = "1.2";
-    } else if (platformVersion == __ANDROID_API_R__) {
-        hidlStore = ::android::sp<V1_1::utils::ComponentStore>::make(store);
-        hidlVer = "1.1";
-    } else if (platformVersion == __ANDROID_API_Q__) {
-        hidlStore = ::android::sp<V1_0::utils::ComponentStore>::make(store);
-        hidlVer = "1.0";
-    } else {  // platformVersion < __ANDROID_API_Q__
-        LOG(ERROR) << "The platform version " << platformVersion <<
-                      " is not supported.";
-        return;
-    }
     if (!ionPropertiesDefined()) {
         using IComponentStore =
             ::android::hardware::media::c2::V1_0::IComponentStore;
@@ -818,38 +832,63 @@
     }
 
     bool registered = false;
-    if (platformVersion >= __ANDROID_API_V__) {
-        if (!aidlStore) {
-            aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(
-                    std::make_shared<H2C2ComponentStore>(nullptr));
-        }
-        const std::string serviceName =
-            std::string(c2_aidl::IComponentStore::descriptor) + "/software";
-        binder_exception_t ex = AServiceManager_addService(
-                aidlStore->asBinder().get(), serviceName.c_str());
-        if (ex == EX_NONE) {
-            registered = true;
-        } else {
-            LOG(ERROR) << "Cannot register software Codec2 AIDL service.";
+    const std::string aidlServiceName =
+        std::string(c2_aidl::IComponentStore::descriptor) + "/software";
+    if (__builtin_available(android __ANDROID_API_S__, *)) {
+        if (AServiceManager_isDeclared(aidlServiceName.c_str())) {
+            std::shared_ptr<c2_aidl::IComponentStore> aidlStore;
+            if (aidlSelected) {
+                aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(store);
+            } else {
+                aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(
+                        std::make_shared<H2C2ComponentStore>(nullptr));
+            }
+            binder_exception_t ex = AServiceManager_addService(
+                    aidlStore->asBinder().get(), aidlServiceName.c_str());
+            if (ex == EX_NONE) {
+                registered = true;
+            } else {
+                LOG(WARNING) << "Cannot register software Codec2 AIDL service. Exception: " << ex;
+            }
         }
     }
 
-    if (!hidlStore) {
-        hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(
-                std::make_shared<H2C2ComponentStore>(nullptr));
-        hidlVer = "1.2";
-    }
-    if (hidlStore->registerAsService("software") == android::OK) {
-        registered = true;
+    android::sp<V1_0::IComponentStore> hidlStore = getDeclaredHidlSwcodec(store);
+    // If the software component store isn't declared in the manifest, we don't
+    // need to create the service and register it.
+    if (hidlStore) {
+        if (registered && aidlSelected) {
+            LOG(INFO) << "Both HIDL and AIDL software codecs are declared in the vintf "
+                      << "manifest, but AIDL was selected. "
+                      << "Creating a null HIDL service so it's not accidentally "
+                      << "used. The AIDL software codec is already registered.";
+            hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(
+                    std::make_shared<H2C2ComponentStore>(nullptr));
+        }
+        if (hidlStore->registerAsService("software") == android::OK) {
+            registered = true;
+        } else {
+            LOG(ERROR) << "Cannot register software Codec2 " << hidlStore->descriptor
+                       << " service.";
+        }
     } else {
-        LOG(ERROR) << "Cannot register software Codec2 v" << hidlVer << " service.";
+        LOG(INFO) << "The HIDL software Codec2 service is deprecated"
+                     " so it is not being registered with hwservicemanager.";
     }
 
     if (registered) {
         LOG(INFO) << "Software Codec2 service created and registered.";
     }
+}
+
+extern "C" void RegisterCodecServices() {
+    constexpr int kThreadCount = 64;
+    ABinderProcess_setThreadPoolMaxThreadCount(kThreadCount);
+    ABinderProcess_startThreadPool();
+    ::android::hardware::configureRpcThreadpool(kThreadCount, false);
+
+    RegisterCodecServicesWithExistingThreadpool();
 
     ABinderProcess_joinThreadPool();
     ::android::hardware::joinRpcThreadpool();
 }
-
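For reference, the AIDL registration above follows a "publish only if declared" pattern: the service is created and added only when its instance name appears in the VINTF manifest. A minimal generic sketch of that pattern, with an illustrative template service type (not part of this change):

    #include <memory>
    #include <string>
    #include <android/binder_manager.h>

    // Minimal sketch: publish an AIDL service only when its instance name is
    // declared in the VINTF manifest. The service type is illustrative.
    template <typename Service>
    bool registerIfDeclared(const std::shared_ptr<Service>& service,
                            const std::string& instanceName) {
        if (!AServiceManager_isDeclared(instanceName.c_str())) {
            return false;  // not declared; skip registration
        }
        binder_exception_t ex = AServiceManager_addService(
                service->asBinder().get(), instanceName.c_str());
        return ex == EX_NONE;
    }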
diff --git a/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
index 4868e0c..0baf1ca 100644
--- a/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
+++ b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
@@ -13,6 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+#include <codecserviceregistrant/CodecServiceRegistrant.h>
+
 #include "../CodecServiceRegistrant.cpp"
 #include "fuzzer/FuzzedDataProvider.h"
 #include <C2Config.h>
@@ -166,9 +169,9 @@
 void CodecServiceRegistrantFuzzer::process(const uint8_t *data, size_t size) {
   mFDP = new FuzzedDataProvider(data, size);
   invokeH2C2ComponentStore();
-  /** RegisterCodecServices is called here to improve code coverage */
-  /** as currently it is not called by codecServiceRegistrant       */
-  RegisterCodecServices();
+  /** RegisterCodecServicesWithExistingThreadpool() is called here to improve
+   * code coverage, as it is not currently called in codecServiceRegistrant.cpp */
+  RegisterCodecServicesWithExistingThreadpool();
   delete mFDP;
 }
 
diff --git a/media/libmedia/include/media/CodecServiceRegistrant.h b/media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
similarity index 77%
rename from media/libmedia/include/media/CodecServiceRegistrant.h
rename to media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
index e0af781..1c6f71f 100644
--- a/media/libmedia/include/media/CodecServiceRegistrant.h
+++ b/media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
@@ -18,6 +18,13 @@
 
 #define CODEC_SERVICE_REGISTRANT_H_
 
+/**
+ * This function starts the binder and HIDL thread pools, runs the registration
+ * logic encapsulated in RegisterCodecServicesWithExistingThreadpool(), and then
+ * joins the thread pools on the calling thread.
+ */
+extern "C" void RegisterCodecServices();
+
 typedef void (*RegisterCodecServicesFunc)();
 
 #endif  // CODEC_SERVICE_REGISTRANT_H_
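The RegisterCodecServicesFunc typedef above exists so that a host process can resolve the entry point dynamically. A minimal sketch of that flow; the library name and dlopen flags here are assumptions for illustration:

    #include <dlfcn.h>
    #include <codecserviceregistrant/CodecServiceRegistrant.h>

    // Sketch: load the registrant library and invoke its entry point through
    // the RegisterCodecServicesFunc typedef. The library name is assumed.
    void loadAndRegisterCodecServices() {
        void* handle = dlopen("libmedia_codecserviceregistrant.so", RTLD_NOW | RTLD_LOCAL);
        if (handle == nullptr) {
            return;
        }
        auto registerFn = reinterpret_cast<RegisterCodecServicesFunc>(
                dlsym(handle, "RegisterCodecServices"));
        if (registerFn != nullptr) {
            registerFn();  // starts thread pools, registers services, then joins
        }
    }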
diff --git a/media/module/esds/TEST_MAPPING b/media/module/esds/TEST_MAPPING
index 9368b6d..0337743 100644
--- a/media/module/esds/TEST_MAPPING
+++ b/media/module/esds/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/module/esds
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "ESDSTest" }
   ]
 }
diff --git a/media/module/esds/tests/Android.bp b/media/module/esds/tests/Android.bp
index aea611e..427b275 100644
--- a/media/module/esds/tests/Android.bp
+++ b/media/module/esds/tests/Android.bp
@@ -25,6 +25,7 @@
 cc_test {
     name: "ESDSTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "ESDSTest.cpp",
diff --git a/media/module/esds/tests/ESDSTest.cpp b/media/module/esds/tests/ESDSTest.cpp
index 33bdcac..ba64f60 100644
--- a/media/module/esds/tests/ESDSTest.cpp
+++ b/media/module/esds/tests/ESDSTest.cpp
@@ -52,7 +52,7 @@
                              /* BitrateMax */ int32_t,
                              /* BitrateAvg */ int32_t>> {
   public:
-    ESDSUnitTest() : mESDSData(nullptr) {
+    ESDSUnitTest() {
         mESDSParams.inputFile = get<0>(GetParam());
         mESDSParams.objectTypeIndication = get<1>(GetParam());
         mESDSParams.codecSpecificInfoData = get<2>(GetParam());
@@ -61,6 +61,13 @@
         mESDSParams.bitrateAvg = get<5>(GetParam());
     };
 
+    ~ESDSUnitTest() {
+        if (mESDSData != nullptr) {
+            free(mESDSData);
+            mESDSData = nullptr;
+        }
+    }
+
     virtual void TearDown() override {
         if (mDataSource) mDataSource.clear();
         if (mInputFp) {
@@ -70,8 +77,8 @@
     }
 
     virtual void SetUp() override { ASSERT_NO_FATAL_FAILURE(readESDSData()); }
-    const void *mESDSData;
-    size_t mESDSSize;
+    void *mESDSData = nullptr;
+    size_t mESDSSize = 0;
     ESDSParams mESDSParams;
 
   private:
@@ -105,10 +112,19 @@
     bool esdsDataPresent(size_t numTracks, sp<IMediaExtractor> extractor) {
         bool foundESDS = false;
         uint32_t type;
+        if (mESDSData != nullptr) {
+            free(mESDSData);
+            mESDSData = nullptr;
+        }
         for (size_t i = 0; i < numTracks; ++i) {
             sp<MetaData> trackMeta = extractor->getTrackMetaData(i);
+            const void *esdsData = nullptr;
+            size_t esdsSize = 0;
             if (trackMeta != nullptr &&
-                trackMeta->findData(kKeyESDS, &type, &mESDSData, &mESDSSize)) {
+                trackMeta->findData(kKeyESDS, &type, &esdsData, &esdsSize)) {
+                mESDSData = malloc(esdsSize);
+                mESDSSize = esdsSize;
+                memcpy(mESDSData, esdsData, esdsSize);
                 trackMeta->clear();
                 foundESDS = true;
                 break;
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index d096d63..7a49d8e 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -134,6 +134,8 @@
     ],
 
     dictionary: "mp4_extractor_fuzzer.dict",
+
+    corpus: ["corpus_mp4/*"],
 }
 
 cc_fuzz {
@@ -202,7 +204,6 @@
         "ogg_extractor_fuzzer.cpp",
     ],
 
-
     static_libs: [
         "libstagefright_metadatautils",
         "libvorbisidec",
diff --git a/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0 b/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0
new file mode 100644
index 0000000..c17251b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb b/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb
new file mode 100644
index 0000000..16907fd
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b b/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b
new file mode 100644
index 0000000..2ec7881
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f b/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f
new file mode 100644
index 0000000..cd1fdcc
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5 b/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5
new file mode 100644
index 0000000..f1ea812
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a b/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a
new file mode 100644
index 0000000..c5d3eb2
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2 b/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2
new file mode 100644
index 0000000..1f6c29d
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc b/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc
new file mode 100644
index 0000000..40d639d
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615 b/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615
new file mode 100644
index 0000000..2056348
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271 b/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271
new file mode 100644
index 0000000..f50d4f4
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e b/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e
new file mode 100644
index 0000000..25ea55b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456 b/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456
new file mode 100644
index 0000000..62d259b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec b/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec
new file mode 100644
index 0000000..d649632
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec
Binary files differ
diff --git a/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict b/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
index 3683649..b48c854 100644
--- a/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
+++ b/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
@@ -246,3 +246,4 @@
 kw245="iso5"
 kw246="resv"
 kw247="iso6"
+kw248="clap"
diff --git a/media/module/extractors/mkv/MatroskaExtractor.cpp b/media/module/extractors/mkv/MatroskaExtractor.cpp
index 6900341..f326db1 100644
--- a/media/module/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/module/extractors/mkv/MatroskaExtractor.cpp
@@ -1769,6 +1769,30 @@
 
 }
 
+status_t MatroskaExtractor::synthesizeVP9(TrackInfo* trackInfo, size_t index) {
+    BlockIterator iter(this, trackInfo->mTrackNum, index);
+    if (iter.eos()) {
+        return ERROR_MALFORMED;
+    }
+
+    const mkvparser::Block* block = iter.block();
+    if (block->GetFrameCount() <= 0) {
+        return ERROR_MALFORMED;
+    }
+
+    const mkvparser::Block::Frame& frame = block->GetFrame(0);
+    auto tmpData = heapbuffer<unsigned char>(frame.len);
+    long n = frame.Read(mReader, tmpData.get());
+    if (n != 0) {
+        return ERROR_MALFORMED;
+    }
+
+    if (!MakeVP9CodecSpecificData(trackInfo->mMeta, tmpData.get(), frame.len)) {
+        return ERROR_MALFORMED;
+    }
+
+    return OK;
+}
 
 static inline bool isValidInt32ColourValue(long long value) {
     return value != mkvparser::Colour::kValueNotPresent
@@ -2002,6 +2026,8 @@
                       // specified in http://www.webmproject.org/vp9/profiles/.
                       AMediaFormat_setBuffer(meta,
                              AMEDIAFORMAT_KEY_CSD_0, codecPrivate, codecPrivateSize);
+                    } else {
+                        isSetCsdFrom1stFrame = true;
                     }
                 } else if (!strcmp("V_AV1", codecID)) {
                     AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
@@ -2254,6 +2280,13 @@
                 mTracks.pop();
                 continue;
             }
+        } else if ((!strcmp("V_VP9", codecID) && codecPrivateSize == 0) ||
+                   (!strcmp(mimetype, MEDIA_MIMETYPE_VIDEO_VP9) && isSetCsdFrom1stFrame)) {
+            // Attempt to recover from a VP9 track without codec private data
+            err = synthesizeVP9(trackInfo, n);
+            if (err != OK) {
+                ALOGW("ignoring error %d in synthesizeVP9", err);
+            }
         }
         // the TrackInfo owns the metadata now
         meta = nullptr;
@@ -2279,6 +2312,8 @@
         int64_t thumbnailTimeUs = 0;
         size_t maxBlockSize = 0;
         while (!iter.eos() && j < 20) {
+            int64_t blockTimeUs = iter.blockTimeUs();
+
             if (iter.block()->IsKey()) {
                 ++j;
 
@@ -2289,9 +2324,13 @@
 
                 if (blockSize > maxBlockSize) {
                     maxBlockSize = blockSize;
-                    thumbnailTimeUs = iter.blockTimeUs();
+                    thumbnailTimeUs = blockTimeUs;
                 }
             }
+            // Exit after 20s if we've already found at least one key frame.
+            if (blockTimeUs > 20000000 && maxBlockSize > 0) {
+                break;
+            }
             iter.advance();
         }
         AMediaFormat_setInt64(info->mMeta,
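After the recovery path above, a VP9 track that shipped without CodecPrivate still exposes a synthesized csd-0 buffer in its track format. A minimal client-side sketch of checking for it via the NDK format API (illustrative, not part of this change):

    #include <media/NdkMediaFormat.h>

    // Sketch: check whether a track format carries codec specific data (csd-0),
    // e.g. one synthesized by the extractor from the first VP9 frame.
    bool hasCsd0(AMediaFormat* format) {
        void* data = nullptr;
        size_t size = 0;
        return AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_0, &data, &size) &&
                size > 0;
    }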
diff --git a/media/module/extractors/mkv/include/MatroskaExtractor.h b/media/module/extractors/mkv/include/MatroskaExtractor.h
index 99fad17..2e4d955 100644
--- a/media/module/extractors/mkv/include/MatroskaExtractor.h
+++ b/media/module/extractors/mkv/include/MatroskaExtractor.h
@@ -95,6 +95,7 @@
     status_t synthesizeAVCC(TrackInfo *trackInfo, size_t index);
     status_t synthesizeMPEG2(TrackInfo *trackInfo, size_t index);
     status_t synthesizeMPEG4(TrackInfo *trackInfo, size_t index);
+    status_t synthesizeVP9(TrackInfo* trackInfo, size_t index);
     status_t initTrackInfo(
             const mkvparser::Track *track,
             AMediaFormat *meta,
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index b3707c8..f247f8c 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -523,11 +523,10 @@
     }
 
     [this, &track] {
-        int64_t duration;
+        int64_t duration = track->mMdhdDurationUs;
         int32_t samplerate;
         // Only for audio track.
-        if (track->elst_needs_processing && mHeaderTimescale != 0 &&
-            AMediaFormat_getInt64(track->meta, AMEDIAFORMAT_KEY_DURATION, &duration) &&
+        if (track->elst_needs_processing && mHeaderTimescale != 0 && duration != 0 &&
             AMediaFormat_getInt32(track->meta, AMEDIAFORMAT_KEY_SAMPLE_RATE, &samplerate)) {
             // Elst has to be processed only the first time this function is called.
             track->elst_needs_processing = false;
@@ -1645,7 +1644,10 @@
                           (long long) duration, (long long) mLastTrack->timescale);
                     return ERROR_MALFORMED;
                 }
-                AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+                // Store this track's mdhd duration to calculate the padding.
+                mLastTrack->mMdhdDurationUs = (int64_t)durationUs;
+            } else {
+                mLastTrack->mMdhdDurationUs = 0;
             }
 
             uint8_t lang[2];
@@ -3907,17 +3909,18 @@
     }
 
     int32_t id;
+    int64_t duration;
 
     if (version == 1) {
         // we can get ctime value from U64_AT(&buffer[4])
         // we can get mtime value from U64_AT(&buffer[12])
         id = U32_AT(&buffer[20]);
-        // we can get duration value from U64_AT(&buffer[28])
+        duration = U64_AT(&buffer[28]);
     } else if (version == 0) {
         // we can get ctime value from U32_AT(&buffer[4])
         // we can get mtime value from U32_AT(&buffer[8])
         id = U32_AT(&buffer[12]);
-        // we can get duration value from U32_AT(&buffer[20])
+        duration = U32_AT(&buffer[20]);
     } else {
         return ERROR_UNSUPPORTED;
     }
@@ -3926,6 +3929,15 @@
         return ERROR_MALFORMED;
 
     AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_TRACK_ID, id);
+    if (duration != 0 && mHeaderTimescale != 0) {
+        long double durationUs = ((long double)duration * 1000000) / mHeaderTimescale;
+        if (durationUs < 0 || durationUs > INT64_MAX) {
+            ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits",
+                  (long long) duration, (long long) mHeaderTimescale);
+            return ERROR_MALFORMED;
+        }
+        AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+    }
 
     size_t matrixOffset = dynSize + 16;
     int32_t a00 = U32_AT(&buffer[matrixOffset]);
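The tkhd handling above converts the movie-timescale duration into microseconds with the same overflow guard used elsewhere in the extractor. A small standalone sketch of the conversion, using illustrative numbers (18000 ticks at timescale 600 is 30 seconds, i.e. 30,000,000 us):

    #include <cstdint>

    // Sketch of the tkhd duration conversion shown above; -1 marks overflow.
    int64_t tkhdDurationToUs(uint64_t duration, uint32_t headerTimescale) {
        if (duration == 0 || headerTimescale == 0) {
            return 0;
        }
        long double durationUs = ((long double)duration * 1000000) / headerTimescale;
        if (durationUs < 0 || durationUs > INT64_MAX) {
            return -1;  // cannot be represented in 64 bits
        }
        return (int64_t)durationUs;
    }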
diff --git a/media/module/extractors/mp4/include/MPEG4Extractor.h b/media/module/extractors/mp4/include/MPEG4Extractor.h
index 542a3e6..59626f6 100644
--- a/media/module/extractors/mp4/include/MPEG4Extractor.h
+++ b/media/module/extractors/mp4/include/MPEG4Extractor.h
@@ -96,7 +96,7 @@
 
         uint8_t *mTx3gBuffer;
         size_t mTx3gSize, mTx3gFilled;
-
+        int64_t mMdhdDurationUs;
 
         Track() {
             next = NULL;
diff --git a/media/module/foundation/Android.bp b/media/module/foundation/Android.bp
index dc8384d..edf4cb5 100644
--- a/media/module/foundation/Android.bp
+++ b/media/module/foundation/Android.bp
@@ -33,9 +33,6 @@
 cc_defaults {
     name: "libstagefright_foundation_defaults",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     host_supported: true,
     double_loadable: true,
 
diff --git a/media/module/foundation/TEST_MAPPING b/media/module/foundation/TEST_MAPPING
index a70c352..ea4e4fd 100644
--- a/media/module/foundation/TEST_MAPPING
+++ b/media/module/foundation/TEST_MAPPING
@@ -1,9 +1,6 @@
-// mappings for frameworks/av/media/libstagefright/foundation
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
+    { "name": "AVCUtilsUnitTest" },
     { "name": "OpusHeaderTest" }
   ],
 
diff --git a/media/module/foundation/include/media/stagefright/foundation/AMessage.h b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
index 7594565..b301c53 100644
--- a/media/module/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
@@ -356,6 +356,16 @@
     DISALLOW_EVIL_CONSTRUCTORS(AMessage);
 };
 
+/*
+ * Helper struct for wrapping any object with RefBase.
+ */
+template <typename T>
+struct WrapperObject : public RefBase {
+    WrapperObject(const T& v) : value(v) {}
+    WrapperObject(T&& v) : value(std::move(v)) {}
+    T value;
+};
+
 }  // namespace android
 
 #endif  // A_MESSAGE_H_
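WrapperObject is a thin RefBase shim so that arbitrary copyable or movable values can be attached to an AMessage through setObject. A minimal usage sketch; the message field name is illustrative:

    #include <utility>
    #include <vector>
    #include <media/stagefright/foundation/AMessage.h>

    // Sketch: attach a non-RefBase value (a std::vector here) to an AMessage.
    void postPayload(const android::sp<android::AMessage>& msg, std::vector<int> payload) {
        using Wrapped = android::WrapperObject<std::vector<int>>;
        android::sp<Wrapped> wrapped = android::sp<Wrapped>::make(std::move(payload));
        msg->setObject("payload", wrapped);  // "payload" is an illustrative field name
        msg->post();
    }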
diff --git a/media/module/foundation/tests/AData_test.cpp b/media/module/foundation/tests/AData_test.cpp
index 2628a47..87b69a6 100644
--- a/media/module/foundation/tests/AData_test.cpp
+++ b/media/module/foundation/tests/AData_test.cpp
@@ -392,7 +392,7 @@
     EXPECT_EQ(2L, _shared.use_count()); // still both u and _shared contains the object
 
     EXPECT_TRUE(u.clear());
-    EXPECT_TRUE(_shared.unique()); // now only _shared contains the object
+    EXPECT_EQ(1L, _shared.use_count()); // now only _shared contains the object
 
     EXPECT_TRUE(u.set(_constShared));
     EXPECT_EQ(2L, _constShared.use_count()); // even though it is const, we can add a use count
@@ -591,7 +591,7 @@
     EXPECT_EQ(2L, _shared.use_count()); // still both u and _shared contains the object
 
     EXPECT_TRUE(u.clear());
-    EXPECT_TRUE(_shared.unique()); // now only _shared contains the object
+    EXPECT_EQ(1L, _shared.use_count()); // now only _shared contains the object
 
     EXPECT_TRUE(u.set(_constShared));
     EXPECT_EQ(2L, _constShared.use_count()); // even though it is const, we can add a use count
diff --git a/media/module/foundation/tests/AVCUtils/Android.bp b/media/module/foundation/tests/AVCUtils/Android.bp
index ee7db21..c306c73 100644
--- a/media/module/foundation/tests/AVCUtils/Android.bp
+++ b/media/module/foundation/tests/AVCUtils/Android.bp
@@ -28,6 +28,7 @@
 cc_test {
     name: "AVCUtilsUnitTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "AVCUtilsUnitTest.cpp",
diff --git a/media/module/foundation/tests/AVCUtils/AndroidTest.xml b/media/module/foundation/tests/AVCUtils/AndroidTest.xml
index e30bfbf..315373f 100644
--- a/media/module/foundation/tests/AVCUtils/AndroidTest.xml
+++ b/media/module/foundation/tests/AVCUtils/AndroidTest.xml
@@ -16,7 +16,7 @@
 <configuration description="Test module config for AVC Utils unit tests">
     <option name="test-suite-tag" value="AVCUtilsUnitTest" />
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
-        <option name="cleanup" value="false" />
+        <option name="cleanup" value="true" />
         <option name="push" value="AVCUtilsUnitTest->/data/local/tmp/AVCUtilsUnitTest" />
     </target_preparer>
 
diff --git a/media/module/id3/Android.bp b/media/module/id3/Android.bp
index bea3e34..e426796 100644
--- a/media/module/id3/Android.bp
+++ b/media/module/id3/Android.bp
@@ -17,6 +17,24 @@
     ],
 }
 
+cc_library_headers {
+    name: "libstagefright_id3_headers",
+    export_include_dirs: ["include"],
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
 cc_library_static {
     name: "libstagefright_id3",
     min_sdk_version: "29",
@@ -25,7 +43,6 @@
         "com.android.media",
     ],
 
-
     srcs: ["ID3.cpp"],
 
     header_libs: [
@@ -35,6 +52,8 @@
         "media_ndk_headers",
     ],
 
+    export_include_dirs: ["include"],
+
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/module/id3/TEST_MAPPING b/media/module/id3/TEST_MAPPING
index 6106908..497d984 100644
--- a/media/module/id3/TEST_MAPPING
+++ b/media/module/id3/TEST_MAPPING
@@ -1,9 +1,5 @@
-// frameworks/av/media/libstagefright/id3
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "ID3Test" }
   ],
 
diff --git a/media/libstagefright/include/ID3.h b/media/module/id3/include/ID3.h
similarity index 100%
rename from media/libstagefright/include/ID3.h
rename to media/module/id3/include/ID3.h
diff --git a/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
index af53f64..43a4628 100644
--- a/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
+++ b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
@@ -21,6 +21,7 @@
 #include <aidl/android/media/IResourceObserverService.h>
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
+#include <map>
 #include <media/TranscodingResourcePolicy.h>
 #include <utils/Log.h>
 
@@ -66,11 +67,31 @@
     TranscodingResourcePolicy* mOwner;
 };
 
+// Cookie used for death recipients. The TranscodingResourcePolicy that a
+// cookie is associated with must outlive the cookie. A cookie is deleted
+// either in BinderDiedCallback or in the TranscodingResourcePolicy
+// destructor (which also calls unregisterSelf).
+class TranscodingResourcePolicyCookie {
+ public:
+    TranscodingResourcePolicyCookie(TranscodingResourcePolicy* policy) : mPolicy(policy) {}
+    TranscodingResourcePolicyCookie() = delete;
+    TranscodingResourcePolicy* mPolicy;
+};
+
+static std::map<uintptr_t, std::unique_ptr<TranscodingResourcePolicyCookie>> sCookies;
+static uintptr_t sCookieKeyCounter;
+static std::mutex sCookiesMutex;
+
 // static
 void TranscodingResourcePolicy::BinderDiedCallback(void* cookie) {
-    TranscodingResourcePolicy* owner = reinterpret_cast<TranscodingResourcePolicy*>(cookie);
-    if (owner != nullptr) {
-        owner->unregisterSelf();
+    std::lock_guard<std::mutex> guard(sCookiesMutex);
+    if (auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie)); it != sCookies.end()) {
+        ALOGI("BinderDiedCallback unregistering TranscodingResourcePolicy");
+        auto policy = reinterpret_cast<TranscodingResourcePolicy*>(it->second->mPolicy);
+        if (policy) {
+            policy->unregisterSelf();
+        }
+        sCookies.erase(it);
     }
     // TODO(chz): retry to connecting to IResourceObserverService after failure.
     // Also need to have back-up logic if IResourceObserverService is offline for
@@ -88,6 +109,23 @@
 }
 
 TranscodingResourcePolicy::~TranscodingResourcePolicy() {
+    {
+        std::lock_guard<std::mutex> guard(sCookiesMutex);
+
+        // delete all of the cookies associated with this TranscodingResourcePolicy
+        // instance since they are holding pointers to this object that will no
+        // longer be valid.
+        std::erase_if(sCookies, [this](const auto& cookieEntry) {
+            auto const& [key, cookie] = cookieEntry;
+            std::lock_guard guard(mCookieKeysLock);
+            if (const auto& it = mCookieKeys.find(key); it != mCookieKeys.end()) {
+                // No longer need to track this cookie
+                mCookieKeys.erase(key);
+                return true;
+            }
+            return false;
+        });
+    }
     unregisterSelf();
 }
 
@@ -123,7 +161,17 @@
         return;
     }
 
-    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+    std::unique_ptr<TranscodingResourcePolicyCookie> cookie =
+            std::make_unique<TranscodingResourcePolicyCookie>(this);
+    void* cookiePtr = static_cast<void*>(cookie.get());
+    uintptr_t cookieKey = sCookieKeyCounter++;
+    sCookies.emplace(cookieKey, std::move(cookie));
+    {
+        std::lock_guard guard(mCookieKeysLock);
+        mCookieKeys.insert(cookieKey);
+    }
+
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(cookieKey));
 
     ALOGD("@@@ registered observer");
     mRegistered = true;
@@ -141,7 +189,6 @@
     ::ndk::SpAIBinder binder = mService->asBinder();
     if (binder.get() != nullptr) {
         Status status = mService->unregisterObserver(mObserver);
-        AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
     }
 
     mService = nullptr;
diff --git a/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
index ee232e7..4d762b5 100644
--- a/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
+++ b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
@@ -22,6 +22,7 @@
 #include <utils/Condition.h>
 
 #include <mutex>
+#include <set>
 namespace aidl {
 namespace android {
 namespace media {
@@ -48,6 +49,8 @@
     bool mRegistered GUARDED_BY(mRegisteredLock);
     std::shared_ptr<IResourceObserverService> mService GUARDED_BY(mRegisteredLock);
     std::shared_ptr<ResourceObserver> mObserver;
+    mutable std::mutex mCookieKeysLock;
+    std::set<uintptr_t> mCookieKeys;
 
     mutable std::mutex mCallbackLock;
     std::weak_ptr<ResourcePolicyCallbackInterface> mResourcePolicyCallback
@@ -59,6 +62,7 @@
     static void BinderDiedCallback(void* cookie);
 
     void registerSelf();
+    // callers must also delete the associated TranscodingResourcePolicyCookie(s) any time this is called
     void unregisterSelf();
     void onResourceAvailable(pid_t pid);
 };  // class TranscodingUidPolicy
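The change above stops handing a raw `this` pointer to AIBinder_linkToDeath and instead passes an opaque key into a process-wide cookie map, so a death notification that arrives after the policy object is destroyed can no longer dereference a dangling pointer. A generic sketch of that pattern with illustrative names:

    #include <cstdint>
    #include <map>
    #include <mutex>

    // Sketch of the cookie-key pattern: the death callback looks up a key and
    // never dereferences a possibly dangling raw pointer. Names are illustrative.
    struct Owner;  // the object whose lifetime must be protected

    static std::mutex gCookieMutex;
    static uintptr_t gNextCookieKey = 0;
    static std::map<uintptr_t, Owner*> gCookies;

    uintptr_t makeCookieKey(Owner* owner) {
        std::lock_guard<std::mutex> guard(gCookieMutex);
        uintptr_t key = gNextCookieKey++;
        gCookies.emplace(key, owner);
        return key;  // pass reinterpret_cast<void*>(key) to AIBinder_linkToDeath
    }

    void onBinderDied(void* cookie) {
        std::lock_guard<std::mutex> guard(gCookieMutex);
        auto it = gCookies.find(reinterpret_cast<uintptr_t>(cookie));
        if (it != gCookies.end()) {
            // notify it->second here; owners erase their keys before destruction,
            // so a stale cookie simply misses the map and is ignored.
            gCookies.erase(it);
        }
    }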
diff --git a/media/module/metadatautils/MetaDataUtils.cpp b/media/module/metadatautils/MetaDataUtils.cpp
index db60f04..0895bb5 100644
--- a/media/module/metadatautils/MetaDataUtils.cpp
+++ b/media/module/metadatautils/MetaDataUtils.cpp
@@ -81,6 +81,177 @@
     return true;
 }
 
+// Check if the next 24 bits are VP9 SYNC_CODE
+static bool isVp9SyncCode(ABitReader &bits) {
+    if (bits.numBitsLeft() < 24) {
+        return false;
+    }
+    return bits.getBits(24) == 0x498342;
+}
+
+// Parses bit depth and chroma subsampling from a VP9 uncompressed header
+// (see the bitdepth_colorspace_sampling section in 6.2 of the VP9 bitstream spec).
+static bool getVp9BitdepthChromaSubSampling(ABitReader &bits,
+        int32_t profile,
+        int32_t *bitDepth,
+        int32_t *chromaSubsampling) {
+    if (profile >= 2) {
+        if (bits.numBitsLeft() < 1) {
+            return false;
+        }
+        *bitDepth = bits.getBits(1) ? 12 : 10;
+    } else {
+        *bitDepth = 8;
+    }
+
+    uint32_t colorspace;
+    if (!bits.getBitsGraceful(3, &colorspace)) {
+        return false;
+    }
+
+    *chromaSubsampling = -1;
+    if (colorspace != 7 /*SRGB*/) {
+        // Skip yuv_range_flag
+        if (!bits.skipBits(1)) {
+            return false;
+        }
+        // Check for subsampling only for profiles 1 and 3.
+        if (profile == 1 || profile == 3) {
+            uint32_t ss_x;
+            uint32_t ss_y;
+            if (bits.getBitsGraceful(1, &ss_x) && bits.getBitsGraceful(1, &ss_y)) {
+                *chromaSubsampling = ss_x << 1 & ss_y;
+            } else {
+                return false;
+            }
+        } else {
+            *chromaSubsampling = 3;
+        }
+    } else {
+        if (profile == 1 || profile == 3) {
+            *chromaSubsampling = 0;
+        }
+    }
+    return true;
+}
+// The data parameter contains the first frame, starting with the uncompressed frame
+// header. This uncompressed header (see section 6.2 of the VP9 bitstream spec) is
+// used to parse the profile, bit depth and chroma subsampling.
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size) {
+    if (meta == nullptr || data == nullptr || size == 0) {
+        return false;
+    }
+
+    ABitReader bits(data, size);
+
+    // First 2 bits of the uncompressed header should be the frame_marker.
+    if (bits.getBits(2) != 0b10) {
+        return false;
+    }
+
+    int32_t profileLowBit = bits.getBits(1);
+    int32_t profileHighBit = bits.getBits(1);
+    int32_t profile = profileHighBit * 2 + profileLowBit;
+
+    // One reserved '0' bit if profile is 3.
+    if (profile == 3 && bits.getBits(1) != 0) {
+        return false;
+    }
+
+    // If show_existing_frame is set, we get no more data. Since this is
+    // expected to be the first frame, we can return false which will cascade
+    // into ERROR_MALFORMED.
+    if (bits.getBits(1)) {
+        return false;
+    }
+
+    int32_t frame_type = bits.getBits(1);
+
+    // Up to 7 bits may have been read so far; they were guaranteed to be available
+    // since size > 0. From here on, check that enough bits are available before reading.
+    if (bits.numBitsLeft() < 2) {
+        return false;
+    }
+
+    int32_t show_frame = bits.getBits(1);
+    int32_t error_resilient_mode = bits.getBits(1);
+    int32_t bitDepth = 8;
+    int32_t chromaSubsampling = -1;
+
+    if (frame_type == 0 /* KEY_FRAME */) {
+        // Check for sync code.
+        if (!isVp9SyncCode(bits)) {
+            return false;
+        }
+
+        if (!getVp9BitdepthChromaSubSampling(bits, profile, &bitDepth, &chromaSubsampling)) {
+            return false;
+        }
+    } else {
+        int32_t intra_only = 0;
+        if (!show_frame) {
+            if (bits.numBitsLeft() < 1) {
+                return false;
+            }
+            intra_only = bits.getBits(1);
+        }
+
+        if (!error_resilient_mode) {
+            if (bits.numBitsLeft() < 2) {
+                return false;
+            }
+            // ignore reset_frame_context
+            bits.skipBits(2);
+        }
+
+        if (!intra_only) {
+            // Require first frame to be either KEY_FRAME or INTER_FRAME with intra_only set to true
+            return false;
+        }
+
+        // Check for sync code.
+        if (!isVp9SyncCode(bits)) {
+            return false;
+        }
+
+        if (profile > 0) {
+            if (!getVp9BitdepthChromaSubSampling(bits, profile, &bitDepth, &chromaSubsampling)) {
+                return false;
+            }
+        } else {
+            bitDepth = 8;
+            chromaSubsampling = 3;
+        }
+    }
+    int32_t csdSize = 6;
+    if (chromaSubsampling != -1) {
+        csdSize += 3;
+    }
+
+    // Create VP9 Codec Feature Metadata (CodecPrivate) that can be parsed
+    // https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate
+    sp<ABuffer> csd = sp<ABuffer>::make(csdSize);
+    uint8_t* csdData = csd->data();
+
+    *csdData++ = 0x01 /* FEATURE PROFILE */;
+    *csdData++ = 0x01 /* length */;
+    *csdData++ = profile;
+
+    *csdData++ = 0x03 /* FEATURE BITDEPTH */;
+    *csdData++ = 0x01 /* length */;
+    *csdData++ = bitDepth;
+
+    // csdSize more than 6 means chroma subsampling data was found.
+    if (csdSize > 6) {
+        *csdData++ = 0x04 /* FEATURE SUBSAMPLING */;
+        *csdData++ = 0x01 /* length */;
+        *csdData++ = chromaSubsampling;
+    }
+
+    AMediaFormat_setBuffer(meta, AMEDIAFORMAT_KEY_CSD_0, csd->data(), csd->size());
+    return true;
+}
+
 bool MakeAACCodecSpecificData(MetaDataBase &meta, const uint8_t *data, size_t size) {
     if (data == nullptr || size < 7) {
         return false;
diff --git a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
index dcaf27f..69cf21a 100644
--- a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
+++ b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
@@ -38,6 +38,8 @@
 void parseVorbisComment(
         AMediaFormat *fileMeta, const char *comment, size_t commentLength);
 
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size);
+
 }  // namespace android
 
 #endif  // META_DATA_UTILS_H_
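The buffer built by MakeVP9CodecSpecificData above is a sequence of (id, length, value) triples following the WebM VP9 Codec Feature Metadata (CodecPrivate) layout. A worked sketch of the nine-byte case, assuming a profile 2, 10-bit stream with a chroma subsampling value of 1 (all values illustrative):

    #include <cstdint>

    // Sketch of the 9-byte CodecPrivate produced above for an assumed
    // profile 2, 10-bit stream with chroma subsampling value 1.
    const uint8_t kExampleVp9CodecPrivate[] = {
        0x01, 0x01, 0x02,  // FEATURE PROFILE,     length 1, profile = 2
        0x03, 0x01, 0x0a,  // FEATURE BITDEPTH,    length 1, bit depth = 10
        0x04, 0x01, 0x01,  // FEATURE SUBSAMPLING, length 1, subsampling = 1
    };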
diff --git a/media/module/mpeg2ts/ATSParser.cpp b/media/module/mpeg2ts/ATSParser.cpp
index 86187bd..88c3cc2 100644
--- a/media/module/mpeg2ts/ATSParser.cpp
+++ b/media/module/mpeg2ts/ATSParser.cpp
@@ -440,6 +440,10 @@
         ATSParser::CADescriptor *caDescriptor) {
     bool found = false;
     while (infoLength > 2) {
+        if (br->numBitsLeft() < 16) {
+            ALOGE("Not enough data left in bitreader");
+            return false;
+        }
         unsigned descriptor_tag = br->getBits(8);
         ALOGV("      tag = 0x%02x", descriptor_tag);
 
@@ -452,6 +456,10 @@
         }
         if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
             found = true;
+            if (br->numBitsLeft() < 32) {
+                ALOGE("Not enough data left in bitreader");
+                return false;
+            }
             caDescriptor->mSystemID = br->getBits(16);
             caDescriptor->mPID = br->getBits(16) & 0x1fff;
             infoLength -= 4;
@@ -460,14 +468,24 @@
             break;
         } else {
             infoLength -= descriptor_length;
-            br->skipBits(descriptor_length * 8);
+            if (!br->skipBits(descriptor_length * 8)) {
+                ALOGE("Not enough data left in bitreader");
+                return false;
+            }
         }
     }
-    br->skipBits(infoLength * 8);
+    if (!br->skipBits(infoLength * 8)) {
+        ALOGE("Not enough data left in bitreader");
+        return false;
+    }
     return found;
 }
 
 status_t ATSParser::Program::parseProgramMap(ABitReader *br) {
+    if (br->numBitsLeft() < 10) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     unsigned table_id = br->getBits(8);
     ALOGV("  table_id = %u", table_id);
     if (table_id != 0x02u) {
@@ -482,6 +500,10 @@
     }
 
     br->skipBits(1);  // '0'
+    if (br->numBitsLeft() < 86) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     MY_LOGV("  reserved = %u", br->getBits(2));
 
     unsigned section_length = br->getBits(12);
@@ -526,6 +548,10 @@
 
     while (infoBytesRemaining >= 5) {
         StreamInfo info;
+        if (br->numBitsLeft() < 40) {
+            ALOGE("Not enough data left in bitreader!");
+            return ERROR_MALFORMED;
+        }
         info.mType = br->getBits(8);
         ALOGV("    stream_type = 0x%02x", info.mType);
         MY_LOGV("    reserved = %u", br->getBits(3));
@@ -545,6 +571,10 @@
         info.mAudioPresentations.clear();
         bool hasStreamCA = false;
         while (ES_info_length > 2 && infoBytesRemaining >= 0) {
+            if (br->numBitsLeft() < 16) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
             unsigned descriptor_tag = br->getBits(8);
             ALOGV("      tag = 0x%02x", descriptor_tag);
 
@@ -562,21 +592,39 @@
             if (descriptor_tag == DESCRIPTOR_DTS) {
                 info.mType = STREAMTYPE_DTS;
                 ES_info_length -= descriptor_length;
-                br->skipBits(descriptor_length * 8);
+                if (!br->skipBits(descriptor_length * 8)) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
             } else if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
                 hasStreamCA = true;
+                if (br->numBitsLeft() < 32) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
                 streamCA.mSystemID = br->getBits(16);
                 streamCA.mPID = br->getBits(16) & 0x1fff;
                 ES_info_length -= descriptor_length;
                 descriptor_length -= 4;
                 streamCA.mPrivateData.assign(br->data(), br->data() + descriptor_length);
-                br->skipBits(descriptor_length * 8);
+                if (!br->skipBits(descriptor_length * 8)) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
             } else if (info.mType == STREAMTYPE_PES_PRIVATE_DATA &&
                        descriptor_tag == DESCRIPTOR_DVB_EXTENSION && descriptor_length >= 1) {
+                if (br->numBitsLeft() < 8) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
                 unsigned descTagExt = br->getBits(8);
                 ALOGV("      tag_ext = 0x%02x", descTagExt);
                 ES_info_length -= descriptor_length;
                 descriptor_length--;
+                if (br->numBitsLeft() < (descriptor_length * 8)) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
                 // The AC4 descriptor is used in the PSI PMT to identify streams which carry AC4
                 // audio.
                 if (descTagExt == EXT_DESCRIPTOR_DVB_AC4) {
@@ -594,6 +642,10 @@
                     br->skipBits(descriptor_length * 8);
                 } else if (descTagExt == EXT_DESCRIPTOR_DVB_AUDIO_PRESELECTION &&
                            descriptor_length >= 1) {
+                    if (br->numBitsLeft() < 8) {
+                        ALOGE("Not enough data left in bitreader!");
+                        return ERROR_MALFORMED;
+                    }
                     // DVB BlueBook A038 Table 110
                     unsigned num_preselections = br->getBits(5);
                     br->skipBits(3);  // reserved
@@ -671,11 +723,17 @@
                         info.mAudioPresentations.push_back(std::move(ap));
                     }
                 } else {
-                    br->skipBits(descriptor_length * 8);
+                    if (!br->skipBits(descriptor_length * 8)) {
+                        ALOGE("Not enough data left in bitreader!");
+                        return ERROR_MALFORMED;
+                    }
                 }
             } else {
                 ES_info_length -= descriptor_length;
-                br->skipBits(descriptor_length * 8);
+                if (!br->skipBits(descriptor_length * 8)) {
+                    ALOGE("Not enough data left in bitreader!");
+                    return ERROR_MALFORMED;
+                }
             }
         }
         if (hasStreamCA && !mParser->mCasManager->addStream(
@@ -694,6 +752,10 @@
     if (infoBytesRemaining != 0) {
         ALOGW("Section data remains unconsumed");
     }
+    if (br->numBitsLeft() < 32) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     unsigned crc = br->getBits(32);
     if (crc != mPMT_CRC) {
         audioPresentationsChanged = true;
@@ -1261,6 +1323,10 @@
 status_t ATSParser::Stream::parsePES(ABitReader *br, SyncEvent *event) {
     const uint8_t *basePtr = br->data();
 
+    if (br->numBitsLeft() < 48) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     unsigned packet_startcode_prefix = br->getBits(24);
 
     ALOGV("packet_startcode_prefix = 0x%08x", packet_startcode_prefix);
@@ -1286,10 +1352,14 @@
             && stream_id != 0xff  // program_stream_directory
             && stream_id != 0xf2  // DSMCC
             && stream_id != 0xf8) {  // H.222.1 type E
-        if (br->getBits(2) != 2u) {
+        if (br->numBitsLeft() < 2 || br->getBits(2) != 2u) {
             return ERROR_MALFORMED;
         }
 
+        if (br->numBitsLeft() < 22) {
+            ALOGE("Not enough data left in bitreader!");
+            return ERROR_MALFORMED;
+        }
         unsigned PES_scrambling_control = br->getBits(2);
         ALOGV("PES_scrambling_control = %u", PES_scrambling_control);
 
@@ -1328,19 +1398,19 @@
                 return ERROR_MALFORMED;
             }
 
-            if (br->getBits(4) != PTS_DTS_flags) {
+            if (br->numBitsLeft() < 7 || br->getBits(4) != PTS_DTS_flags) {
                 return ERROR_MALFORMED;
             }
             PTS = ((uint64_t)br->getBits(3)) << 30;
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
             PTS |= ((uint64_t)br->getBits(15)) << 15;
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
             PTS |= br->getBits(15);
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
 
@@ -1353,20 +1423,20 @@
                     return ERROR_MALFORMED;
                 }
 
-                if (br->getBits(4) != 1u) {
+                if (br->numBitsLeft() < 7 || br->getBits(4) != 1u) {
                     return ERROR_MALFORMED;
                 }
 
                 DTS = ((uint64_t)br->getBits(3)) << 30;
-                if (br->getBits(1) != 1u) {
+                if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                     return ERROR_MALFORMED;
                 }
                 DTS |= ((uint64_t)br->getBits(15)) << 15;
-                if (br->getBits(1) != 1u) {
+                if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                     return ERROR_MALFORMED;
                 }
                 DTS |= br->getBits(15);
-                if (br->getBits(1) != 1u) {
+                if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
                     return ERROR_MALFORMED;
                 }
 
@@ -1381,22 +1451,30 @@
                 return ERROR_MALFORMED;
             }
 
+            if (br->numBitsLeft() < 5) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
             br->getBits(2);
 
             uint64_t ESCR = ((uint64_t)br->getBits(3)) << 30;
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
             ESCR |= ((uint64_t)br->getBits(15)) << 15;
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
             ESCR |= br->getBits(15);
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
 
             ALOGV("ESCR = %" PRIu64, ESCR);
+            if (br->numBitsLeft() < 10) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
             MY_LOGV("ESCR_extension = %u", br->getBits(9));
 
             if (br->getBits(1) != 1u) {
@@ -1411,18 +1489,25 @@
                 return ERROR_MALFORMED;
             }
 
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
+                return ERROR_MALFORMED;
+            }
+            if (br->numBitsLeft() < 22) {
+                ALOGE("Not enough data left in bitreader!");
                 return ERROR_MALFORMED;
             }
             MY_LOGV("ES_rate = %u", br->getBits(22));
-            if (br->getBits(1) != 1u) {
+            if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
                 return ERROR_MALFORMED;
             }
 
             optional_bytes_remaining -= 3;
         }
 
-        br->skipBits(optional_bytes_remaining * 8);
+        if (!br->skipBits(optional_bytes_remaining * 8)) {
+            ALOGE("Not enough data left in bitreader!");
+            return ERROR_MALFORMED;
+        }
 
         // ES data follows.
         int32_t pesOffset = br->data() - basePtr;
@@ -1450,7 +1535,10 @@
                     PTS_DTS_flags, PTS, DTS, PES_scrambling_control,
                     br->data(), dataLength, pesOffset, event);
 
-            br->skipBits(dataLength * 8);
+            if (!br->skipBits(dataLength * 8)) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
         } else {
             onPayloadData(
                     PTS_DTS_flags, PTS, DTS, PES_scrambling_control,
@@ -1465,15 +1553,13 @@
                     payloadSizeBits / 8, pesOffset);
         }
     } else if (stream_id == 0xbe) {  // padding_stream
-        if (PES_packet_length == 0u) {
+        if (PES_packet_length == 0u || !br->skipBits(PES_packet_length * 8)) {
             return ERROR_MALFORMED;
         }
-        br->skipBits(PES_packet_length * 8);
     } else {
-        if (PES_packet_length == 0u) {
+        if (PES_packet_length == 0u || !br->skipBits(PES_packet_length * 8)) {
             return ERROR_MALFORMED;
         }
-        br->skipBits(PES_packet_length * 8);
     }
 
     return OK;
@@ -1481,6 +1567,10 @@
 
 uint32_t ATSParser::Stream::getPesScramblingControl(
         ABitReader *br, int32_t *pesOffset) {
+    if (br->numBitsLeft() < 24) {
+        ALOGE("Not enough data left in bitreader!");
+        return 0;
+    }
     unsigned packet_startcode_prefix = br->getBits(24);
 
     ALOGV("packet_startcode_prefix = 0x%08x", packet_startcode_prefix);
@@ -1491,6 +1581,7 @@
     }
 
     if (br->numBitsLeft() < 48) {
+        ALOGE("Not enough data left in bitreader!");
         return 0;
     }
 
@@ -1987,12 +2078,20 @@
 }
 
 void ATSParser::parseProgramAssociationTable(ABitReader *br) {
+    if (br->numBitsLeft() < 8) {
+        ALOGE("Not enough data left in bitreader!");
+        return;
+    }
     unsigned table_id = br->getBits(8);
     ALOGV("  table_id = %u", table_id);
     if (table_id != 0x00u) {
         ALOGE("PAT data error!");
         return ;
     }
+    if (br->numBitsLeft() < 56) {
+        ALOGE("Not enough data left in bitreader!");
+        return;
+    }
     unsigned section_syntax_indictor = br->getBits(1);
     ALOGV("  section_syntax_indictor = %u", section_syntax_indictor);
 
@@ -2009,9 +2108,17 @@
     MY_LOGV("  section_number = %u", br->getBits(8));
     MY_LOGV("  last_section_number = %u", br->getBits(8));
 
+    // Ensure section_length is large enough that the unsigned subtraction below cannot wrap around.
+    if (section_length < 9) {
+        return;
+    }
     size_t numProgramBytes = (section_length - 5 /* header */ - 4 /* crc */);
 
     for (size_t i = 0; i < numProgramBytes / 4; ++i) {
+        if (br->numBitsLeft() < 32) {
+            ALOGE("Not enough data left in bitreader!");
+            return;
+        }
         unsigned program_number = br->getBits(16);
         ALOGV("    program_number = %u", program_number);
 
@@ -2049,6 +2156,10 @@
         }
     }
 
+    if (br->numBitsLeft() < 32) {
+        ALOGE("Not enough data left in bitreader!");
+        return;
+    }
     MY_LOGV("  CRC = 0x%08x", br->getBits(32));
 }
 
@@ -2070,9 +2181,16 @@
                 section->clear();
             }
 
+            if (br->numBitsLeft() < 8) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
             unsigned skip = br->getBits(8);
             section->setSkipBytes(skip + 1);  // skip filler bytes + pointer field itself
-            br->skipBits(skip * 8);
+            if (!br->skipBits(skip * 8)) {
+                ALOGE("Not enough data left in bitreader!");
+                return ERROR_MALFORMED;
+            }
         }
 
         if (br->numBitsLeft() % 8 != 0) {
@@ -2157,6 +2275,10 @@
 status_t ATSParser::parseAdaptationField(
         ABitReader *br, unsigned PID, unsigned *random_access_indicator) {
     *random_access_indicator = 0;
+    if (br->numBitsLeft() < 8) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     unsigned adaptation_field_length = br->getBits(8);
 
     if (adaptation_field_length > 0) {
@@ -2227,6 +2349,10 @@
 status_t ATSParser::parseTS(ABitReader *br, SyncEvent *event) {
     ALOGV("---");
 
+    if (br->numBitsLeft() < 32) {
+        ALOGE("Not enough data left in bitreader!");
+        return ERROR_MALFORMED;
+    }
     unsigned sync_byte = br->getBits(8);
     if (sync_byte != 0x47u) {
         ALOGE("[error] parseTS: return error as sync_byte=0x%x", sync_byte);
diff --git a/media/module/mpeg2ts/TEST_MAPPING b/media/module/mpeg2ts/TEST_MAPPING
index 9f4bbdf..536450f 100644
--- a/media/module/mpeg2ts/TEST_MAPPING
+++ b/media/module/mpeg2ts/TEST_MAPPING
@@ -1,9 +1,5 @@
-// frameworks/av/media/libstagefright/mpeg2ts
 {
-  // tests which require dynamic content
-  // invoke with: atest -- --enable-module-dynamic-download=true
-  // TODO(b/148094059): unit tests not allowed to download content
-  "dynamic-presubmit": [
+  "postsubmit": [
     { "name": "Mpeg2tsUnitTest" }
   ]
 }
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index d917772..80fe51a 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -132,6 +132,10 @@
 }
 
 MtpServer::~MtpServer() {
+    if (mHandle) {
+        delete mHandle;
+        mHandle = NULL;
+    }
 }
 
 void MtpServer::addStorage(MtpStorage* storage) {
diff --git a/media/mtp/tests/MtpFuzzer/Android.bp b/media/mtp/tests/MtpFuzzer/Android.bp
index acae06a..2e9c58b 100644
--- a/media/mtp/tests/MtpFuzzer/Android.bp
+++ b/media/mtp/tests/MtpFuzzer/Android.bp
@@ -29,7 +29,6 @@
         "liblog",
         "libutils",
     ],
-    static_libs: ["libc++fs",],
     cflags: [
         "-Wall",
         "-Wextra",
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 9ec7700..3d873df 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -192,7 +192,6 @@
     header_libs: [
         "libstagefright_headers",
         "libmedia_headers",
-        "libstagefright_headers",
     ],
 
     shared_libs: [
diff --git a/media/ndk/include/media/NdkMediaDataSource.h b/media/ndk/include/media/NdkMediaDataSource.h
index 197e202..def142c 100644
--- a/media/ndk/include/media/NdkMediaDataSource.h
+++ b/media/ndk/include/media/NdkMediaDataSource.h
@@ -49,16 +49,16 @@
 /*
  * AMediaDataSource's callbacks will be invoked on an implementation-defined thread
  * or thread pool. No guarantees are provided about which thread(s) will be used for
- * callbacks. For example, |close| can be invoked from a different thread than the
- * thread invoking |readAt|. As such, the Implementations of AMediaDataSource callbacks
+ * callbacks. For example, `close` can be invoked from a different thread than the
+ * thread invoking `readAt`. As such, implementations of AMediaDataSource callbacks
  * must be threadsafe.
  */
 
 /**
- * Called to request data from the given |offset|.
+ * Called to request data from the given `offset`.
  *
- * Implementations should should write up to |size| bytes into
- * |buffer|, and return the number of bytes written.
+ * Implementations should write up to `size` bytes into
+ * `buffer`, and return the number of bytes written.
  *
  * Return 0 if size is zero (thus no bytes are read).
  *
@@ -78,9 +78,9 @@
  * Called to close the data source, unblock reads, and release associated
  * resources.
  *
- * The NDK media framework guarantees that after the first |close| is
+ * The NDK media framework guarantees that after the first `close` is
  * called, no future callbacks will be invoked on the data source except
- * for |close| itself.
+ * for `close` itself.
  *
  * Closing a data source allows readAt calls that were blocked waiting
  * for I/O data to return promptly.
@@ -101,7 +101,7 @@
 
 /**
  * Called to get an estimate of the number of bytes that can be read from this data source
- * starting at |offset| without blocking for I/O.
+ * starting at `offset` without blocking for I/O.
  *
  * Return -1 when such an estimate is not possible.
  */
@@ -111,10 +111,10 @@
  * Create new media data source. Returns NULL if memory allocation
  * for the new data source object fails.
  *
- * Set the |uri| from which the data source will read,
+ * Set the `uri` from which the data source will read,
  * plus additional http headers when initiating the request.
  *
- * Headers will contain corresponding items from |key_values|
+ * Headers will contain corresponding items from `key_values`
  * in the following fashion:
  *
  * key_values[0]:key_values[1]
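Because `readAt` and `close` may arrive on different threads, a custom AMediaDataSource has to guard its own state. A minimal in-memory sketch of a thread-safe implementation; names are illustrative, and a complete source would also supply getSize and getAvailableSize callbacks:

    #include <cstdint>
    #include <cstring>
    #include <mutex>
    #include <media/NdkMediaDataSource.h>

    // Sketch of a thread-safe in-memory data source; readAt and close may be
    // called from different threads, so all state is guarded by one mutex.
    struct MemorySource {
        std::mutex lock;
        const uint8_t* data = nullptr;
        size_t size = 0;
        bool closed = false;
    };

    static ssize_t memReadAt(void* userdata, off64_t offset, void* buffer, size_t size) {
        auto* src = static_cast<MemorySource*>(userdata);
        std::lock_guard<std::mutex> guard(src->lock);
        if (src->closed || offset < 0 || (size_t)offset >= src->size) {
            return -1;
        }
        size_t available = src->size - (size_t)offset;
        size_t toCopy = size < available ? size : available;
        memcpy(buffer, src->data + offset, toCopy);
        return (ssize_t)toCopy;
    }

    static void memClose(void* userdata) {
        auto* src = static_cast<MemorySource*>(userdata);
        std::lock_guard<std::mutex> guard(src->lock);
        src->closed = true;
    }

    AMediaDataSource* makeMemorySource(MemorySource* src) {
        AMediaDataSource* source = AMediaDataSource_new();
        AMediaDataSource_setUserdata(source, src);
        AMediaDataSource_setReadAt(source, memReadAt);
        AMediaDataSource_setClose(source, memClose);
        return source;
    }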
diff --git a/media/psh_utils/Android.bp b/media/psh_utils/Android.bp
new file mode 100644
index 0000000..4662db8
--- /dev/null
+++ b/media/psh_utils/Android.bp
@@ -0,0 +1,47 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+// libraries that are included whole_static for test apps
+ndk_libs = [
+    "android.hardware.health-V3-ndk",
+    "android.hardware.power.stats-V1-ndk",
+]
+
+// Power, System, Health utils
+cc_library {
+    name: "libpshutils",
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+    srcs: [
+        "HealthStats.cpp",
+        "HealthStatsProvider.cpp",
+        "PowerStats.cpp",
+        "PowerStatsCollector.cpp",
+        "PowerStatsProvider.cpp",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+    ],
+    shared: {
+        shared_libs: ndk_libs,
+    },
+    static: {
+        whole_static_libs: ndk_libs,
+    },
+}
diff --git a/media/psh_utils/HealthStats.cpp b/media/psh_utils/HealthStats.cpp
new file mode 100644
index 0000000..5e767f6
--- /dev/null
+++ b/media/psh_utils/HealthStats.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/logging.h>
+#include <psh_utils/HealthStats.h>
+
+namespace android::media::psh_utils {
+
+template <typename T>
+const T& choose_voltage(const T& a, const T& b) {
+    return std::max(a, b);  // we use max here, could use avg.
+}
+
+std::string HealthStats::toString() const {
+    std::string result;
+    const float batteryVoltage = batteryVoltageMillivolts * 1e-3f;  // Volts
+    const float charge = batteryChargeCounterUah * (3600 * 1e-6);  // µAh -> Amp-seconds (Coulombs)
+    result.append("{Net Battery V: ")
+            .append(std::to_string(batteryVoltage))
+            .append(" J: ")
+            .append(std::to_string(charge))
+            .append("}");
+    return result;
+}
+
+std::string HealthStats::normalizedEnergy(double timeSec) const {
+    std::string result;
+    const float batteryVoltage = batteryVoltageMillivolts * 1e-3f;   // Volts
+    const float charge = -batteryChargeCounterUah * (3600 * 1e-6f);  // µAh -> Amp-seconds (Coulombs)
+    const float watts = charge * batteryVoltage / timeSec;
+    result.append("{Net Battery V: ")
+            .append(std::to_string(batteryVoltage))
+            .append(" J: ")
+            .append(std::to_string(charge))
+            .append(" W: ")
+            .append(std::to_string(watts))
+            .append("}");
+    return result;
+}
+
+HealthStats HealthStats::operator+=(const HealthStats& other) {
+    batteryVoltageMillivolts = choose_voltage(
+            batteryVoltageMillivolts, other.batteryVoltageMillivolts);
+    batteryFullChargeUah = std::max(batteryFullChargeUah, other.batteryFullChargeUah);
+    batteryChargeCounterUah += other.batteryChargeCounterUah;
+    return *this;
+}
+
+HealthStats HealthStats::operator-=(const HealthStats& other) {
+    batteryVoltageMillivolts = choose_voltage(
+            batteryVoltageMillivolts, other.batteryVoltageMillivolts);
+    batteryFullChargeUah = std::max(batteryFullChargeUah, other.batteryFullChargeUah);
+    batteryChargeCounterUah -= other.batteryChargeCounterUah;
+    return *this;
+}
+
+HealthStats HealthStats::operator+(const HealthStats& other) const {
+    HealthStats result = *this;
+    result += other;
+    return result;
+}
+
+HealthStats HealthStats::operator-(const HealthStats& other) const {
+    HealthStats result = *this;
+    result -= other;
+    return result;
+}
+
+} // namespace android::media::psh_utils
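
The charge and voltage fields above combine through plain unit conversion; a worked example with made-up values (not measured data):

    // Assumed example values, for illustration only.
    const double chargeDeltaUah = 2000;   // 2000 µAh drawn over the interval
    const double voltageMv = 3900;        // 3.9 V
    const double intervalSec = 60;        // 1 minute

    const double coulombs = chargeDeltaUah * 3600e-6;   // µAh -> A·s: 7.2 C
    const double joules = coulombs * voltageMv * 1e-3;  // C * V -> J: 28.08 J
    const double watts = joules / intervalSec;          // J / s -> W: 0.468 W

This is the same arithmetic normalizedEnergy() performs on the difference of two snapshots.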
diff --git a/media/psh_utils/HealthStatsProvider.cpp b/media/psh_utils/HealthStatsProvider.cpp
new file mode 100644
index 0000000..de72463
--- /dev/null
+++ b/media/psh_utils/HealthStatsProvider.cpp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PowerStatsProvider.h"
+#include <aidl/android/hardware/health/IHealth.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <psh_utils/ServiceSingleton.h>
+
+using ::aidl::android::hardware::health::HealthInfo;
+using ::aidl::android::hardware::health::IHealth;
+
+namespace android::media::psh_utils {
+
+static auto getHealthService() {
+    return getServiceSingleton<IHealth>();
+}
+
+status_t HealthStatsDataProvider::fill(PowerStats* stat) const {
+    if (stat == nullptr) return BAD_VALUE;
+    HealthStats& stats = stat->health_stats;
+    auto healthService = getHealthService();
+    if (healthService == nullptr) {
+        return NO_INIT;
+    }
+    HealthInfo healthInfo;
+    if (!healthService->getHealthInfo(&healthInfo).isOk()) {
+        LOG(ERROR) << __func__ << ": unable to get health info";
+        return INVALID_OPERATION;
+    }
+
+    stats.batteryVoltageMillivolts = healthInfo.batteryVoltageMillivolts;
+    stats.batteryFullChargeUah = healthInfo.batteryFullChargeUah;
+    stats.batteryChargeCounterUah = healthInfo.batteryChargeCounterUah;
+    return NO_ERROR;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerStats.cpp b/media/psh_utils/PowerStats.cpp
new file mode 100644
index 0000000..f8f87c5
--- /dev/null
+++ b/media/psh_utils/PowerStats.cpp
@@ -0,0 +1,283 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/logging.h>
+#include <audio_utils/clock.h>
+#include <psh_utils/PowerStats.h>
+
+namespace android::media::psh_utils {
+
+// Determine the best start time from a and b, which is
+// min(a, b) if both exist, otherwise the one that exists.
+template <typename T>
+const T& choose_best_start_time(const T& a, const T& b) {
+    if (a) {
+        return b ? std::min(a, b) : a;
+    } else {
+        return b;
+    }
+}
+
+// Subtract two time differences; if neither difference exists, fall back to the absolute times.
+template <typename T, typename U>
+const T sub_time_diff(const T& diff_a, const T& diff_b, const U& abs_c, const U& abs_d) {
+    if (diff_a) {
+        return diff_b ? (diff_a - diff_b) : diff_a;
+    } else if (diff_b) {
+        return diff_b;
+    } else {  // no difference exists, use absolute time.
+        return abs_c - abs_d;
+    }
+}
+
+std::string PowerStats::Metadata::toString() const {
+    return std::string("start_time_since_boot_ms: ").append(
+                    std::to_string(start_time_since_boot_ms))
+            .append(" start_time_monotonic_ms: ").append(std::to_string(start_time_monotonic_ms))
+            .append(audio_utils_time_string_from_ns(start_time_epoch_ms * 1'000'000).time)
+            .append(" duration_ms: ").append(std::to_string(duration_ms))
+            .append(" duration_monotonic_ms: ").append(std::to_string(duration_monotonic_ms));
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator+=(const Metadata& other) {
+    start_time_since_boot_ms = choose_best_start_time(
+            start_time_since_boot_ms, other.start_time_since_boot_ms);
+    start_time_epoch_ms = choose_best_start_time(
+            start_time_epoch_ms, other.start_time_epoch_ms);
+    start_time_monotonic_ms = choose_best_start_time(
+            start_time_monotonic_ms, other.start_time_monotonic_ms);
+    duration_ms += other.duration_ms;
+    duration_monotonic_ms += other.duration_monotonic_ms;
+    return *this;
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator-=(const Metadata& other) {
+    // here we calculate duration, if it makes sense.
+    duration_ms = sub_time_diff(duration_ms, other.duration_ms,
+                                start_time_since_boot_ms, other.start_time_since_boot_ms);
+    duration_monotonic_ms = sub_time_diff(
+            duration_monotonic_ms, other.duration_monotonic_ms,
+            start_time_monotonic_ms, other.start_time_monotonic_ms);
+    start_time_since_boot_ms = choose_best_start_time(
+            start_time_since_boot_ms, other.start_time_since_boot_ms);
+    start_time_epoch_ms = choose_best_start_time(
+            start_time_epoch_ms, other.start_time_epoch_ms);
+    start_time_monotonic_ms = choose_best_start_time(
+            start_time_monotonic_ms, other.start_time_monotonic_ms);
+    return *this;
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator+(const Metadata& other) const {
+    Metadata result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator-(const Metadata& other) const {
+    Metadata result = *this;
+    result -= other;
+    return result;
+}
+
+std::string PowerStats::StateResidency::toString() const {
+    return std::string(entity_name).append(state_name)
+            .append(" ").append(std::to_string(time_ms))
+            .append(" ").append(std::to_string(entry_count));
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator+=(const StateResidency& other) {
+    if (entity_name.empty()) entity_name = other.entity_name;
+    if (state_name.empty()) state_name = other.state_name;
+    time_ms += other.time_ms;
+    entry_count += other.entry_count;
+    return *this;
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator-=(const StateResidency& other) {
+    if (entity_name.empty()) entity_name = other.entity_name;
+    if (state_name.empty()) state_name = other.state_name;
+    time_ms -= other.time_ms;
+    entry_count -= other.entry_count;
+    return *this;
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator+(
+        const StateResidency& other) const {
+    StateResidency result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator-(
+        const StateResidency& other) const {
+    StateResidency result = *this;
+    result -= other;
+    return result;
+}
+
+std::string PowerStats::RailEnergy::toString() const {
+    return std::string(subsystem_name)
+            .append(rail_name)
+            .append(" ")
+            .append(std::to_string(energy_uws));
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator+=(const RailEnergy& other) {
+    if (subsystem_name.empty()) subsystem_name = other.subsystem_name;
+    if (rail_name.empty()) rail_name = other.rail_name;
+    energy_uws += other.energy_uws;
+    return *this;
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator-=(const RailEnergy& other) {
+    if (subsystem_name.empty()) subsystem_name = other.subsystem_name;
+    if (rail_name.empty()) rail_name = other.rail_name;
+    energy_uws -= other.energy_uws;
+    return *this;
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator+(const RailEnergy& other) const {
+    RailEnergy result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator-(const RailEnergy& other) const {
+    RailEnergy result = *this;
+    result -= other;
+    return result;
+}
+
+std::string PowerStats::toString(const std::string& prefix) const {
+    std::string result;
+    result.append(prefix).append(metadata.toString()).append("\n");
+    result.append(prefix).append(health_stats.toString()).append("\n");
+    for (const auto &residency: power_entity_state_residency) {
+        result.append(prefix).append(residency.toString()).append("\n");
+    }
+    for (const auto &energy: rail_energy) {
+        result.append(prefix).append(energy.toString()).append("\n");
+    }
+    return result;
+}
+
+std::string PowerStats::normalizedEnergy(const std::string& prefix) const {
+    if (metadata.duration_ms == 0) return {};
+
+    std::string result(prefix);
+    result.append(audio_utils_time_string_from_ns(
+            metadata.start_time_epoch_ms * 1'000'000).time);
+    result.append(" duration_boottime: ")
+            .append(std::to_string(metadata.duration_ms * 1e-3f))
+            .append(" duration_monotonic: ")
+            .append(std::to_string(metadata.duration_monotonic_ms * 1e-3f))
+            .append("\n");
+    if (health_stats.isValid()) {
+        result.append(prefix)
+                .append(health_stats.normalizedEnergy(metadata.duration_ms * 1e-3f)).append("\n");
+    }
+
+    // energy_uws is converted to ave W using recip time in us.
+    const float recipTime = 1e-3 / metadata.duration_ms;
+    int64_t total_energy = 0;
+    for (const auto& energy: rail_energy) {
+        total_energy += energy.energy_uws;
+        result.append(prefix).append(energy.subsystem_name)
+                .append(energy.rail_name)
+                .append(" ")
+                .append(std::to_string(energy.energy_uws * 1e-6))
+                .append(" ")
+                .append(std::to_string(energy.energy_uws * recipTime))
+                .append("\n");
+    }
+    if (total_energy != 0) {
+        result.append(prefix).append("total J and ave W: ")
+                .append(std::to_string(total_energy * 1e-6))
+                .append(" ")
+                .append(std::to_string(total_energy * recipTime))
+                .append("\n");
+    }
+    return result;
+}
+
+// seconds, joules, watts
+std::tuple<float, float, float> PowerStats::energyFrom(const std::string& railMatcher) const {
+    if (metadata.duration_ms == 0) return {};
+
+    // energy_uws is converted to ave W using recip time in us.
+    const float recipTime = 1e-3 / metadata.duration_ms;
+    int64_t total_energy = 0;
+    for (const auto& energy: rail_energy) {
+        if (energy.subsystem_name.find(railMatcher) != std::string::npos
+                || energy.rail_name.find(railMatcher) != std::string::npos) {
+            total_energy += energy.energy_uws;
+        }
+    }
+    return {metadata.duration_ms * 1e-3, total_energy * 1e-6, total_energy * recipTime};
+}
+
+PowerStats PowerStats::operator+=(const PowerStats& other) {
+    metadata += other.metadata;
+    health_stats += other.health_stats;
+    if (power_entity_state_residency.empty()) {
+        power_entity_state_residency = other.power_entity_state_residency;
+    } else {
+        for (size_t i = 0; i < power_entity_state_residency.size(); ++i) {
+            power_entity_state_residency[i] += other.power_entity_state_residency[i];
+        }
+    }
+    if (rail_energy.empty()) {
+        rail_energy = other.rail_energy;
+    } else {
+        for (size_t i = 0; i < rail_energy.size(); ++i) {
+            rail_energy[i] += other.rail_energy[i];
+        }
+    }
+    return *this;
+}
+
+PowerStats PowerStats::operator-=(const PowerStats& other) {
+    metadata -= other.metadata;
+    health_stats -= other.health_stats;
+    if (power_entity_state_residency.empty()) {
+        power_entity_state_residency = other.power_entity_state_residency;
+    } else {
+        for (size_t i = 0; i < power_entity_state_residency.size(); ++i) {
+            power_entity_state_residency[i] -= other.power_entity_state_residency[i];
+        }
+    }
+    if (rail_energy.empty()) {
+        rail_energy = other.rail_energy;
+    } else {
+        for (size_t i = 0; i < rail_energy.size(); ++i) {
+            rail_energy[i] -= other.rail_energy[i];
+        }
+    }
+    return *this;
+}
+
+PowerStats PowerStats::operator+(const PowerStats& other) const {
+    PowerStats result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats PowerStats::operator-(const PowerStats& other) const {
+    PowerStats result = *this;
+    result -= other;
+    return result;
+}
+
+} // namespace android::media::psh_utils
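
A sketch of the intended interval arithmetic: subtract an earlier snapshot from a later one, then aggregate the rails whose names contain a substring. The "CPU" matcher is only an example; actual rail names are vendor-specific.

    #include <android-base/logging.h>
    #include <psh_utils/PowerStats.h>

    using android::media::psh_utils::PowerStats;

    void reportCpuEnergy(const PowerStats& before, const PowerStats& after) {
        const PowerStats diff = after - before;  // durations and counters become deltas
        const auto [seconds, joules, watts] = diff.energyFrom("CPU");
        LOG(INFO) << "CPU over " << seconds << "s: " << joules << " J, " << watts << " W avg";
    }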
diff --git a/media/psh_utils/PowerStatsCollector.cpp b/media/psh_utils/PowerStatsCollector.cpp
new file mode 100644
index 0000000..e5bf2aa
--- /dev/null
+++ b/media/psh_utils/PowerStatsCollector.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/logging.h>
+#include <psh_utils/PowerStatsCollector.h>
+#include "PowerStatsProvider.h"
+#include <utils/Timers.h>
+
+namespace android::media::psh_utils {
+
+PowerStatsCollector::PowerStatsCollector() {
+    addProvider(std::make_unique<PowerEntityResidencyDataProvider>());
+    addProvider(std::make_unique<RailEnergyDataProvider>());
+    addProvider(std::make_unique<HealthStatsDataProvider>());
+}
+
+/* static */
+PowerStatsCollector& PowerStatsCollector::getCollector() {
+    [[clang::no_destroy]] static PowerStatsCollector psc;
+    return psc;
+}
+
+std::shared_ptr<const PowerStats> PowerStatsCollector::getStats(int64_t toleranceNs) {
+    // Check if there is a cached PowerStats result available.
+    // As toleranceNs may be different between callers, it may be that some callers
+    // are blocked on mMutexExclusiveFill for a new stats result, while other callers
+    // may find the current cached result acceptable (within toleranceNs).
+    if (toleranceNs > 0) {
+        auto result = checkLastStats(toleranceNs);
+        if (result) return result;
+    }
+
+    // Take the mMutexExclusiveFill to ensure only one thread is filling.
+    std::lock_guard lg1(mMutexExclusiveFill);
+    // As obtaining a new PowerStats snapshot might take some time,
+    // check again to see if another waiting thread filled the cached result for us.
+    if (toleranceNs > 0) {
+        auto result = checkLastStats(toleranceNs);
+        if (result) return result;
+    }
+    auto result = std::make_shared<PowerStats>();
+    (void)fill(result.get());
+    std::lock_guard lg2(mMutex);
+    mLastFetchNs = systemTime(SYSTEM_TIME_BOOTTIME);
+    mLastFetchStats = result;
+    return result;
+}
+
+std::shared_ptr<const PowerStats> PowerStatsCollector::checkLastStats(int64_t toleranceNs) const {
+    if (toleranceNs > 0) {
+        // see if we can return an old result.
+        std::lock_guard lg(mMutex);
+        if (mLastFetchStats && systemTime(SYSTEM_TIME_BOOTTIME) - mLastFetchNs < toleranceNs) {
+            return mLastFetchStats;
+        }
+    }
+    return {};
+}
+
+void PowerStatsCollector::addProvider(std::unique_ptr<PowerStatsProvider>&& powerStatsProvider) {
+    mPowerStatsProviders.emplace_back(std::move(powerStatsProvider));
+}
+
+int PowerStatsCollector::fill(PowerStats* stats) const {
+    if (!stats) {
+        LOG(ERROR) << __func__ << ": bad args; stats is null";
+        return 1;
+    }
+
+    for (const auto& provider : mPowerStatsProviders) {
+        (void) provider->fill(stats); // on error, we continue to proceed.
+    }
+
+    // Boot time advances like wall clock time (it includes suspend), but counts from boot.
+    stats->metadata.start_time_since_boot_ms = systemTime(SYSTEM_TIME_BOOTTIME) / 1'000'000;
+
+    // wall clock time
+    stats->metadata.start_time_epoch_ms = systemTime(SYSTEM_TIME_REALTIME) / 1'000'000;
+
+    // monotonic time follows boot time, but does not include any time suspended.
+    stats->metadata.start_time_monotonic_ms = systemTime() / 1'000'000;
+    return 0;
+}
+
+} // namespace android::media::psh_utils
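
A usage sketch of the caching behaviour above: callers that can tolerate slightly stale data pass a tolerance, so only one caller per window pays for the HAL round trips. The 100 ms tolerance is an arbitrary example.

    #include <psh_utils/PowerStatsCollector.h>

    using android::media::psh_utils::PowerStatsCollector;

    void samplePowerStats() {
        auto& collector = PowerStatsCollector::getCollector();

        // Tolerance 0 (the default) always triggers a fresh fill().
        auto fresh = collector.getStats();

        // Accept a snapshot up to 100 ms old; concurrent callers within the
        // window share the cached result instead of re-querying the HALs.
        constexpr int64_t kToleranceNs = 100'000'000;
        auto possiblyCached = collector.getStats(kToleranceNs);
    }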
diff --git a/media/psh_utils/PowerStatsProvider.cpp b/media/psh_utils/PowerStatsProvider.cpp
new file mode 100644
index 0000000..112c323
--- /dev/null
+++ b/media/psh_utils/PowerStatsProvider.cpp
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PowerStatsProvider.h"
+#include <aidl/android/hardware/power/stats/IPowerStats.h>
+#include <android-base/logging.h>
+#include <psh_utils/ServiceSingleton.h>
+#include <unordered_map>
+
+using ::aidl::android::hardware::power::stats::IPowerStats;
+
+namespace android::media::psh_utils {
+
+static auto getPowerStatsService() {
+    return getServiceSingleton<IPowerStats>();
+}
+
+status_t RailEnergyDataProvider::fill(PowerStats *stat) const {
+    if (stat == nullptr) return BAD_VALUE;
+    auto powerStatsService = getPowerStatsService();
+    if (powerStatsService == nullptr) {
+        return NO_INIT;
+    }
+
+    std::unordered_map<int32_t, ::aidl::android::hardware::power::stats::Channel> channelMap;
+    {
+        std::vector<::aidl::android::hardware::power::stats::Channel> channels;
+        if (!powerStatsService->getEnergyMeterInfo(&channels).isOk()) {
+            LOG(ERROR) << "unable to get energy meter info";
+            return INVALID_OPERATION;
+        }
+        for (auto& channel : channels) {
+            channelMap.emplace(channel.id, std::move(channel));
+        }
+    }
+
+    std::vector<::aidl::android::hardware::power::stats::EnergyMeasurement> measurements;
+    if (!powerStatsService->readEnergyMeter({}, &measurements).isOk()) {
+        LOG(ERROR) << "unable to get energy measurements";
+        return INVALID_OPERATION;
+    }
+
+    for (const auto& measurement : measurements) {
+        stat->rail_energy.emplace_back(
+            channelMap.at(measurement.id).subsystem,
+            channelMap.at(measurement.id).name,
+            measurement.energyUWs);
+    }
+
+    // Sort entries first by subsystem_name, then by rail_name.
+    // Sorting is needed to make interval processing efficient.
+    std::sort(stat->rail_energy.begin(), stat->rail_energy.end(),
+              [](const auto& a, const auto& b) {
+                  if (a.subsystem_name != b.subsystem_name) {
+                      return a.subsystem_name < b.subsystem_name;
+                  }
+                  return a.rail_name < b.rail_name;
+              });
+
+    return NO_ERROR;
+}
+
+status_t PowerEntityResidencyDataProvider::fill(PowerStats* stat) const {
+    if (stat == nullptr) return BAD_VALUE;
+    auto powerStatsService = getPowerStatsService();
+    if (powerStatsService == nullptr) {
+        return NO_INIT;
+    }
+
+    // these are based on entityId
+    std::unordered_map<int32_t, std::string> entityNames;
+    std::unordered_map<int32_t, std::unordered_map<int32_t, std::string>> stateNames;
+    std::vector<int32_t> powerEntityIds; // ids to use
+
+    {
+        std::vector<::aidl::android::hardware::power::stats::PowerEntity> entities;
+        if (!powerStatsService->getPowerEntityInfo(&entities).isOk()) {
+            LOG(ERROR) << __func__ << ": unable to get entity info";
+            return INVALID_OPERATION;
+        }
+
+        std::vector<std::string> powerEntityNames;
+        for (const auto& entity : entities) {
+            std::unordered_map<int32_t, std::string> states;
+            for (const auto& state : entity.states) {
+                states.emplace(state.id, state.name);
+            }
+
+            if (std::find(powerEntityNames.begin(), powerEntityNames.end(), entity.name) !=
+                powerEntityNames.end()) {
+                powerEntityIds.emplace_back(entity.id);
+            }
+            entityNames.emplace(entity.id, std::move(entity.name));
+            stateNames.emplace(entity.id, std::move(states));
+        }
+    }
+
+    std::vector<::aidl::android::hardware::power::stats::StateResidencyResult> results;
+    if (!powerStatsService->getStateResidency(powerEntityIds, &results).isOk()) {
+        LOG(ERROR) << __func__ << ": Unable to get state residency";
+        return INVALID_OPERATION;
+    }
+
+    for (const auto& result : results) {
+        for (const auto& curStateResidency : result.stateResidencyData) {
+            stat->power_entity_state_residency.emplace_back(
+                entityNames.at(result.id),
+                stateNames.at(result.id).at(curStateResidency.id),
+                static_cast<uint64_t>(curStateResidency.totalTimeInStateMs),
+                static_cast<uint64_t>(curStateResidency.totalStateEntryCount));
+        }
+    }
+
+    // Sort entries first by entity_name, then by state_name.
+    // Sorting is needed to make interval processing efficient.
+    std::sort(stat->power_entity_state_residency.begin(),
+              stat->power_entity_state_residency.end(),
+              [](const auto& a, const auto& b) {
+                  if (a.entity_name != b.entity_name) {
+                      return a.entity_name < b.entity_name;
+                  }
+                  return a.state_name < b.state_name;
+              });
+    return NO_ERROR;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerStatsProvider.h b/media/psh_utils/PowerStatsProvider.h
new file mode 100644
index 0000000..c3888ac
--- /dev/null
+++ b/media/psh_utils/PowerStatsProvider.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <psh_utils/PowerStatsCollector.h>
+
+namespace android::media::psh_utils {
+
+class RailEnergyDataProvider : public PowerStatsProvider {
+public:
+    status_t fill(PowerStats* stat) const override;
+};
+
+class PowerEntityResidencyDataProvider : public PowerStatsProvider {
+public:
+    status_t fill(PowerStats* stat) const override;
+};
+
+class HealthStatsDataProvider : public PowerStatsProvider {
+public:
+    status_t fill(PowerStats* stat) const override;
+};
+
+} // namespace android::media::psh_utils
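
Each provider isolates one HAL query behind the same interface. A hypothetical extra provider would follow the same shape; ThermalDataProvider below is illustrative only, and since addProvider() is private, real providers are registered in the PowerStatsCollector constructor.

    #include <psh_utils/PowerStatsCollector.h>

    namespace android::media::psh_utils {

    // Hypothetical provider, not part of this change.
    class ThermalDataProvider : public PowerStatsProvider {
    public:
        status_t fill(PowerStats* stat) const override {
            if (stat == nullptr) return BAD_VALUE;
            // A real provider would query its service here and append to *stat.
            // Returning an error merely skips this provider for the snapshot.
            return NO_ERROR;
        }
    };

    }  // namespace android::media::psh_utils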
diff --git a/media/psh_utils/benchmarks/Android.bp b/media/psh_utils/benchmarks/Android.bp
new file mode 100644
index 0000000..20efaa9
--- /dev/null
+++ b/media/psh_utils/benchmarks/Android.bp
@@ -0,0 +1,51 @@
+package {
+    default_team: "trendy_team_android_media_audio_framework",
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_benchmark {
+    name: "audio_powerstats_benchmark",
+
+    srcs: ["audio_powerstats_benchmark.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    static_libs: [
+        "libaudioutils",
+        "libpshutils",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+}
+
+cc_benchmark {
+    name: "audio_powerstatscollector_benchmark",
+
+    srcs: ["audio_powerstatscollector_benchmark.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    static_libs: [
+        "libaudioutils",
+        "libpshutils",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+}
diff --git a/media/psh_utils/benchmarks/audio_powerstats_benchmark.cpp b/media/psh_utils/benchmarks/audio_powerstats_benchmark.cpp
new file mode 100644
index 0000000..d3f815c
--- /dev/null
+++ b/media/psh_utils/benchmarks/audio_powerstats_benchmark.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "audio_powerstat_benchmark"
+#include <cutils/properties.h>
+#include <utils/Log.h>
+
+#include <psh_utils/PerformanceFixture.h>
+
+#include <algorithm>
+#include <android-base/strings.h>
+#include <random>
+#include <thread>
+#include <vector>
+
+float result = 0;
+
+using android::media::psh_utils::CoreClass;
+using android::media::psh_utils::CORE_LITTLE;
+using android::media::psh_utils::CORE_MID;
+using android::media::psh_utils::CORE_BIG;
+
+enum Direction {
+    DIRECTION_FORWARD,
+    DIRECTION_BACKWARD,
+    DIRECTION_RANDOM,
+};
+
+std::string toString(Direction direction) {
+    switch (direction) {
+        case DIRECTION_FORWARD: return "DIRECTION_FORWARD";
+        case DIRECTION_BACKWARD: return "DIRECTION_BACKWARD";
+        case DIRECTION_RANDOM: return "DIRECTION_RANDOM";
+        default: return "DIRECTION_UNKNOWN";
+    }
+}
+
+enum Content {
+    CONTENT_ZERO,
+    CONTENT_RANDOM,
+};
+
+std::string toString(Content content) {
+    switch (content) {
+        case CONTENT_ZERO: return "CONTENT_ZERO";
+        case CONTENT_RANDOM: return "CONTENT_RANDOM";
+        default: return "CONTENT_UNKNOWN";
+    }
+}
+
+class MemoryFixture : public android::media::psh_utils::PerformanceFixture {
+public:
+    void SetUp(benchmark::State& state) override {
+        mCount = state.range(0) / (sizeof(uint32_t) + sizeof(float));
+        state.SetComplexityN(mCount * 2);  // 2 accesses per iteration.
+
+        // create src distribution
+        mSource.resize(mCount);
+        const auto content = static_cast<Content>(state.range(3));
+        if (content == CONTENT_RANDOM) {
+            std::minstd_rand gen(mCount);
+            std::uniform_real_distribution<float> dis(-1.f, 1.f);
+            for (size_t i = 0; i < mCount; i++) {
+                mSource[i] = dis(gen);
+            }
+        }
+
+        // create direction
+        mIndex.resize(mCount);
+        const auto direction = static_cast<Direction>(state.range(2));
+        switch (direction) {
+            case DIRECTION_BACKWARD:
+                for (size_t i = 0; i < mCount; i++) {
+                    mIndex[i] = mCount - 1 - i;  // traverse in reverse order
+                }
+                break;
+            case DIRECTION_FORWARD:
+            case DIRECTION_RANDOM:
+                for (size_t i = 0; i < mCount; i++) {
+                    mIndex[i] = i;  // forward order; shuffled below for DIRECTION_RANDOM
+                }
+                if (direction == DIRECTION_RANDOM) {
+                    std::random_device rd;
+                    std::mt19937 g(rd());
+                    std::shuffle(mIndex.begin(), mIndex.end(), g);
+                }
+                break;
+        }
+
+        // set up the profiler
+        const auto coreClass = static_cast<CoreClass>(state.range(1));
+
+        // It would be best if we could override SetName() but it is too late at this point,
+        // so we set the state label here for clarity.
+        state.SetLabel(toString(coreClass).append("/")
+            .append(toString(direction)).append("/")
+            .append(toString(content)));
+
+        if (property_get_bool("persist.audio.benchmark_profile", false)) {
+            startProfiler(coreClass);
+        }
+    }
+    size_t mCount = 0;
+    std::vector<uint32_t> mIndex;
+    std::vector<float> mSource;
+};
+
+BENCHMARK_DEFINE_F(MemoryFixture, CacheAccess)(benchmark::State &state) {
+    float accum = 0;
+    while (state.KeepRunning()) {
+        for (size_t i = 0; i < mCount; ++i) {
+            accum += mSource[mIndex[i]];
+        }
+        benchmark::ClobberMemory();
+    }
+    result += accum; // not optimized
+}
+
+BENCHMARK_REGISTER_F(MemoryFixture, CacheAccess)->ArgsProduct({
+    benchmark::CreateRange(64, 64<<20, /* multi = */2),
+    {CORE_LITTLE, CORE_MID, CORE_BIG},
+    {DIRECTION_FORWARD, DIRECTION_RANDOM},
+    {CONTENT_RANDOM},
+});
+
+BENCHMARK_MAIN();
diff --git a/media/psh_utils/benchmarks/audio_powerstatscollector_benchmark.cpp b/media/psh_utils/benchmarks/audio_powerstatscollector_benchmark.cpp
new file mode 100644
index 0000000..9e581bc
--- /dev/null
+++ b/media/psh_utils/benchmarks/audio_powerstatscollector_benchmark.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "audio_token_benchmark"
+#include <utils/Log.h>
+
+#include <psh_utils/PowerStatsCollector.h>
+
+#include <benchmark/benchmark.h>
+
+/*
+ Pixel 8 Pro
+------------------------------------------------------------------------------------------
+ Benchmark                            Time                      CPU             Iteration
+------------------------------------------------------------------------------------------
+audio_powerstatscollector_benchmark:
+  #BM_StatsToleranceMs/0      1.2005660120994434E8 ns            2532739.72 ns          100
+  #BM_StatsToleranceMs/50        1281.095987079007 ns     346.0322183913503 ns      2022168
+  #BM_StatsToleranceMs/100       459.9668862534226 ns    189.47902626735942 ns      2891307
+  #BM_StatsToleranceMs/200       233.8438662484292 ns    149.84041813854736 ns      4407343
+  #BM_StatsToleranceMs/500      184.42197142314103 ns    144.86896036787098 ns      7295167
+*/
+
+// We check how expensive it is to query stats depending
+// on the tolerance to reuse the cached values.
+// A tolerance of 0 means we always fetch stats.
+static void BM_StatsToleranceMs(benchmark::State& state) {
+    auto& collector = android::media::psh_utils::PowerStatsCollector::getCollector();
+    const int64_t toleranceNs = state.range(0) * 1'000'000;
+    while (state.KeepRunning()) {
+        collector.getStats(toleranceNs);
+        benchmark::ClobberMemory();
+    }
+}
+
+// Here we test various time tolerances (given in milliseconds here)
+BENCHMARK(BM_StatsToleranceMs)->Arg(0)->Arg(50)->Arg(100)->Arg(200)->Arg(500);
+
+BENCHMARK_MAIN();
diff --git a/media/psh_utils/include/psh_utils/HealthStats.h b/media/psh_utils/include/psh_utils/HealthStats.h
new file mode 100644
index 0000000..d7a8d1a
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/HealthStats.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <tuple>
+
+namespace android::media::psh_utils {
+
+// From hardware/interfaces/health/aidl/android/hardware/health/HealthInfo.aidl
+
+struct HealthStats {
+    /**
+     * Instantaneous battery voltage in millivolts (mV).
+     *
+     * Historically, the unit of this field was microvolts (µV), but all
+     * clients and implementations use millivolts in practice, making it
+     * the de-facto standard.
+     */
+    double batteryVoltageMillivolts;
+    /**
+     * Battery charge value when it is considered to be "full" in µA-h
+     */
+    double batteryFullChargeUah;
+    /**
+     * Instantaneous battery capacity in µA-h
+     */
+    double batteryChargeCounterUah;
+
+    std::string normalizedEnergy(double time) const;
+
+    bool isValid() const { return batteryVoltageMillivolts > 0; }
+
+    // Returns {seconds, joules, watts} from battery counters
+    std::tuple<float, float, float> energyFrom(const std::string& s) const;
+    std::string toString() const;
+
+    HealthStats operator+=(const HealthStats& other);
+    HealthStats operator-=(const HealthStats& other);
+    HealthStats operator+(const HealthStats& other) const;
+    HealthStats operator-(const HealthStats& other) const;
+    bool operator==(const HealthStats& other) const = default;
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/PerformanceFixture.h b/media/psh_utils/include/psh_utils/PerformanceFixture.h
new file mode 100644
index 0000000..092a508
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/PerformanceFixture.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <audio_utils/threads.h>
+#include <benchmark/benchmark.h>
+#include <psh_utils/PowerStats.h>
+#include <psh_utils/PowerStatsCollector.h>
+
+#include <algorithm>
+#include <array>
+#include <future>
+
+namespace android::media::psh_utils {
+
+enum CoreClass {
+    CORE_LITTLE,
+    CORE_MID,
+    CORE_BIG,
+};
+
+inline std::string toString(CoreClass coreClass) {
+    switch (coreClass) {
+        case CORE_LITTLE: return "LITTLE";
+        case CORE_MID: return "MID";
+        case CORE_BIG: return "BIG";
+        default: return "UNKNOWN";
+    }
+}
+
+/**
+ * A benchmark fixture is used to specify benchmarks that have a custom SetUp() and
+ * TearDown().  This is **required** for performance testing, as a typical benchmark
+ * method **may be called several times** during a run.
+ *
+ * A fixture ensures that SetUp() and TearDown() and the resulting statistics accumulation
+ * is done only once.   Note: BENCHMARK(BM_func)->Setup(DoSetup)->Teardown(DoTeardown)
+ * does something similar, but it requires some singleton to contain the state properly.
+ */
+class PerformanceFixture : public benchmark::Fixture {
+public:
+    // call this to start the profiling
+    virtual void startProfiler(CoreClass coreClass) {
+        mCores = android::audio_utils::get_number_cpus();
+        if (mCores == 0) return;
+        mCoreClass = coreClass;
+        std::array<unsigned, 3> coreSelection{0U, mCores / 2 + 1, mCores - 1};
+        mCore = coreSelection[std::min((size_t)coreClass, std::size(coreSelection) - 1)];
+
+        auto& collector = android::media::psh_utils::PowerStatsCollector::getCollector();
+        mStartStats = collector.getStats();
+
+        const pid_t tid = gettid(); // us.
+
+        // Possibly change priority to improve benchmarking
+        // android::audio_utils::set_thread_priority(gettid(), 98);
+
+        android::audio_utils::set_thread_affinity(0 /* pid */, 1 << mCore);
+    }
+
+    void TearDown(benchmark::State &state) override {
+        const auto N = state.complexity_length_n();
+        state.counters["N"] = benchmark::Counter(N,
+                benchmark::Counter::kIsIterationInvariantRate, benchmark::Counter::OneK::kIs1024);
+        if (mStartStats) {
+            auto& collector = android::media::psh_utils::PowerStatsCollector::getCollector();
+            const auto stopStats = collector.getStats();
+            android::media::psh_utils::PowerStats diff = *stopStats - *mStartStats;
+            auto cpuEnergy = diff.energyFrom("CPU");
+            auto memEnergy = diff.energyFrom("MEM");
+
+            constexpr float kMwToW = 1e-3;
+            state.counters["WCPU"] = benchmark::Counter(std::get<2>(cpuEnergy) * kMwToW,
+                                                          benchmark::Counter::kDefaults,
+                                                          benchmark::Counter::OneK::kIs1000);
+            state.counters["WMem"] = benchmark::Counter(std::get<2>(memEnergy) * kMwToW,
+                                                          benchmark::Counter::kDefaults,
+                                                          benchmark::Counter::OneK::kIs1000);
+            state.counters["JCPU"] = benchmark::Counter(
+                    std::get<1>(cpuEnergy) / N / state.iterations(), benchmark::Counter::kDefaults,
+                    benchmark::Counter::OneK::kIs1000);
+            state.counters["JMem"] = benchmark::Counter(
+                    std::get<1>(memEnergy) / N / state.iterations(), benchmark::Counter::kDefaults,
+                    benchmark::Counter::OneK::kIs1000);
+        }
+    }
+
+protected:
+    // these are only initialized upon startProfiler.
+    unsigned mCores = 0;
+    int mCore = 0;
+    CoreClass mCoreClass = CORE_LITTLE;
+    std::shared_ptr<const android::media::psh_utils::PowerStats> mStartStats;
+};
+
+} // namespace android::media::psh_utils
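
A minimal benchmark built on this fixture; the busy loop is a stand-in workload, and the persist.audio.benchmark_profile gating mirrors the pattern used by the benchmarks in this change.

    #include <cutils/properties.h>
    #include <psh_utils/PerformanceFixture.h>

    using android::media::psh_utils::CoreClass;
    using android::media::psh_utils::CORE_BIG;

    class ExampleFixture : public android::media::psh_utils::PerformanceFixture {
    public:
        void SetUp(benchmark::State& state) override {
            state.SetComplexityN(1);  // TearDown() normalizes joules per N per iteration
            const auto coreClass = static_cast<CoreClass>(state.range(0));
            if (property_get_bool("persist.audio.benchmark_profile", false)) {
                startProfiler(coreClass);  // pin the thread and record the starting PowerStats
            }
        }
    };

    BENCHMARK_DEFINE_F(ExampleFixture, BusyLoop)(benchmark::State& state) {
        int64_t x = 0;
        while (state.KeepRunning()) {
            benchmark::DoNotOptimize(x += 1);
        }
    }

    BENCHMARK_REGISTER_F(ExampleFixture, BusyLoop)->Arg(CORE_BIG);

    BENCHMARK_MAIN();

When profiling is enabled, TearDown() adds the WCPU/WMem and JCPU/JMem counters computed from the PowerStats delta.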
diff --git a/media/psh_utils/include/psh_utils/PowerStats.h b/media/psh_utils/include/psh_utils/PowerStats.h
new file mode 100644
index 0000000..ae48606
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/PowerStats.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "HealthStats.h"
+
+#include <string>
+#include <tuple>
+#include <unordered_map>
+#include <vector>
+
+namespace android::media::psh_utils {
+
+// See powerstats_util.proto and powerstats_util.pb.h
+
+struct PowerStats {
+    struct Metadata {
+        // Represents the start time measured in milliseconds since boot of the
+        // interval or point in time when stats were gathered.
+        uint64_t start_time_since_boot_ms;
+
+        // Represents the start time measured in milliseconds since epoch of the
+        // interval or point in time when stats were gathered.
+        uint64_t start_time_epoch_ms;
+
+        // In monotonic clock.
+        uint64_t start_time_monotonic_ms;
+
+        // If PowerStats represent an interval, the duration field will be set to
+        // the millisecond duration of stats collection. It will be unset for point
+        // stats.
+        // This is in boottime.
+        uint64_t duration_ms;
+
+        // This is in monotonic time, and does not include suspend.
+        uint64_t duration_monotonic_ms;
+
+        std::string toString() const;
+
+        Metadata operator+=(const Metadata& other);
+        Metadata operator-=(const Metadata& other);
+        Metadata operator+(const Metadata& other) const;
+        Metadata operator-(const Metadata& other) const;
+        bool operator==(const Metadata& other) const = default;
+    };
+
+    struct StateResidency {
+        std::string entity_name;
+        std::string state_name;
+        uint64_t time_ms;
+        uint64_t entry_count;
+
+        std::string toString() const;
+
+        StateResidency operator+=(const StateResidency& other);
+        StateResidency operator-=(const StateResidency& other);
+        StateResidency operator+(const StateResidency& other) const;
+        StateResidency operator-(const StateResidency& other) const;
+        bool operator==(const StateResidency& other) const = default;
+    };
+
+    struct RailEnergy {
+        std::string subsystem_name;
+        std::string rail_name;
+        uint64_t energy_uws;
+
+        std::string toString() const;
+        RailEnergy operator+=(const RailEnergy& other);
+        RailEnergy operator-=(const RailEnergy& other);
+        RailEnergy operator+(const RailEnergy& other) const;
+        RailEnergy operator-(const RailEnergy& other) const;
+        bool operator==(const RailEnergy& other) const = default;
+    };
+
+    HealthStats health_stats;
+
+    std::string normalizedEnergy(const std::string& prefix = {}) const;
+
+    // Returns {seconds, joules, watts} from all rails containing a matching string.
+    std::tuple<float, float, float> energyFrom(const std::string& railMatcher) const;
+    std::string toString(const std::string& prefix = {}) const;
+
+    PowerStats operator+=(const PowerStats& other);
+    PowerStats operator-=(const PowerStats& other);
+    PowerStats operator+(const PowerStats& other) const;
+    PowerStats operator-(const PowerStats& other) const;
+    bool operator==(const PowerStats& other) const = default;
+
+    Metadata metadata{};
+    // These are sorted by name.
+    std::vector<StateResidency> power_entity_state_residency;
+    std::vector<RailEnergy> rail_energy;
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/PowerStatsCollector.h b/media/psh_utils/include/psh_utils/PowerStatsCollector.h
new file mode 100644
index 0000000..e3f8ea8
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/PowerStatsCollector.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "PowerStats.h"
+#include <android-base/thread_annotations.h>
+#include <memory>
+#include <utils/Errors.h> // status_t
+
+namespace android::media::psh_utils {
+
+// Internal providers that fill up the PowerStats state object.
+class PowerStatsProvider {
+public:
+    virtual ~PowerStatsProvider() = default;
+    virtual status_t fill(PowerStats* stat) const = 0;
+};
+
+class PowerStatsCollector {
+public:
+    // singleton getter
+    static PowerStatsCollector& getCollector();
+
+    // Returns a snapshot of the state.
+    // If toleranceNs > 0, we permit the use of a stale snapshot taken within that tolerance.
+    std::shared_ptr<const PowerStats> getStats(int64_t toleranceNs = 0)
+            EXCLUDES(mMutex, mMutexExclusiveFill);
+
+private:
+    PowerStatsCollector();  // use the singleton getter
+
+    // Returns non-empty PowerStats if we have a previous stats snapshot within toleranceNs.
+    std::shared_ptr<const PowerStats> checkLastStats(int64_t toleranceNs) const EXCLUDES(mMutex);
+    int fill(PowerStats* stats) const;
+    void addProvider(std::unique_ptr<PowerStatsProvider>&& powerStatsProvider);
+
+    mutable std::mutex mMutexExclusiveFill;
+    mutable std::mutex mMutex;
+    // addProvider is called in the ctor, so effectively const.
+    std::vector<std::unique_ptr<PowerStatsProvider>> mPowerStatsProviders;
+    int64_t mLastFetchNs GUARDED_BY(mMutex) = 0;
+    std::shared_ptr<const PowerStats> mLastFetchStats GUARDED_BY(mMutex);
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/ServiceSingleton.h b/media/psh_utils/include/psh_utils/ServiceSingleton.h
new file mode 100644
index 0000000..d0cd6d2
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/ServiceSingleton.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_manager.h>
+#include <android-base/thread_annotations.h>
+#include <mutex>
+#include <utils/Log.h>
+#include <utils/Timers.h>
+
+namespace android::media::psh_utils {
+
+struct DefaultServiceTraits {
+    static constexpr int64_t kThresholdRetryNs = 1'000'000'000;
+    static constexpr int64_t kMaxRetries = 5;
+    static constexpr const char* kServiceVersion = "/default";
+    static constexpr bool kShowLog = true;
+};
+
+template<typename Service, typename ServiceTraits = DefaultServiceTraits>
+std::shared_ptr<Service> getServiceSingleton() {
+    [[clang::no_destroy]] static constinit std::mutex m;
+    [[clang::no_destroy]] static constinit std::shared_ptr<Service> service GUARDED_BY(m);
+    static int64_t nextTryNs GUARDED_BY(m) = 0;
+    static int64_t tries GUARDED_BY(m) = 0;
+
+    std::lock_guard l(m);
+    if (service
+            || tries > ServiceTraits::kMaxRetries  // tried too many times.
+            || systemTime(SYSTEM_TIME_BOOTTIME) < nextTryNs) {  // retrying too soon.
+        return service;
+    }
+
+    const auto serviceName = std::string(Service::descriptor)
+            .append(ServiceTraits::kServiceVersion);
+    service = Service::fromBinder(
+            ::ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
+
+    if (!service) {
+        // If failed, set a time limit before retry.
+        // No need to log an error, it is already done.
+        nextTryNs = systemTime(SYSTEM_TIME_BOOTTIME) + ServiceTraits::kThresholdRetryNs;
+        ALOGV_IF(ServiceTraits::kShowLog, "service:%s  retries:%lld of %lld  nextTryNs:%lld",
+                Service::descriptor, (long long)tries,
+                (long long)ServiceTraits::kMaxRetries, (long long)nextTryNs);
+        ++tries;
+    }
+
+    return service;
+}
+
+} // namespace android::media::psh_utils
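
A usage sketch of the traits parameter: the defaults can be shadowed to tune retry behaviour. QuietHealthTraits is illustrative only.

    #include <aidl/android/hardware/health/IHealth.h>
    #include <psh_utils/ServiceSingleton.h>

    using ::aidl::android::hardware::health::IHealth;
    using android::media::psh_utils::DefaultServiceTraits;
    using android::media::psh_utils::getServiceSingleton;

    // Back off for 5 seconds between retries and keep the log quiet.
    struct QuietHealthTraits : DefaultServiceTraits {
        static constexpr int64_t kThresholdRetryNs = 5'000'000'000;
        static constexpr bool kShowLog = false;
    };

    std::shared_ptr<IHealth> getQuietHealthService() {
        return getServiceSingleton<IHealth, QuietHealthTraits>();
    }

Note that each distinct traits type instantiates its own cached service pointer and retry state.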
diff --git a/media/psh_utils/tests/Android.bp b/media/psh_utils/tests/Android.bp
new file mode 100644
index 0000000..74589f8
--- /dev/null
+++ b/media/psh_utils/tests/Android.bp
@@ -0,0 +1,26 @@
+package {
+    default_team: "trendy_team_media_framework_audio",
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "powerstats_collector_tests",
+    srcs: [
+        "powerstats_collector_tests.cpp",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    static_libs: [
+        "libpshutils",
+    ],
+}
diff --git a/media/psh_utils/tests/powerstats_collector_tests.cpp b/media/psh_utils/tests/powerstats_collector_tests.cpp
new file mode 100644
index 0000000..35c264a
--- /dev/null
+++ b/media/psh_utils/tests/powerstats_collector_tests.cpp
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <psh_utils/PowerStatsCollector.h>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android::media::psh_utils;
+
+template <typename T>
+void inRange(const T& a, const T& b, const T& c) {
+    ASSERT_GE(a, std::min(b, c));
+    ASSERT_LE(a, std::max(b, c));
+}
+
+TEST(powerstat_collector_tests, basic) {
+    auto& psc = PowerStatsCollector::getCollector();
+
+    // This test is used for debugging the string through logcat; we only validate a non-empty string.
+    auto powerStats = psc.getStats();
+    ALOGD("%s: %s", __func__, powerStats->toString().c_str());
+    EXPECT_FALSE(powerStats->toString().empty());
+}
+
+TEST(powerstat_collector_tests, metadata) {
+    PowerStats ps1, ps2;
+
+    constexpr uint64_t kDurationMs1 = 5;
+    constexpr uint64_t kDurationMs2 = 10;
+    ps1.metadata.duration_ms = kDurationMs1;
+    ps2.metadata.duration_ms = kDurationMs2;
+
+    constexpr uint64_t kDurationMonotonicMs1 = 3;
+    constexpr uint64_t kDurationMonotonicMs2 = 9;
+    ps1.metadata.duration_monotonic_ms = kDurationMonotonicMs1;
+    ps2.metadata.duration_monotonic_ms = kDurationMonotonicMs2;
+
+    constexpr uint64_t kStartTimeSinceBootMs1 = 1616;
+    constexpr uint64_t kStartTimeEpochMs1 = 1121;
+    constexpr uint64_t kStartTimeMonotonicMs1 = 1525;
+    constexpr uint64_t kStartTimeSinceBootMs2 = 2616;
+    constexpr uint64_t kStartTimeEpochMs2 = 2121;
+    constexpr uint64_t kStartTimeMonotonicMs2 = 2525;
+
+    ps1.metadata.start_time_since_boot_ms = kStartTimeSinceBootMs1;
+    ps1.metadata.start_time_epoch_ms = kStartTimeEpochMs1;
+    ps1.metadata.start_time_monotonic_ms = kStartTimeMonotonicMs1;
+    ps2.metadata.start_time_since_boot_ms = kStartTimeSinceBootMs2;
+    ps2.metadata.start_time_epoch_ms = kStartTimeEpochMs2;
+    ps2.metadata.start_time_monotonic_ms = kStartTimeMonotonicMs2;
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kDurationMs1 + kDurationMs2,
+            ps3.metadata.duration_ms);
+    EXPECT_EQ(kDurationMonotonicMs1 + kDurationMonotonicMs2,
+            ps3.metadata.duration_monotonic_ms);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.metadata.start_time_since_boot_ms,
+            kStartTimeSinceBootMs1, kStartTimeSinceBootMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.metadata.start_time_epoch_ms,
+            kStartTimeEpochMs1, kStartTimeEpochMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.metadata.start_time_monotonic_ms,
+            kStartTimeMonotonicMs1, kStartTimeMonotonicMs2));
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kDurationMs2 - kDurationMs1,
+            ps5.metadata.duration_ms);
+    EXPECT_EQ(kDurationMonotonicMs2 - kDurationMonotonicMs1,
+            ps5.metadata.duration_monotonic_ms);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.metadata.start_time_since_boot_ms,
+            kStartTimeSinceBootMs1, kStartTimeSinceBootMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.metadata.start_time_epoch_ms,
+            kStartTimeEpochMs1, kStartTimeEpochMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.metadata.start_time_monotonic_ms,
+            kStartTimeMonotonicMs1, kStartTimeMonotonicMs2));
+}
+
+TEST(powerstat_collector_tests, state_residency) {
+    PowerStats ps1, ps2;
+
+    constexpr uint64_t kTimeMs1 = 5;
+    constexpr uint64_t kTimeMs2 = 10;
+    constexpr uint64_t kEntryCount1 = 15;
+    constexpr uint64_t kEntryCount2 = 18;
+
+    ps1.power_entity_state_residency.emplace_back(
+            PowerStats::StateResidency{"", "", kTimeMs1, kEntryCount1});
+    ps2.power_entity_state_residency.emplace_back(
+            PowerStats::StateResidency{"", "", kTimeMs2, kEntryCount2});
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kTimeMs1 + kTimeMs2,
+            ps3.power_entity_state_residency[0].time_ms);
+    EXPECT_EQ(kEntryCount1 + kEntryCount2,
+            ps3.power_entity_state_residency[0].entry_count);
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kTimeMs2 - kTimeMs1,
+            ps5.power_entity_state_residency[0].time_ms);
+    EXPECT_EQ(kEntryCount2 - kEntryCount1,
+            ps5.power_entity_state_residency[0].entry_count);
+}
+
+TEST(powerstat_collector_tests, rail_energy) {
+    PowerStats ps1, ps2;
+
+    constexpr uint64_t kEnergyUws1 = 5;
+    constexpr uint64_t kEnergyUws2 = 10;
+
+    ps1.rail_energy.emplace_back(
+            PowerStats::RailEnergy{"", "", kEnergyUws1});
+    ps2.rail_energy.emplace_back(
+            PowerStats::RailEnergy{"", "", kEnergyUws2});
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kEnergyUws1 + kEnergyUws2,
+            ps3.rail_energy[0].energy_uws);
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kEnergyUws2 - kEnergyUws1,
+            ps5.rail_energy[0].energy_uws);
+}
+
+TEST(powerstat_collector_tests, health_stats) {
+    PowerStats ps1, ps2;
+
+    constexpr double kBatteryChargeCounterUah1 = 21;
+    constexpr double kBatteryChargeCounterUah2 = 25;
+    ps1.health_stats.batteryChargeCounterUah = kBatteryChargeCounterUah1;
+    ps2.health_stats.batteryChargeCounterUah = kBatteryChargeCounterUah2;
+
+    constexpr double kBatteryFullChargeUah1 = 32;
+    constexpr double kBatteryFullChargeUah2 = 33;
+    ps1.health_stats.batteryFullChargeUah = kBatteryFullChargeUah1;
+    ps2.health_stats.batteryFullChargeUah = kBatteryFullChargeUah2;
+
+    constexpr double kBatteryVoltageMillivolts1 = 42;
+    constexpr double kBatteryVoltageMillivolts2 = 43;
+    ps1.health_stats.batteryVoltageMillivolts = kBatteryVoltageMillivolts1;
+    ps2.health_stats.batteryVoltageMillivolts = kBatteryVoltageMillivolts2;
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kBatteryChargeCounterUah1 + kBatteryChargeCounterUah2,
+              ps3.health_stats.batteryChargeCounterUah);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.health_stats.batteryFullChargeUah,
+            kBatteryFullChargeUah1, kBatteryFullChargeUah2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.health_stats.batteryVoltageMillivolts,
+            kBatteryVoltageMillivolts1, kBatteryVoltageMillivolts2));
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kBatteryChargeCounterUah2 - kBatteryChargeCounterUah1,
+              ps5.health_stats.batteryChargeCounterUah);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.health_stats.batteryFullChargeUah,
+            kBatteryFullChargeUah1, kBatteryFullChargeUah2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.health_stats.batteryVoltageMillivolts,
+            kBatteryVoltageMillivolts1, kBatteryVoltageMillivolts2));
+}
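The arithmetic verified above (ps1 + ps2 == ps2 + ps1, and ps2 - ps1 recovering the deltas) only requires element-wise accumulation of the numeric fields while the identifier strings pass through unchanged. A hedged sketch of that shape for the rail-energy case, with hypothetical names for the two string fields (the real operators come from the psh_utils PowerStats implementation):

    // Hypothetical sketch, not the psh_utils implementation.
    #include <cstdint>
    #include <string>

    struct RailEnergySketch {
        std::string subsystem;  // hypothetical name for the first string field
        std::string rail;       // hypothetical name for the second string field
        uint64_t energy_uws;    // matches the field the test reads back
    };

    // Identifiers carry through; only the energy accumulates, which is why
    // addition is commutative in the test above.
    inline RailEnergySketch operator+(const RailEnergySketch& a, const RailEnergySketch& b) {
        return {a.subsystem, a.rail, a.energy_uws + b.energy_uws};
    }

    inline RailEnergySketch operator-(const RailEnergySketch& a, const RailEnergySketch& b) {
        return {a.subsystem, a.rail, a.energy_uws - b.energy_uws};
    }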
diff --git a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
index 6539f24..f9a6b1c 100644
--- a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
+++ b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
@@ -106,7 +106,7 @@
         work->input.ordinal.frameIndex = mNumInputFrame;
         work->input.buffers.clear();
         int size = frameInfo[mNumInputFrame].size;
-        int alignedSize = ALIGN(size, PAGE_SIZE);
+        int alignedSize = ALIGN(size, getpagesize());
         if (size) {
             std::shared_ptr<C2LinearBlock> block;
             status = mLinearPool->fetchLinearBlock(
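The one-line change above replaces the compile-time PAGE_SIZE constant with the runtime page size, so the aligned block size remains correct on devices built with 16 KB pages. A small, hedged sketch of the same round-up, assuming ALIGN is the usual power-of-two round-up macro used by this file:

    // Hedged sketch: round a frame size up to the runtime page size.
    #include <unistd.h>
    #include <cstddef>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

    inline size_t alignToPage(size_t size) {
        const size_t page = static_cast<size_t>(getpagesize());
        return ALIGN(size, page);  // 5000 -> 8192 with 4 KB pages, 16384 with 16 KB pages
    }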
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 5b7319a..e340b40 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -113,6 +113,7 @@
 
     export_shared_lib_headers: [
         "libpermission",
+        "packagemanager_aidl-cpp",
     ],
 
     required: [
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 2946398..ffcde42 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
 #define LOG_TAG "ServiceUtilities"
 
 #include <audio_utils/clock.h>
@@ -27,7 +28,6 @@
 #include <media/AidlConversionUtil.h>
 #include <android/content/AttributionSourceState.h>
 
-#include <com_android_media_audio.h>
 #include <iterator>
 #include <algorithm>
 #include <pwd.h>
@@ -47,7 +47,6 @@
 static const String16 sModifyPhoneState("android.permission.MODIFY_PHONE_STATE");
 static const String16 sModifyAudioRouting("android.permission.MODIFY_AUDIO_ROUTING");
 static const String16 sCallAudioInterception("android.permission.CALL_AUDIO_INTERCEPTION");
-static const String16 sAndroidPermissionBluetoothConnect("android.permission.BLUETOOTH_CONNECT");
 
 static String16 resolveCallingPackage(PermissionController& permissionController,
         const std::optional<String16> opPackageName, uid_t uid) {
@@ -86,7 +85,7 @@
 }
 
 std::optional<AttributionSourceState> resolveAttributionSource(
-        const AttributionSourceState& callerAttributionSource) {
+        const AttributionSourceState& callerAttributionSource, const uint32_t virtualDeviceId) {
     AttributionSourceState nextAttributionSource = callerAttributionSource;
 
     if (!nextAttributionSource.packageName.has_value()) {
@@ -101,6 +100,7 @@
             return std::nullopt;
         }
     }
+    nextAttributionSource.deviceId = virtualDeviceId;
 
     AttributionSourceState myAttributionSource;
     myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
@@ -109,13 +109,15 @@
     // audioserver to the app ops system
     static sp<BBinder> appOpsToken = sp<BBinder>::make();
     myAttributionSource.token = appOpsToken;
+    myAttributionSource.deviceId = virtualDeviceId;
     myAttributionSource.next.push_back(nextAttributionSource);
 
     return std::optional<AttributionSourceState>{myAttributionSource};
 }
 
-static bool checkRecordingInternal(const AttributionSourceState& attributionSource,
-        const String16& msg, bool start, audio_source_t source) {
+static bool checkRecordingInternal(const AttributionSourceState& attributionSource,
+        const uint32_t virtualDeviceId,
+        const String16& msg, bool start, audio_source_t source) {
     // Okay to not track in app ops as audio server or media server is us and if
     // device is rooted security model is considered compromised.
     // system_server loses its RECORD_AUDIO permission when a secondary
@@ -127,8 +129,8 @@
     // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
     // may open a record track on behalf of a client. Note that pid may be a tid.
     // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
-    const std::optional<AttributionSourceState> resolvedAttributionSource =
-            resolveAttributionSource(attributionSource);
+    std::optional<AttributionSourceState> resolvedAttributionSource =
+            resolveAttributionSource(attributionSource, virtualDeviceId);
     if (!resolvedAttributionSource.has_value()) {
         return false;
     }
@@ -150,16 +152,30 @@
     return permitted;
 }
 
-bool recordingAllowed(const AttributionSourceState& attributionSource, audio_source_t source) {
-    return checkRecordingInternal(attributionSource, String16(), /*start*/ false, source);
+static constexpr int DEVICE_ID_DEFAULT = 0;
+
+bool recordingAllowed(const AttributionSourceState& attributionSource, audio_source_t source) {
+    return checkRecordingInternal(attributionSource, DEVICE_ID_DEFAULT, String16(), /*start*/ false,
+                                  source);
 }
 
-bool startRecording(const AttributionSourceState& attributionSource, const String16& msg,
-        audio_source_t source) {
-    return checkRecordingInternal(attributionSource, msg, /*start*/ true, source);
+bool recordingAllowed(const AttributionSourceState& attributionSource,
+                      const uint32_t virtualDeviceId,
+                      audio_source_t source) {
+    return checkRecordingInternal(attributionSource, virtualDeviceId,
+                                  String16(), /*start*/ false, source);
 }
 
-void finishRecording(const AttributionSourceState& attributionSource, audio_source_t source) {
+bool startRecording(const AttributionSourceState& attributionSource,
+                    const uint32_t virtualDeviceId,
+                    const String16& msg,
+                    audio_source_t source) {
+    return checkRecordingInternal(attributionSource, virtualDeviceId, msg, /*start*/ true,
+                                  source);
+}
+
+void finishRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
+                     audio_source_t source) {
     // Okay to not track in app ops as audio server is us and if
     // device is rooted security model is considered compromised.
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
@@ -169,7 +185,7 @@
     // may open a record track on behalf of a client. Note that pid may be a tid.
     // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
     const std::optional<AttributionSourceState> resolvedAttributionSource =
-            resolveAttributionSource(attributionSource);
+            resolveAttributionSource(attributionSource, virtualDeviceId);
     if (!resolvedAttributionSource.has_value()) {
         return;
     }
@@ -270,7 +286,7 @@
 bool modifyAudioRoutingAllowed(const AttributionSourceState& attributionSource) {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
-    if (isAudioServerUid(IPCThreadState::self()->getCallingUid())) return true;
+    if (isAudioServerUid(uid)) return true;
     // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
     bool ok = PermissionCache::checkPermission(sModifyAudioRouting, pid, uid);
     if (!ok) ALOGE("%s(): android.permission.MODIFY_AUDIO_ROUTING denied for uid %d",
@@ -285,7 +301,7 @@
 bool modifyDefaultAudioEffectsAllowed(const AttributionSourceState& attributionSource) {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
-    if (isAudioServerUid(IPCThreadState::self()->getCallingUid())) return true;
+    if (isAudioServerUid(uid)) return true;
 
     static const String16 sModifyDefaultAudioEffectsAllowed(
             "android.permission.MODIFY_DEFAULT_AUDIO_EFFECTS");
@@ -376,52 +392,6 @@
     return NO_ERROR;
 }
 
-/**
- * Determines if the MAC address in Bluetooth device descriptors returned by APIs of
- * a native audio service (audio flinger, audio policy) must be anonymized.
- * MAC addresses returned to system server or apps with BLUETOOTH_CONNECT permission
- * are not anonymized.
- *
- * @param attributionSource The attribution source of the calling app.
- * @param caller string identifying the caller for logging.
- * @return true if the MAC addresses must be anonymized, false otherwise.
- */
-bool mustAnonymizeBluetoothAddress(
-        const AttributionSourceState& attributionSource, const String16& caller) {
-    if (!com::android::media::audio::bluetooth_mac_address_anonymization()) {
-        return false;
-    }
-
-    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
-    if (isAudioServerOrSystemServerUid(uid)) {
-        return false;
-    }
-    const std::optional<AttributionSourceState> resolvedAttributionSource =
-            resolveAttributionSource(attributionSource);
-    if (!resolvedAttributionSource.has_value()) {
-        return true;
-    }
-    permission::PermissionChecker permissionChecker;
-    return permissionChecker.checkPermissionForPreflightFromDatasource(
-            sAndroidPermissionBluetoothConnect, resolvedAttributionSource.value(), caller,
-            AppOpsManager::OP_BLUETOOTH_CONNECT)
-                != permission::PermissionChecker::PERMISSION_GRANTED;
-}
-
-/**
- * Modifies the passed MAC address string in place for consumption by unprivileged clients.
- * the string is assumed to have a valid MAC address format.
- * the anonymzation must be kept in sync with toAnonymizedAddress() in BluetoothUtils.java
- *
- * @param address input/output the char string contining the MAC address to anonymize.
- */
-void anonymizeBluetoothAddress(char *address) {
-    if (address == nullptr || strlen(address) != strlen("AA:BB:CC:DD:EE:FF")) {
-        return;
-    }
-    memcpy(address, "XX:XX:XX:XX", strlen("XX:XX:XX:XX"));
-}
-
 sp<content::pm::IPackageManagerNative> MediaPackageManager::retrievePackageManager() {
     const sp<IServiceManager> sm = defaultServiceManager();
     if (sm == nullptr) {
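With the changes above, every recording permission check carries a virtual device id: the two-argument recordingAllowed overload keeps legacy callers working by forwarding DEVICE_ID_DEFAULT (0), while device-aware callers thread the id through start and finish so app-ops are attributed to the correct device. A hedged usage sketch (only the functions shown in this file are assumed; the surrounding capture logic is illustrative):

    // Illustrative only: a capture path that forwards the virtual device id.
    #include <mediautils/ServiceUtilities.h>
    #include <system/audio.h>
    #include <utils/String16.h>

    using android::content::AttributionSourceState;

    bool captureOnce(const AttributionSourceState& attr, uint32_t virtualDeviceId) {
        if (!android::recordingAllowed(attr, virtualDeviceId, AUDIO_SOURCE_MIC)) {
            return false;  // permission or app-op denied for this device
        }
        if (!android::startRecording(attr, virtualDeviceId,
                                     android::String16("captureOnce"), AUDIO_SOURCE_MIC)) {
            return false;  // app-op could not be noted as started
        }
        // ... read audio frames ...
        android::finishRecording(attr, virtualDeviceId, AUDIO_SOURCE_MIC);
        return true;
    }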
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index ec68de7..658191e 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -23,6 +23,7 @@
 #include <android-base/logging.h>
 #include <android-base/strings.h>
 #include <audio_utils/clock.h>
+#include <cutils/properties.h>
 #include <mediautils/EventLog.h>
 #include <mediautils/FixedString.h>
 #include <mediautils/MethodStatistics.h>
@@ -36,6 +37,46 @@
 
 
 namespace android::mediautils {
+
+// Note: The sum of kDefaultTimeoutDurationMs and kDefaultSecondChanceDurationMs
+// should be no less than 2 seconds; otherwise spurious timeouts
+// may occur with system suspend.
+static constexpr int kDefaultTimeoutDurationMs = 3000;
+
+// Due to suspend abort not incrementing the monotonic clock,
+// we allow another second chance timeout after the first timeout expires.
+//
+// The total timeout is therefore kDefaultTimeoutDurationMs + kDefaultSecondChanceDurationMs,
+// and the result is more stable when the monotonic clock increments during suspend.
+//
+static constexpr int kDefaultSecondChanceDurationMs = 2000;
+
+/* static */
+TimeCheck::Duration TimeCheck::getDefaultTimeoutDuration() {
+    static constinit std::atomic<int> defaultTimeoutDurationMs{};
+    auto defaultMs = defaultTimeoutDurationMs.load(std::memory_order_relaxed);
+    if (defaultMs == 0) {
+        defaultMs = property_get_int32(
+                "audio.timecheck.timeout_duration_ms", kDefaultTimeoutDurationMs);
+        if (defaultMs < 1) defaultMs = kDefaultTimeoutDurationMs;
+        defaultTimeoutDurationMs.store(defaultMs, std::memory_order_relaxed);
+    }
+    return std::chrono::milliseconds(defaultMs);
+}
+
+/* static */
+TimeCheck::Duration TimeCheck::getDefaultSecondChanceDuration() {
+    static constinit std::atomic<int> defaultSecondChanceDurationMs{};
+    auto defaultMs = defaultSecondChanceDurationMs.load(std::memory_order_relaxed);
+    if (defaultMs == 0) {
+        defaultMs = property_get_int32(
+                "audio.timecheck.second_chance_duration_ms", kDefaultSecondChanceDurationMs);
+        if (defaultMs < 1) defaultMs = kDefaultSecondChanceDurationMs;
+        defaultSecondChanceDurationMs.store(defaultMs, std::memory_order_relaxed);
+    }
+    return std::chrono::milliseconds(defaultMs);
+}
+
 // This function appropriately signals a pid to dump a backtrace if we are
 // running on device (and the HAL exists). If we are not running on an Android
 // device, there is no HAL to signal (so we do nothing).
@@ -182,23 +223,25 @@
 
 /* static */
 std::string TimeCheck::analyzeTimeouts(
-        float requestedTimeoutMs, float elapsedSteadyMs, float elapsedSystemMs) {
+        float requestedTimeoutMs, float secondChanceMs,
+        float elapsedSteadyMs, float elapsedSystemMs) {
     // Track any OS clock issues with suspend.
     // It is possible that the elapsedSystemMs is much greater than elapsedSteadyMs if
     // a suspend occurs; however, we always expect the timeout ms should always be slightly
     // less than the elapsed steady ms regardless of whether a suspend occurs or not.
 
-    std::string s("Timeout ms ");
-    s.append(std::to_string(requestedTimeoutMs))
-        .append(" elapsed steady ms ").append(std::to_string(elapsedSteadyMs))
-        .append(" elapsed system ms ").append(std::to_string(elapsedSystemMs));
+    const float totalTimeoutMs = requestedTimeoutMs + secondChanceMs;
+    std::string s = std::format(
+            "Timeout ms {:.2f} ({:.2f} + {:.2f})"
+            " elapsed steady ms {:.4f} elapsed system ms {:.4f}",
+            totalTimeoutMs, requestedTimeoutMs, secondChanceMs, elapsedSteadyMs, elapsedSystemMs);
 
     // Is there something unusual?
     static constexpr float TOLERANCE_CONTEXT_SWITCH_MS = 200.f;
 
-    if (requestedTimeoutMs > elapsedSteadyMs || requestedTimeoutMs > elapsedSystemMs) {
+    if (totalTimeoutMs > elapsedSteadyMs || totalTimeoutMs > elapsedSystemMs) {
         s.append("\nError: early expiration - "
-                "requestedTimeoutMs should be less than elapsed time");
+                "totalTimeoutMs should be less than elapsed time");
     }
 
     if (elapsedSteadyMs > elapsedSystemMs + TOLERANCE_CONTEXT_SWITCH_MS) {
@@ -206,13 +249,13 @@
     }
 
     // This has been found in suspend stress testing.
-    if (elapsedSteadyMs > requestedTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+    if (elapsedSteadyMs > totalTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
         s.append("\nWarning: steady time significantly exceeds timeout "
                 "- possible thread stall or aborted suspend");
     }
 
     // This has been found in suspend stress testing.
-    if (elapsedSystemMs > requestedTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+    if (elapsedSystemMs > totalTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
         s.append("\nInformation: system time significantly exceeds timeout "
                 "- possible suspend");
     }
@@ -282,7 +325,7 @@
             .append(tag)
             .append(" scheduled ").append(formatTime(startSystemTime))
             .append(" on thread ").append(std::to_string(tid)).append("\n")
-            .append(analyzeTimeouts(requestedTimeoutMs + secondChanceMs,
+            .append(analyzeTimeouts(requestedTimeoutMs, secondChanceMs,
                     elapsedSteadyMs, elapsedSystemMs)).append("\n")
             .append(halPids).append("\n")
             .append(snapshotAnalysis.toString());
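The two accessors above read their sysprops once and cache the values in process-wide atomics, so the timeouts become configurable without recompiling (audio.timecheck.timeout_duration_ms and audio.timecheck.second_chance_duration_ms; values below 1 fall back to the 3000 ms and 2000 ms defaults). A hedged sketch of querying the resulting total budget; only the two accessors are assumed, and the logging is illustrative:

    // Hedged sketch: effective total watchdog budget from the accessors above.
    #define LOG_TAG "TimeCheckBudget"
    #include <chrono>
    #include <mediautils/TimeCheck.h>
    #include <utils/Log.h>

    void logTimeCheckBudget() {
        using android::mediautils::TimeCheck;
        const auto total = TimeCheck::getDefaultTimeoutDuration()
                + TimeCheck::getDefaultSecondChanceDuration();
        const auto totalMs =
                std::chrono::duration_cast<std::chrono::milliseconds>(total).count();
        ALOGD("TimeCheck budget: %lld ms", static_cast<long long>(totalMs));
    }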
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index bd9a462..0a047c1 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
index 15f043a..449e7de 100644
--- a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
+++ b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
@@ -53,6 +53,7 @@
     int32_t pid = data_provider.ConsumeIntegral<int32_t>();
     audio_source_t source = static_cast<audio_source_t>(data_provider
         .ConsumeIntegral<std::underlying_type_t<audio_source_t>>());
+    uint32_t deviceId = data_provider.ConsumeIntegral<uint32_t>();
 
     std::string packageNameStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
     std::string msgStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
@@ -70,8 +71,9 @@
     android::isAudioServerOrSystemServerUid(uid);
     android::isAudioServerOrMediaServerUid(uid);
     android::recordingAllowed(attributionSource);
-    android::startRecording(attributionSource, msgStr16, source);
-    android::finishRecording(attributionSource, source);
+    android::recordingAllowed(attributionSource, deviceId, source);
+    android::startRecording(attributionSource, deviceId, msgStr16, source);
+    android::finishRecording(attributionSource, deviceId, source);
     android::captureAudioOutputAllowed(attributionSource);
     android::captureMediaOutputAllowed(attributionSource);
     android::captureHotwordAllowed(attributionSource);
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 0b3a3f9..e0fabfd 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -87,11 +87,16 @@
 
 bool recordingAllowed(const AttributionSourceState& attributionSource,
         audio_source_t source = AUDIO_SOURCE_DEFAULT);
-bool startRecording(const AttributionSourceState& attributionSource,
-    const String16& msg, audio_source_t source);
-void finishRecording(const AttributionSourceState& attributionSource, audio_source_t source);
+
+bool recordingAllowed(const AttributionSourceState& attributionSource,
+                      uint32_t virtualDeviceId,
+                      audio_source_t source);
+bool startRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
+                    const String16& msg, audio_source_t source);
+void finishRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
+                     audio_source_t source);
 std::optional<AttributionSourceState> resolveAttributionSource(
-    const AttributionSourceState& callerAttributionSource);
+    const AttributionSourceState& callerAttributionSource, uint32_t virtualDeviceId);
 bool captureAudioOutputAllowed(const AttributionSourceState& attributionSource);
 bool captureMediaOutputAllowed(const AttributionSourceState& attributionSource);
 bool captureTunerAudioInputAllowed(const AttributionSourceState& attributionSource);
@@ -108,10 +113,6 @@
 bool bypassInterruptionPolicyAllowed(const AttributionSourceState& attributionSource);
 bool callAudioInterceptionAllowed(const AttributionSourceState& attributionSource);
 void purgePermissionCache();
-bool mustAnonymizeBluetoothAddress(
-        const AttributionSourceState& attributionSource, const String16& caller);
-void anonymizeBluetoothAddress(char *address);
-
 int32_t getOpForSource(audio_source_t source);
 
 AttributionSourceState getCallingAttributionSource();
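The prototypes above make the virtual device id an explicit parameter of every device-aware entry point. A hedged sketch of a client building the chained attribution for a given device; only resolveAttributionSource, as declared above, is assumed:

    // Hedged sketch: deviceId 0 selects the default device.
    #include <optional>
    #include <mediautils/ServiceUtilities.h>

    using android::content::AttributionSourceState;

    std::optional<AttributionSourceState> attributionForDevice(
            const AttributionSourceState& caller, uint32_t virtualDeviceId) {
        // Returns audioserver's identity chained to the caller's, with both links
        // stamped with virtualDeviceId, or nullopt if the package cannot be resolved.
        return android::resolveAttributionSource(caller, virtualDeviceId);
    }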
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index f1d572f..3e8d35d 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -42,19 +42,29 @@
     //  float elapsedMs (the elapsed time to this event).
     using OnTimerFunc = std::function<void(bool /* timeout */, float /* elapsedMs */ )>;
 
-    // The default timeout is chosen to be less than system server watchdog timeout
-    // Note: kDefaultTimeOutMs should be no less than 2 seconds, otherwise spurious timeouts
-    // may occur with system suspend.
-    static constexpr TimeCheck::Duration kDefaultTimeoutDuration = std::chrono::milliseconds(3000);
+    /**
+     * Returns the default timeout to use for TimeCheck.
+     *
+     * The default timeout of 3000ms (kDefaultTimeoutDurationMs) is chosen to be less than
+     * the system server watchdog timeout, and can be changed by the sysprop
+     * audio.timecheck.timeout_duration_ms.
+     * A second chance wait may be set to extend the check.
+     */
+    static TimeCheck::Duration getDefaultTimeoutDuration();
 
-    // Due to suspend abort not incrementing the monotonic clock,
-    // we allow another second chance timeout after the first timeout expires.
-    //
-    // The total timeout is therefore kDefaultTimeoutDuration + kDefaultSecondChanceDuration,
-    // and the result is more stable when the monotonic clock increments during suspend.
-    //
-    static constexpr TimeCheck::Duration kDefaultSecondChanceDuration =
-            std::chrono::milliseconds(2000);
+    /**
+     * Returns the second chance timeout to use for TimeCheck.
+     *
+     * Due to suspend abort not incrementing the monotonic clock,
+     * we allow another second chance timeout after the first timeout expires.
+     * The second chance timeout default of 2000ms (kDefaultSecondChanceDurationMs)
+     * may be changed by the sysprop audio.timecheck.second_chance_duration_ms.
+     *
+     * The total timeout is therefore
+     * getDefaultTimeoutDuration() + getDefaultSecondChanceDuration(),
+     * and the result is more stable when the monotonic clock increments during suspend.
+     */
+    static TimeCheck::Duration getDefaultSecondChanceDuration();
 
     /**
      * TimeCheck is a RAII object which will notify a callback
@@ -130,7 +140,8 @@
     // Returns a string that represents the timeout vs elapsed time,
     // and diagnostics if there are any potential issues.
     static std::string analyzeTimeouts(
-            float timeoutMs, float elapsedSteadyMs, float elapsedSystemMs);
+            float timeoutMs, float secondChanceMs,
+            float elapsedSteadyMs, float elapsedSystemMs);
 
     static TimerThread& getTimeCheckThread();
     static void accessAudioHalPids(std::vector<pid_t>* pids, bool update);
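The OnTimerFunc alias documented above receives a timeout flag and the elapsed time in milliseconds. A minimal, hedged example of a callback matching that signature (only the alias itself is assumed from this header; the logging is illustrative):

    // Hedged sketch: a callback matching OnTimerFunc(bool timeout, float elapsedMs).
    #define LOG_TAG "TimeCheckExample"
    #include <mediautils/TimeCheck.h>
    #include <utils/Log.h>

    android::mediautils::TimeCheck::OnTimerFunc makeLoggingTimer(const char* tag) {
        return [tag](bool timeout, float elapsedMs) {
            if (timeout) {
                ALOGW("%s timed out after %.2f ms", tag, elapsedMs);
            } else {
                ALOGV("%s completed in %.2f ms", tag, elapsedMs);
            }
        };
    }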
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 3fdc6eb..a68569a 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/utils/tests/static_string_view_tests.cpp b/media/utils/tests/static_string_view_tests.cpp
index c00de68..1dd2370 100644
--- a/media/utils/tests/static_string_view_tests.cpp
+++ b/media/utils/tests/static_string_view_tests.cpp
@@ -37,14 +37,12 @@
     // const std::array<char,2> nonstatic = {'a', 'b'};
     // static_assert(can_assign<nonstatic>::value == false);
     static std::array<char, 2> nonconst = {'a', 'b'};
-    static const std::array<char, 2> nonconstexpr = {'a', 'b'};
     static constexpr std::array<int, 2> nonchar = {1, 2};
     static constexpr size_t nonarray = 2;
 
     static_assert(CanCreate<nonconst>::value == false);
     static_assert(CanCreate<nonarray>::value == false);
     static_assert(CanCreate<nonchar>::value == false);
-    static_assert(CanCreate<nonconstexpr>::value == false);
 
     static constexpr std::array<char, 2> scoped = {'a', 'b'};
     constexpr StaticStringView Ticket1 = StaticStringView::create<global>();
diff --git a/services/Android.mk b/services/Android.mk
deleted file mode 100644
index c86a226..0000000
--- a/services/Android.mk
+++ /dev/null
@@ -1 +0,0 @@
-$(eval $(call declare-1p-copy-files,frameworks/av/services/audiopolicy,))
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 129541f..bf2915a 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -66,12 +66,12 @@
 
     // Remove some pedantic stylistic requirements.
     "-google-readability-casting", // C++ casts not always necessary and may be verbose
-    "-google-readability-todo",    // do not require TODO(info)
+    "-google-readability-todo", // do not require TODO(info)
 
-    "-bugprone-unhandled-self-assignment",
-    "-bugprone-suspicious-string-compare",
-    "-cert-oop54-cpp", // found in TransactionLog.h
     "-bugprone-narrowing-conversions", // b/182410845
+    "-bugprone-suspicious-string-compare",
+    "-bugprone-unhandled-self-assignment",
+    "-cert-oop54-cpp", // found in TransactionLog.h
 ]
 
 // TODO(b/275642749) Reenable these warnings
@@ -101,9 +101,9 @@
     "-Wall",
     "-Wdeprecated",
     "-Werror",
+    "-Werror=conditional-uninitialized",
     "-Werror=implicit-fallthrough",
     "-Werror=sometimes-uninitialized",
-    "-Werror=conditional-uninitialized",
     "-Wextra",
 
     // suppress some warning chatter.
@@ -113,7 +113,6 @@
     "-Wredundant-decls",
     "-Wshadow",
     "-Wstrict-aliasing",
-    "-fstrict-aliasing",
     "-Wthread-safety",
     //"-Wthread-safety-negative", // experimental - looks broken in R.
     "-Wunreachable-code",
@@ -121,6 +120,7 @@
     "-Wunreachable-code-return",
     "-Wunused",
     "-Wused-but-marked-unused",
+    "-fstrict-aliasing",
 ]
 
 // Eventually use common tidy defaults
@@ -134,57 +134,57 @@
     tidy_checks: audioflinger_tidy_errors,
     tidy_checks_as_errors: audioflinger_tidy_errors,
     tidy_flags: [
-      "-format-style=file",
+        "-format-style=file",
     ],
 }
 
 cc_defaults {
     name: "libaudioflinger_dependencies",
 
+    header_libs: [
+        "libaudiohal_headers", // required for AudioFlinger
+    ],
+
     shared_libs: [
-        "audioflinger-aidl-cpp",
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopermissioncontroller",
         "av-types-aidl-cpp",
         "com.android.media.audio-aconfig-cc",
         "effect-aidl-cpp",
-        "libaudioclient_aidl_conversion",
         "libactivitymanager_aidl",
+        "libaudioclient",
+        "libaudioclient_aidl_conversion",
         "libaudioflinger_datapath",
         "libaudioflinger_fastpath",
         "libaudioflinger_timing",
         "libaudioflinger_utils",
         "libaudiofoundation",
         "libaudiohal",
+        "libaudiomanager",
         "libaudioprocessing",
         "libaudioutils",
-        "libcutils",
-        "libutils",
-        "liblog",
         "libbinder",
         "libbinder_ndk",
-        "libaudioclient",
-        "libaudiomanager",
+        "libcutils",
+        "liblog",
+        "libmedia_helper",
         "libmediametrics",
         "libmediautils",
+        "libmemunreachable",
         "libnbaio",
         "libnblog",
         "libpermission",
         "libpowermanager",
-        "libmemunreachable",
-        "libmedia_helper",
         "libshmemcompat",
         "libsounddose",
+        "libutils",
         "libvibrator",
         "packagemanager_aidl-cpp",
     ],
-
-    static_libs: [
-        "libmedialogservice",
-        "libaudiospdif",
-    ],
 }
 
-
 cc_library {
     name: "libaudioflinger",
 
@@ -214,26 +214,29 @@
     ],
 
     static_libs: [
+        "libaudiospdif",
         "libcpustats",
-        "libpermission",
+        "libmedialogservice",
     ],
 
     header_libs: [
+        "audiopolicyservicelocal_headers",
         "libaaudio_headers",
-        "libaudioclient_headers",
-        "libaudiohal_headers",
-        "libaudioutils_headers",
         "libmedia_headers",
     ],
 
+    export_header_lib_headers: ["audiopolicyservicelocal_headers"],
+
+    export_include_dirs: ["."],
+
     export_shared_lib_headers: [
         "libpermission",
     ],
 
     cflags: [
-        "-fvisibility=hidden",
-        "-Werror",
         "-Wall",
+        "-Werror",
+        "-fvisibility=hidden",
     ],
     sanitize: {
         integer_overflow: true,
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index f5ac109..32ca690 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -39,13 +39,17 @@
 #include <binder/IServiceManager.h>
 #include <binder/Parcel.h>
 #include <cutils/properties.h>
+#include <com_android_media_audio.h>
 #include <com_android_media_audioserver.h>
 #include <media/AidlConversion.h>
 #include <media/AudioParameter.h>
 #include <media/AudioValidator.h>
 #include <media/IMediaLogService.h>
+#include <media/IPermissionProvider.h>
 #include <media/MediaMetricsItem.h>
+#include <media/NativePermissionController.h>
 #include <media/TypeConverter.h>
+#include <media/ValidatedAttributionSourceState.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/MemoryLeakTrackUtil.h>
 #include <mediautils/MethodStatistics.h>
@@ -81,12 +85,17 @@
 namespace android {
 
 using ::android::base::StringPrintf;
+using aidl_utils::statusTFromBinderStatus;
 using media::IEffectClient;
 using media::audio::common::AudioMMapPolicyInfo;
 using media::audio::common::AudioMMapPolicyType;
 using media::audio::common::AudioMode;
 using android::content::AttributionSourceState;
 using android::detail::AudioHalVersionInfo;
+using com::android::media::permission::INativePermissionController;
+using com::android::media::permission::IPermissionProvider;
+using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::ValidatedAttributionSourceState;
 
 static const AudioHalVersionInfo kMaxAAudioPropertyDeviceHalVersion =
         AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1);
@@ -118,6 +127,52 @@
     }
 }
 
+static error::BinderResult<ValidatedAttributionSourceState>
+validateAttributionFromContextOrTrustedCaller(AttributionSourceState attr,
+        const IPermissionProvider& provider) {
+    const auto callingUid = IPCThreadState::self()->getCallingUid();
+    // We trust the following UIDs to have appropriately validated the identities above us.
+    if (isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+        // Legacy paths may not properly populate the package name, so we attempt to handle it.
+        if (!attr.packageName.has_value() || attr.packageName.value() == "") {
+            ALOGW("Trusted client %u provided attr with missing package name", callingUid);
+            attr.packageName = VALUE_OR_RETURN(provider.getPackagesForUid(callingUid))[0];
+        }
+        // Behavior change: In the case of delegation, if pid is invalid,
+        // filling it in with the callingPid will cause a mismatch between the
+        // pid and the uid in the attribution, which is error-prone.
+        // Instead, assert that the pid from a trusted source is valid
+        if (attr.pid == -1) {
+            if (callingUid != static_cast<uid_t>(attr.uid)) {
+                return error::unexpectedExceptionCode(binder::Status::EX_ILLEGAL_ARGUMENT,
+                        "validateAttribution: Invalid pid from delegating trusted source");
+            } else {
+                // Legacy handling for trusted clients which may not fill pid correctly
+                attr.pid = IPCThreadState::self()->getCallingPid();
+            }
+        }
+        return ValidatedAttributionSourceState::createFromTrustedSource(std::move(attr));
+    } else {
+        // Behavior change: Populate pid with callingPid unconditionally. Previously, we
+        // allowed a caller-provided pid if the uid matched the calling context, but this is
+        // error-prone since it allows a mismatched uid/pid.
+        return ValidatedAttributionSourceState::createFromBinderContext(std::move(attr), provider);
+    }
+}
+
+#define VALUE_OR_RETURN_CONVERTED(exp)                                                \
+    ({                                                                                \
+        auto _tmp = (exp);                                                            \
+        if (!_tmp.ok()) {                                                             \
+            ALOGE("Function: %s Line: %d Failed result (%s)", __FUNCTION__, __LINE__, \
+                  errorToString(_tmp.error()).c_str());                               \
+            return statusTFromBinderStatus(_tmp.error());                             \
+        }                                                                             \
+        std::move(_tmp.value());                                                      \
+    })
+
+
+
 // Creates association between Binder code to name for IAudioFlinger.
 #define IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST \
 BINDER_METHOD_ENTRY(createTrack) \
@@ -132,8 +187,6 @@
 BINDER_METHOD_ENTRY(masterMute) \
 BINDER_METHOD_ENTRY(setStreamVolume) \
 BINDER_METHOD_ENTRY(setStreamMute) \
-BINDER_METHOD_ENTRY(streamVolume) \
-BINDER_METHOD_ENTRY(streamMute) \
 BINDER_METHOD_ENTRY(setMode) \
 BINDER_METHOD_ENTRY(setMicMute) \
 BINDER_METHOD_ENTRY(getMicMute) \
@@ -193,6 +246,7 @@
 BINDER_METHOD_ENTRY(getSoundDoseInterface) \
 BINDER_METHOD_ENTRY(getAudioPolicyConfig) \
 BINDER_METHOD_ENTRY(getAudioMixPort) \
+BINDER_METHOD_ENTRY(resetReferencesForTest) \
 
 // singleton for Binder Method Statistics for IAudioFlinger
 static auto& getIAudioFlingerStatistics() {
@@ -466,6 +520,8 @@
             sMediaLogService->unregisterWriter(iMemory);
         }
     }
+    mMediaLogNotifier->requestExit();
+    mPatchCommandThread->exit();
 }
 
 //static
@@ -516,30 +572,42 @@
     audio_attributes_t localAttr = *attr;
 
     // TODO b/182392553: refactor or make clearer
-    pid_t clientPid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
-    bool updatePid = (clientPid == (pid_t)-1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    AttributionSourceState adjAttributionSource;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        pid_t clientPid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
+        bool updatePid = (clientPid == (pid_t)-1);
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
 
-    AttributionSourceState adjAttributionSource = client.attributionSource;
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        uid_t clientUid =
-            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
-        ALOGW_IF(clientUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, clientUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        updatePid = true;
-    }
-    if (updatePid) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, clientPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
+        adjAttributionSource = client.attributionSource;
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            uid_t clientUid =
+                VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
+            ALOGW_IF(clientUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, clientUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            updatePid = true;
+        }
+        if (updatePid) {
+            const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+            ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, clientPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
             adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(client.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
+    }
 
     if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
         audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
@@ -680,11 +748,9 @@
 
     result.append("Notification Clients:\n");
     result.append("   pid    uid  name\n");
-    for (size_t i = 0; i < mNotificationClients.size(); ++i) {
-        const pid_t pid = mNotificationClients[i]->getPid();
-        const uid_t uid = mNotificationClients[i]->getUid();
-        const mediautils::UidInfo::Info info = mUidInfo.getInfo(uid);
-        result.appendFormat("%6d %6u  %s\n", pid, uid, info.package.c_str());
+    for (const auto& [ _, client ] : mNotificationClients) {
+        result.appendFormat("%6d %6u  %s\n",
+                client->getPid(), client->getUid(), client->getPackageName().c_str());
     }
 
     result.append("Global session refs:\n");
@@ -994,36 +1060,50 @@
     bool isSpatialized = false;
     bool isBitPerfect = false;
 
-    // TODO b/182392553: refactor or make clearer
-    pid_t clientPid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(input.clientInfo.attributionSource.pid));
-    bool updatePid = (clientPid == (pid_t)-1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    uid_t clientUid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(input.clientInfo.attributionSource.uid));
     audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
     std::vector<int> effectIds;
     audio_attributes_t localAttr = input.attr;
 
-    AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        ALOGW_IF(clientUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, clientUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        clientUid = callingUid;
-        updatePid = true;
+    AttributionSourceState adjAttributionSource;
+    pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = input.clientInfo.attributionSource;
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        uid_t clientUid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(
+                        input.clientInfo.attributionSource.uid));
+        pid_t clientPid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+                        input.clientInfo.attributionSource.pid));
+        bool updatePid = (clientPid == (pid_t)-1);
+
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            ALOGW_IF(clientUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, clientUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            clientUid = callingUid;
+            updatePid = true;
+        }
+        if (updatePid) {
+            ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, clientPid);
+            clientPid = callingPid;
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                adjAttributionSource);
+
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(input.clientInfo.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
     }
-    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-    if (updatePid) {
-        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, clientPid);
-        clientPid = callingPid;
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
-            adjAttributionSource);
 
     audio_session_t sessionId = input.sessionId;
     if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -1076,9 +1156,10 @@
             goto Exit;
         }
 
-        client = registerPid(clientPid);
+        client = registerPid(adjAttributionSource.pid);
 
         IAfPlaybackThread* effectThread = nullptr;
+        sp<IAfEffectChain> effectChain = nullptr;
         // check if an effect chain with the same session ID is present on another
         // output thread and move it here.
         for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
@@ -1091,6 +1172,10 @@
                 }
             }
         }
+        // Check if an orphan effect chain exists for this session
+        if (effectThread == nullptr) {
+            effectChain = getOrphanEffectChain_l(sessionId);
+        }
         ALOGV("createTrack() sessionId: %d", sessionId);
 
         output.sampleRate = input.config.sample_rate;
@@ -1135,6 +1220,13 @@
                     effectIds = thread->getEffectIds_l(sessionId);
                 }
             }
+            if (effectChain != nullptr) {
+                if (moveEffectChain_ll(sessionId, nullptr, thread, effectChain.get())
+                        == NO_ERROR) {
+                    effectThreadId = thread->id();
+                    effectIds = thread->getEffectIds_l(sessionId);
+                }
+            }
         }
 
         // Look for sync events awaiting for a session to be used.
@@ -1652,37 +1744,6 @@
     return NO_ERROR;
 }
 
-float AudioFlinger::streamVolume(audio_stream_type_t stream, audio_io_handle_t output) const
-{
-    status_t status = checkStreamType(stream);
-    if (status != NO_ERROR) {
-        return 0.0f;
-    }
-    if (output == AUDIO_IO_HANDLE_NONE) {
-        return 0.0f;
-    }
-
-    audio_utils::lock_guard lock(mutex());
-    sp<VolumeInterface> volumeInterface = getVolumeInterface_l(output);
-    if (volumeInterface == NULL) {
-        return 0.0f;
-    }
-
-    return volumeInterface->streamVolume(stream);
-}
-
-bool AudioFlinger::streamMute(audio_stream_type_t stream) const
-{
-    status_t status = checkStreamType(stream);
-    if (status != NO_ERROR) {
-        return true;
-    }
-
-    audio_utils::lock_guard lock(mutex());
-    return streamMute_l(stream);
-}
-
-
 void AudioFlinger::broadcastParametersToRecordThreads_l(const String8& keyValuePairs)
 {
     for (size_t i = 0; i < mRecordThreads.size(); i++) {
@@ -1923,10 +1984,11 @@
     if (mPrimaryHardwareDev == nullptr) {
         return 0;
     }
+    if (mInputBufferSizeOrderedDevs.empty()) {
+        return 0;
+    }
     mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
 
-    sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice();
-
     std::vector<audio_channel_mask_t> channelMasks = {channelMask};
     if (channelMask != AUDIO_CHANNEL_IN_MONO) {
         channelMasks.push_back(AUDIO_CHANNEL_IN_MONO);
@@ -1956,8 +2018,24 @@
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
+    auto getInputBufferSize = [](const sp<DeviceHalInterface>& dev, audio_config_t config,
+                                 size_t* bytes) -> status_t {
+        if (!dev) {
+            return BAD_VALUE;
+        }
+        status_t result = dev->getInputBufferSize(&config, bytes);
+        if (result == BAD_VALUE) {
+            // Retry with the config suggested by the HAL.
+            result = dev->getInputBufferSize(&config, bytes);
+        }
+        if (result != OK || *bytes == 0) {
+            return BAD_VALUE;
+        }
+        return result;
+    };
+
     // Change parameters of the configuration each iteration until we find a
-    // configuration that the device will support.
+    // configuration that the device will support, or until the HAL suggests one it supports.
     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
     for (auto testChannelMask : channelMasks) {
         config.channel_mask = testChannelMask;
@@ -1967,10 +2045,14 @@
                 config.sample_rate = testSampleRate;
 
                 size_t bytes = 0;
-                status_t result = dev->getInputBufferSize(&config, &bytes);
-                if (result != OK || bytes == 0) {
-                    continue;
+                ret = BAD_VALUE;
+                for (const AudioHwDevice* dev : mInputBufferSizeOrderedDevs) {
+                    ret = getInputBufferSize(dev->hwDevice(), config, &bytes);
+                    if (ret == OK) {
+                        break;
+                    }
                 }
+                if (ret == BAD_VALUE) continue;
 
                 if (config.sample_rate != sampleRate || config.channel_mask != channelMask ||
                     config.format != format) {
@@ -2047,24 +2129,23 @@
 
 void AudioFlinger::registerClient(const sp<media::IAudioFlingerClient>& client)
 {
-    audio_utils::lock_guard _l(mutex());
     if (client == 0) {
         return;
     }
-    pid_t pid = IPCThreadState::self()->getCallingPid();
+    const pid_t pid = IPCThreadState::self()->getCallingPid();
     const uid_t uid = IPCThreadState::self()->getCallingUid();
+
+    audio_utils::lock_guard _l(mutex());
     {
         audio_utils::lock_guard _cl(clientMutex());
-        if (mNotificationClients.indexOfKey(pid) < 0) {
-            sp<NotificationClient> notificationClient = new NotificationClient(this,
-                                                                                client,
-                                                                                pid,
-                                                                                uid);
-            ALOGV("registerClient() client %p, pid %d, uid %u",
-                    notificationClient.get(), pid, uid);
+        if (mNotificationClients.count(pid) == 0) {
+            const mediautils::UidInfo::Info info = mUidInfo.getInfo(uid);
+            sp<NotificationClient> notificationClient = sp<NotificationClient>::make(
+                    this, client, pid, uid, info.package);
+            ALOGV("registerClient() pid %d, uid %u, package %s",
+                    pid, uid, info.package.c_str());
 
-            mNotificationClients.add(pid, notificationClient);
-
+            mNotificationClients[pid] = notificationClient;
             sp<IBinder> binder = IInterface::asBinder(client);
             binder->linkToDeath(notificationClient);
         }
@@ -2091,7 +2172,7 @@
         audio_utils::lock_guard _l(mutex());
         {
             audio_utils::lock_guard _cl(clientMutex());
-            mNotificationClients.removeItem(pid);
+            mNotificationClients.erase(pid);
         }
 
         ALOGV("%d died, releasing its sessions", pid);
@@ -2132,11 +2213,13 @@
             legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(ioDesc));
 
     audio_utils::lock_guard _l(clientMutex());
-    size_t size = mNotificationClients.size();
-    for (size_t i = 0; i < size; i++) {
-        if ((pid == 0) || (mNotificationClients.keyAt(i) == pid)) {
-            mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(eventAidl,
-                                                                                   descAidl);
+    if (pid != 0) {
+        if (auto it = mNotificationClients.find(pid); it != mNotificationClients.end()) {
+            it->second->audioFlingerClient()->ioConfigChanged(eventAidl, descAidl);
+        }
+    } else {
+        for (const auto& [ client_pid, client] : mNotificationClients) {
+            client->audioFlingerClient()->ioConfigChanged(eventAidl, descAidl);
         }
     }
 }
@@ -2150,12 +2233,24 @@
 
     audio_utils::lock_guard _l(clientMutex());
     size_t size = mNotificationClients.size();
-    for (size_t i = 0; i < size; i++) {
-        mNotificationClients.valueAt(i)->audioFlingerClient()
-                ->onSupportedLatencyModesChanged(outputAidl, modesAidl);
+    for (const auto& [_, client] : mNotificationClients) {
+        client->audioFlingerClient()->onSupportedLatencyModesChanged(outputAidl, modesAidl);
     }
 }
 
+void AudioFlinger::onHardError(std::set<audio_port_handle_t>& trackPortIds) {
+    ALOGI("releasing tracks due to a hard error occurred on an I/O thread");
+    for (const auto portId : trackPortIds) {
+        AudioSystem::releaseOutput(portId);
+    }
+}
+
+const IPermissionProvider& AudioFlinger::getPermissionProvider() {
+    // This is initialized as part of service construction, prior to binder registration,
+    // so it should always be non-null.
+    return mAudioPolicyServiceLocal.load()->getPermissionProvider();
+}
+
 // removeClient_l() must be called with AudioFlinger::clientMutex() held
 void AudioFlinger::removeClient_l(pid_t pid)
 {
@@ -2171,30 +2266,24 @@
     sp<IAfThreadBase> thread;
 
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
-        if (mPlaybackThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
-            ALOG_ASSERT(thread == 0);
-            thread = mPlaybackThreads.valueAt(i);
+        thread = mPlaybackThreads.valueAt(i);
+        if (thread->getEffect(sessionId, effectId) != 0) {
+            return thread;
         }
     }
-    if (thread != nullptr) {
-        return thread;
-    }
     for (size_t i = 0; i < mRecordThreads.size(); i++) {
-        if (mRecordThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
-            ALOG_ASSERT(thread == 0);
-            thread = mRecordThreads.valueAt(i);
+        thread = mRecordThreads.valueAt(i);
+        if (thread->getEffect(sessionId, effectId) != 0) {
+            return thread;
         }
     }
-    if (thread != nullptr) {
-        return thread;
-    }
     for (size_t i = 0; i < mMmapThreads.size(); i++) {
-        if (mMmapThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
-            ALOG_ASSERT(thread == 0);
-            thread = mMmapThreads.valueAt(i);
+        thread = mMmapThreads.valueAt(i);
+        if (thread->getEffect(sessionId, effectId) != 0) {
+            return thread;
         }
     }
-    return thread;
+    return nullptr;
 }
 
 // ----------------------------------------------------------------------------
@@ -2202,8 +2291,10 @@
 AudioFlinger::NotificationClient::NotificationClient(const sp<AudioFlinger>& audioFlinger,
                                                      const sp<media::IAudioFlingerClient>& client,
                                                      pid_t pid,
-                                                     uid_t uid)
-    : mAudioFlinger(audioFlinger), mPid(pid), mUid(uid), mAudioFlingerClient(client)
+        uid_t uid,
+        std::string_view packageName)
+    : mAudioFlinger(audioFlinger), mPid(pid), mUid(uid)
+    , mPackageName(packageName), mAudioFlingerClient(client)
 {
 }
 
@@ -2213,7 +2304,7 @@
 
 void AudioFlinger::NotificationClient::binderDied(const wp<IBinder>& who __unused)
 {
-    sp<NotificationClient> keep(this);
+    const auto keep = sp<NotificationClient>::fromExisting(this);
     mAudioFlinger->removeNotificationClient(mPid);
 }
 
@@ -2271,30 +2362,43 @@
     output.buffers.clear();
     output.inputId = AUDIO_IO_HANDLE_NONE;
 
-    // TODO b/182392553: refactor or clean up
-    AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    bool updatePid = (adjAttributionSource.pid == -1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
-           adjAttributionSource.uid));
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        ALOGW_IF(currentUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, currentUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        updatePid = true;
+    AttributionSourceState adjAttributionSource;
+    pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = input.clientInfo.attributionSource;
+        bool updatePid = (adjAttributionSource.pid == -1);
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
+               adjAttributionSource.uid));
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            ALOGW_IF(currentUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, currentUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            updatePid = true;
+        }
+        const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+                adjAttributionSource.pid));
+        if (updatePid) {
+            ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, currentPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(
+                    input.clientInfo.attributionSource,
+                    getPermissionProvider()
+                    ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
     }
-    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-    const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
-            adjAttributionSource.pid));
-    if (updatePid) {
-        ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, currentPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
-            adjAttributionSource);
+
     // further format checks are performed by createRecordTrack_l()
     if (!audio_is_valid_format(input.config.format)) {
         ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -2547,6 +2651,7 @@
         bool mm;
         if (OK == dev->getMasterMute(&mm)) {
             mMasterMute = mm;
+            ALOGI_IF(mMasterMute, "%s: applying mute from HAL %s", __func__, name);
         }
     }
 
@@ -2598,12 +2703,43 @@
     }
 
     mAudioHwDevs.add(handle, audioDevice);
+    if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_STUB) != 0) {
+        mInputBufferSizeOrderedDevs.insert(audioDevice);
+    }
 
     ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
 
     return audioDevice;
 }
 
+// Sort AudioHwDevices so that getInputBufferSize() traverses them in the following order:
+// Primary, Usb, Bluetooth, A2DP, other modules, remote submix.
+/* static */
+bool AudioFlinger::inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs) {
+    static const std::map<std::string_view, int> kPriorities = {
+        { AUDIO_HARDWARE_MODULE_ID_PRIMARY, 0 }, { AUDIO_HARDWARE_MODULE_ID_USB, 1 },
+        { AUDIO_HARDWARE_MODULE_ID_BLUETOOTH, 2 }, { AUDIO_HARDWARE_MODULE_ID_A2DP, 3 },
+        { AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, std::numeric_limits<int>::max() }
+    };
+
+    const std::string_view lhsName = lhs->moduleName();
+    const std::string_view rhsName = rhs->moduleName();
+
+    auto lhsPriority = std::numeric_limits<int>::max() - 1;
+    if (const auto lhsIt = kPriorities.find(lhsName); lhsIt != kPriorities.end()) {
+        lhsPriority = lhsIt->second;
+    }
+    auto rhsPriority = std::numeric_limits<int>::max() - 1;
+    if (const auto rhsIt = kPriorities.find(rhsName); rhsIt != kPriorities.end()) {
+        rhsPriority = rhsIt->second;
+    }
+
+    if (lhsPriority != rhsPriority) {
+        return lhsPriority < rhsPriority;
+    }
+    return lhsName < rhsName;
+}
+
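A standalone sketch of the same priority-map comparator technique, illustrative only and not part of this patch; plain strings stand in for the AUDIO_HARDWARE_MODULE_ID_* constants. A std::set keyed by such a comparator iterates its elements in the intended traversal order:

#include <limits>
#include <map>
#include <set>
#include <string_view>

static bool priorityCmp(std::string_view lhs, std::string_view rhs) {
    // Lower value sorts first; unknown names land between the known modules and remote submix.
    static const std::map<std::string_view, int> kPriorities = {
        {"primary", 0}, {"usb", 1}, {"bluetooth", 2}, {"a2dp", 3},
        {"r_submix", std::numeric_limits<int>::max()}};
    const auto prio = [](std::string_view name) {
        const auto it = kPriorities.find(name);
        return it != kPriorities.end() ? it->second : std::numeric_limits<int>::max() - 1;
    };
    const int lp = prio(lhs);
    const int rp = prio(rhs);
    return lp != rp ? lp < rp : lhs < rhs;  // ties broken alphabetically, as in the patch
}

// Iterates as: "primary", "usb", "bluetooth", "a2dp", <other names alphabetically>, "r_submix".
static const std::set<std::string_view, decltype(&priorityCmp)> kOrderedNames(
        {"r_submix", "msd", "a2dp", "usb", "bluetooth", "primary"}, priorityCmp);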
 // ----------------------------------------------------------------------------
 
 uint32_t AudioFlinger::getPrimaryOutputSamplingRate() const
@@ -2854,7 +2990,8 @@
                                                         audio_config_base_t *mixerConfig,
                                                         audio_devices_t deviceType,
                                                         const String8& address,
-                                                        audio_output_flags_t flags)
+                                                        audio_output_flags_t flags,
+                                                        const audio_attributes_t attributes)
 {
     AudioHwDevice *outHwDev = findSuitableHwDev_l(module, deviceType);
     if (outHwDev == NULL) {
@@ -2872,13 +3009,18 @@
 
     mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
     AudioStreamOut *outputStream = NULL;
+
+    playback_track_metadata_v7_t trackMetadata{};  // zero-initialize; only usage is known here
+    trackMetadata.base.usage = attributes.usage;
+
     status_t status = outHwDev->openOutputStream(
             &outputStream,
             *output,
             deviceType,
             flags,
             halConfig,
-            address.c_str());
+            address.c_str(),
+            {trackMetadata});
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
@@ -2947,6 +3089,8 @@
             aidl2legacy_DeviceDescriptorBase(request.device));
     audio_output_flags_t flags = VALUE_OR_RETURN_STATUS(
             aidl2legacy_int32_t_audio_output_flags_t_mask(request.flags));
+    audio_attributes_t attributes = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioAttributes_audio_attributes_t(request.attributes));
 
     audio_io_handle_t output;
 
@@ -2969,7 +3113,7 @@
     audio_utils::lock_guard _l(mutex());
 
     const sp<IAfThreadBase> thread = openOutput_l(module, &output, &halConfig,
-            &mixerConfig, deviceType, address, flags);
+            &mixerConfig, deviceType, address, flags, attributes);
     if (thread != 0) {
         uint32_t latencyMs = 0;
         if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == 0) {
@@ -3059,6 +3203,25 @@
 
 
             mPlaybackThreads.removeItem(output);
+            // Save AUDIO_SESSION_OUTPUT_MIX effects to the orphan chains.
+            // The output mix effect session is used by AudioPolicyManager to manage music
+            // effects and exists across all playback threads.
+            if (playbackThread->type() == IAfThreadBase::MIXER
+                    || playbackThread->type() == IAfThreadBase::OFFLOAD
+                    || playbackThread->type() == IAfThreadBase::SPATIALIZER) {
+                sp<IAfEffectChain> mixChain;
+                {
+                    audio_utils::scoped_lock sl(playbackThread->mutex());
+                    mixChain = playbackThread->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
+                    if (mixChain != nullptr) {
+                        ALOGW("%s() output %d moving mix session to orphans", __func__, output);
+                        playbackThread->removeEffectChain_l(mixChain);
+                    }
+                }
+                if (mixChain != nullptr) {
+                    putOrphanEffectChain_l(mixChain);
+                }
+            }
             // save all effects to the default thread
             if (mPlaybackThreads.size()) {
                 IAfPlaybackThread* const dstThread =
@@ -3408,7 +3571,7 @@
         // is likely proxied by mediaserver (e.g CameraService) and releaseAudioSessionId() can be
         // called from a different pid leaving a stale session reference.  Also we don't know how
         // to clear this reference if the client process dies.
-        if (mNotificationClients.indexOfKey(caller) < 0) {
+        if (mNotificationClients.count(caller) == 0) {
             ALOGW("acquireAudioSessionId() unknown client %d for session %d", caller, audioSession);
             return;
         }
@@ -3724,7 +3887,11 @@
 
 IAfPlaybackThread* AudioFlinger::primaryPlaybackThread_l() const
 {
-    audio_utils::lock_guard lock(hardwareMutex());
+    // The atomic pointer mPrimaryHardwareDev requires both the
+    // AudioFlinger and the Hardware mutex for modification.
+    // Since we hold the AudioFlinger mutex, we can read it
+    // safely without the Hardware mutex, which avoids a mutex order
+    // inversion with Thread methods and the ThreadBase mutex.
     if (mPrimaryHardwareDev == nullptr) {
         return nullptr;
     }
@@ -3860,7 +4027,8 @@
                                                        patchRecord->bufferSize(),
                                                        outputFlags,
                                                        0ns /* timeout */,
-                                                       frameCountToBeReady);
+                                                       frameCountToBeReady,
+                                                       track->getSpeed());
         status = patchTrack->initCheck();
         if (status != NO_ERROR) {
             ALOGE("Secondary output patchTrack init failed: %d", status);
@@ -4028,20 +4196,31 @@
     int idOut = -1;
 
     status_t lStatus = NO_ERROR;
-
-    // TODO b/182392553: refactor or make clearer
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-    pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
-    if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(currentPid != -1 && currentPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, currentPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-        currentPid = callingPid;
+    uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    pid_t currentPid;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+        currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
+        if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+            ALOGW_IF(currentPid != -1 && currentPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, currentPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+            currentPid = callingPid;
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(request.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
+        currentPid = adjAttributionSource.pid;
     }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(adjAttributionSource);
+
 
     ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
           adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
@@ -4203,7 +4382,9 @@
             // before creating the AudioEffect or the io handle must be specified.
             //
             // Detect if the effect is created after an AudioRecord is destroyed.
-            if (getOrphanEffectChain_l(sessionId).get() != nullptr) {
+            if (sessionId != AUDIO_SESSION_OUTPUT_MIX
+                  && ((descOut.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC)
+                  && getOrphanEffectChain_l(sessionId).get() != nullptr) {
                 ALOGE("%s: effect %s with no specified io handle is denied because the AudioRecord"
                       " for session %d no longer exists",
                       __func__, descOut.name, sessionId);
@@ -4214,11 +4395,27 @@
             // Legacy handling of creating an effect on an expired or made-up
             // session id.  We think that it is a Playback effect.
             //
-            // If no output thread contains the requested session ID, default to
-            // first output. The effect chain will be moved to the correct output
-            // thread when a track with the same session ID is created
-            if (io == AUDIO_IO_HANDLE_NONE && mPlaybackThreads.size() > 0) {
-                io = mPlaybackThreads.keyAt(0);
+            // If no output thread contains the requested session ID, park the effect in
+            // the orphan chains. The effect chain will be moved to the correct output
+            // thread when a track with the same session ID is created.
+            if (io == AUDIO_IO_HANDLE_NONE) {
+                if (probe) {
+                    // In probe mode, since no compatible thread was found, exit with an error.
+                    lStatus = BAD_VALUE;
+                    goto Exit;
+                }
+                ALOGV("%s() got io %d for effect %s", __func__, io, descOut.name);
+                sp<Client> client = registerPid(currentPid);
+                bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
+                handle = createOrphanEffect_l(client, effectClient, priority, sessionId,
+                                              &descOut, &enabledOut, &lStatus, pinned,
+                                              request.notifyFramesProcessed);
+                if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
+                    // remove local strong reference to Client with clientMutex() held
+                    audio_utils::lock_guard _cl(clientMutex());
+                    client.clear();
+                }
+                goto Register;
             }
             ALOGV("createEffect() got io %d for effect %s", io, descOut.name);
         } else if (checkPlaybackThread_l(io) != nullptr
@@ -4259,7 +4456,8 @@
                     goto Exit;
                 }
             }
-        } else {
+        }
+        if (thread->type() == IAfThreadBase::RECORD || sessionId == AUDIO_SESSION_OUTPUT_MIX) {
             // Check if one effect chain was awaiting for an effect to be created on this
             // session and used it instead of creating a new one.
             sp<IAfEffectChain> chain = getOrphanEffectChain_l(sessionId);
@@ -4335,6 +4533,85 @@
     return lStatus;
 }
 
+sp<IAfEffectHandle> AudioFlinger::createOrphanEffect_l(
+        const sp<Client>& client,
+        const sp<IEffectClient>& effectClient,
+        int32_t priority,
+        audio_session_t sessionId,
+        effect_descriptor_t *desc,
+        int *enabled,
+        status_t *status,
+        bool pinned,
+        bool notifyFramesProcessed)
+{
+    ALOGV("%s effectClient %p, priority %d, sessionId %d, factory %p",
+          __func__, effectClient.get(), priority, sessionId, mEffectsFactoryHal.get());
+
+    // Check if an orphan effect chain exists for this session, or create a new one for it.
+    sp<IAfEffectModule> effect;
+    sp<IAfEffectChain> chain = getOrphanEffectChain_l(sessionId);
+    bool chainCreated = false;
+    if (chain == nullptr) {
+        chain = IAfEffectChain::create(/* ThreadBase= */ nullptr, sessionId, this);
+        chainCreated = true;
+    } else {
+        effect = chain->getEffectFromDesc(desc);
+    }
+    bool effectCreated = false;
+    if (effect == nullptr) {
+        audio_unique_id_t effectId = nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
+        // create a new effect module if none present in the chain
+        status_t llStatus =
+                chain->createEffect(effect, desc, effectId, sessionId, pinned);
+        if (llStatus != NO_ERROR) {
+            *status = llStatus;
+            // if the effect chain was not created here, put it back
+            if (!chainCreated) {
+                putOrphanEffectChain_l(chain);
+            }
+            return nullptr;
+        }
+        effect->setMode(getMode());
+
+        if (effect->isHapticGenerator()) {
+            // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
+            // for the HapticGenerator.
+            const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
+                    getDefaultVibratorInfo_l();
+            if (defaultVibratorInfo) {
+                // Only set the vibrator info when it is a valid one.
+                audio_utils::lock_guard _cl(chain->mutex());
+                effect->setVibratorInfo_l(*defaultVibratorInfo);
+            }
+        }
+        effectCreated = true;
+    }
+    // create effect handle and connect it to effect module
+    sp<IAfEffectHandle> handle =
+            IAfEffectHandle::create(effect, client, effectClient, priority, notifyFramesProcessed);
+    status_t lStatus = handle->initCheck();
+    if (lStatus == OK) {
+        lStatus = effect->addHandle(handle.get());
+    }
+    // on error the EffectHandle is still returned; the caller is responsible for clearing it
+    if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
+        if (effectCreated) {
+            chain->removeEffect(effect);
+        }
+        // if the effect chain was not created here, put it back
+        if (!chainCreated) {
+            putOrphanEffectChain_l(chain);
+        }
+    } else {
+        if (enabled != NULL) {
+            *enabled = (int)effect->isEnabled();
+        }
+        putOrphanEffectChain_l(chain);
+    }
+    *status = lStatus;
+    return handle;
+}
+
 status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
         audio_io_handle_t dstIo)
 NO_THREAD_SAFETY_ANALYSIS
@@ -4363,17 +4640,43 @@
         }
         return ret;
     }
-    IAfPlaybackThread* const srcThread = checkPlaybackThread_l(srcIo);
-    if (srcThread == nullptr) {
-        ALOGW("%s() bad srcIo %d", __func__, srcIo);
-        return BAD_VALUE;
-    }
-    IAfPlaybackThread* const dstThread = checkPlaybackThread_l(dstIo);
+
+    IAfPlaybackThread* dstThread = checkPlaybackThread_l(dstIo);
     if (dstThread == nullptr) {
         ALOGW("%s() bad dstIo %d", __func__, dstIo);
         return BAD_VALUE;
     }
 
+    IAfPlaybackThread* srcThread = checkPlaybackThread_l(srcIo);
+    sp<IAfEffectChain> orphanChain = getOrphanEffectChain_l(sessionId);
+    if (srcThread == nullptr && orphanChain == nullptr && sessionId == AUDIO_SESSION_OUTPUT_MIX) {
+        ALOGW("%s() AUDIO_SESSION_OUTPUT_MIX not found in orphans, checking other mix", __func__);
+        for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+            const sp<IAfPlaybackThread> pt = mPlaybackThreads.valueAt(i);
+            const uint32_t sessionType = pt->hasAudioSession(AUDIO_SESSION_OUTPUT_MIX);
+            if ((pt->type() == IAfThreadBase::MIXER || pt->type() == IAfThreadBase::OFFLOAD) &&
+                    ((sessionType & IAfThreadBase::EFFECT_SESSION) != 0)) {
+                srcThread = pt.get();
+                if (srcThread == dstThread) {
+                    ALOGD("%s() same dst and src threads, ignoring move", __func__);
+                    return NO_ERROR;
+                }
+                ALOGW("%s() found srcOutput %d hosting AUDIO_SESSION_OUTPUT_MIX", __func__,
+                      pt->id());
+                break;
+            }
+        }
+    }
+    if (srcThread == nullptr && orphanChain == nullptr) {
+        ALOGW("%s() bad srcIo %d", __func__, srcIo);
+        return BAD_VALUE;
+    }
+    // dstThread pointer validity has already been checked
+    if (orphanChain != nullptr) {
+        audio_utils::scoped_lock _ll(dstThread->mutex());
+        return moveEffectChain_ll(sessionId, nullptr, dstThread, orphanChain.get());
+    }
+    // srcThread pointer validity has already been checked
     audio_utils::scoped_lock _ll(dstThread->mutex(), srcThread->mutex());
     return moveEffectChain_ll(sessionId, srcThread, dstThread);
 }
@@ -4387,23 +4690,29 @@
 
     sp<IAfThreadBase> thread = getEffectThread_l(sessionId, effectId);
     if (thread == nullptr) {
-      return;
+        return;
     }
     audio_utils::lock_guard _sl(thread->mutex());
-    sp<IAfEffectModule> effect = thread->getEffect_l(sessionId, effectId);
-    thread->setEffectSuspended_l(&effect->desc().type, suspended, sessionId);
+    if (const auto& effect = thread->getEffect_l(sessionId, effectId)) {
+        thread->setEffectSuspended_l(&effect->desc().type, suspended, sessionId);
+    }
 }
 
 
 // moveEffectChain_ll must be called with the AudioFlinger::mutex()
 // and both srcThread and dstThread mutex()s held
 status_t AudioFlinger::moveEffectChain_ll(audio_session_t sessionId,
-        IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread)
+        IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread,
+        IAfEffectChain* srcChain)
 {
-    ALOGV("%s: session %d from thread %p to thread %p",
-            __func__, sessionId, srcThread, dstThread);
+    ALOGV("%s: session %d from thread %p to thread %p %s",
+            __func__, sessionId, srcThread, dstThread,
+            (srcChain != nullptr ? "from specific chain" : ""));
+    ALOG_ASSERT((srcThread != nullptr) != (srcChain != nullptr),
+                "exactly one of srcThread or srcChain must be provided");
 
-    sp<IAfEffectChain> chain = srcThread->getEffectChain_l(sessionId);
+    sp<IAfEffectChain> chain =
+          srcChain != nullptr ? srcChain : srcThread->getEffectChain_l(sessionId);
     if (chain == 0) {
         ALOGW("%s: effect chain for session %d not on source thread %p",
                 __func__, sessionId, srcThread);
@@ -4423,8 +4732,9 @@
     // otherwise unnecessary as removeEffect_l() will remove the chain when last effect is
     // removed.
     // TODO(b/216875016): consider holding the effect chain locks for the duration of the move.
-    srcThread->removeEffectChain_l(chain);
-
+    if (srcThread != nullptr) {
+        srcThread->removeEffectChain_l(chain);
+    }
     // transfer all effects one by one so that new effect chain is created on new thread with
     // correct buffer sizes and audio parameters and effect engines reconfigured accordingly
     sp<IAfEffectChain> dstChain;
@@ -4434,7 +4744,11 @@
     // process effects one by one.
     for (sp<IAfEffectModule> effect = chain->getEffectFromId_l(0); effect != nullptr;
             effect = chain->getEffectFromId_l(0)) {
-        srcThread->removeEffect_l(effect);
+        if (srcThread != nullptr) {
+            srcThread->removeEffect_l(effect);
+        } else {
+            chain->removeEffect(effect);
+        }
         removed.add(effect);
         status = dstThread->addEffect_ll(effect);
         if (status != NO_ERROR) {
@@ -4462,7 +4776,7 @@
         for (const auto& effect : removed) {
             dstThread->removeEffect_l(effect); // Note: Depending on error location, the last
                                                // effect may not have been placed on dstThread.
-            if (srcThread->addEffect_ll(effect) == NO_ERROR) {
+            if (srcThread != nullptr && srcThread->addEffect_ll(effect) == NO_ERROR) {
                 ++restored;
                 if (dstChain == nullptr) {
                     dstChain = effect->getCallback()->chain().promote();
@@ -4483,7 +4797,7 @@
             if (effect->state() == IAfEffectModule::ACTIVE ||
                     effect->state() == IAfEffectModule::STOPPING) {
                 ++started;
-                effect->start();
+                effect->start_l();
             }
         }
         dstChain->mutex().unlock();
@@ -4493,15 +4807,19 @@
         if (errorString.empty()) {
             errorString = StringPrintf("%s: failed status %d", __func__, status);
         }
-        ALOGW("%s: %s unsuccessful move of session %d from srcThread %p to dstThread %p "
+        ALOGW("%s: %s unsuccessful move of session %d from %s %p to dstThread %p "
                 "(%zu effects removed from srcThread, %zu effects restored to srcThread, "
                 "%zu effects started)",
-                __func__, errorString.c_str(), sessionId, srcThread, dstThread,
+                __func__, errorString.c_str(), sessionId,
+                (srcThread != nullptr ? "srcThread" : "srcChain"),
+                (srcThread != nullptr ? (void*) srcThread : (void*) srcChain), dstThread,
                 removed.size(), restored, started);
     } else {
-        ALOGD("%s: successful move of session %d from srcThread %p to dstThread %p "
+        ALOGD("%s: successful move of session %d from %s %p to dstThread %p "
                 "(%zu effects moved, %zu effects started)",
-                __func__, sessionId, srcThread, dstThread, removed.size(), started);
+                __func__, sessionId, (srcThread != nullptr ? "srcThread" : "srcChain"),
+                (srcThread != nullptr ? (void*) srcThread : (void*) srcChain), dstThread,
+                removed.size(), started);
     }
     return status;
 }
@@ -4586,7 +4904,7 @@
         // removeEffect_l() has stopped the effect if it was active so it must be restarted
         if (effect->state() == IAfEffectModule::ACTIVE ||
             effect->state() == IAfEffectModule::STOPPING) {
-            effect->start();
+            effect->start_l();
         }
     }
 
@@ -4672,7 +4990,7 @@
     ALOGV("updateOrphanEffectChains session %d index %zd", session, index);
     if (index >= 0) {
         sp<IAfEffectChain> chain = mOrphanEffectChains.valueAt(index);
-        if (chain->removeEffect_l(effect, true) == 0) {
+        if (chain->removeEffect(effect, true) == 0) {
             ALOGV("updateOrphanEffectChains removing effect chain at index %zd", index);
             mOrphanEffectChains.removeItemsAt(index);
         }
@@ -4749,6 +5067,30 @@
     return mPatchPanel->getAudioMixPort_l(devicePort, mixPort);
 }
 
+status_t AudioFlinger::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    audio_utils::lock_guard _l(mutex());
+    ALOGV("%s", __func__);
+
+    std::map<audio_port_handle_t, bool> tracksInternalMuteMap;
+    for (const auto& trackInternalMute : tracksInternalMute) {
+        audio_port_handle_t portId = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_int32_t_audio_port_handle_t(trackInternalMute.portId));
+        tracksInternalMuteMap.emplace(portId, trackInternalMute.muted);
+    }
+    for (size_t i = 0; i < mPlaybackThreads.size() && !tracksInternalMuteMap.empty(); i++) {
+        mPlaybackThreads.valueAt(i)->setTracksInternalMute(&tracksInternalMuteMap);
+    }
+    return NO_ERROR;
+}
+
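For reference, a hypothetical caller-side sketch of the new setTracksInternalMute() entry point, not part of this patch; it assumes media::TrackInternalMuteInfo exposes exactly the portId and muted fields read above, and that af is a connected IAudioFlinger:

// Hypothetical illustration: internally mute a single playback track by its port id.
media::TrackInternalMuteInfo info;
info.portId = static_cast<int32_t>(trackPortId);  // legacy audio_port_handle_t carried as int32_t
info.muted = true;
const status_t status = af->setTracksInternalMute({info});
ALOGW_IF(status != NO_ERROR, "setTracksInternalMute failed: %d", status);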
+status_t AudioFlinger::resetReferencesForTest() {
+    mDeviceEffectManager.clear();
+    mPatchPanel.clear();
+    mMelReporter->resetReferencesForTest();
+    return NO_ERROR;
+}
+
 // ----------------------------------------------------------------------------
 
 status_t AudioFlinger::onTransactWrapper(TransactionCode code,
@@ -4783,8 +5125,10 @@
         case TransactionCode::INVALIDATE_TRACKS:
         case TransactionCode::GET_AUDIO_POLICY_CONFIG:
         case TransactionCode::GET_AUDIO_MIX_PORT:
+        case TransactionCode::SET_TRACKS_INTERNAL_MUTE:
+        case TransactionCode::RESET_REFERENCES_FOR_TEST:
             ALOGW("%s: transaction %d received from PID %d",
-                  __func__, code, IPCThreadState::self()->getCallingPid());
+                  __func__, static_cast<int>(code), IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
             switch (code) {
                 case TransactionCode::SET_RECORD_SILENCED:
@@ -4817,7 +5161,8 @@
         case TransactionCode::SUPPORTS_BLUETOOTH_VARIABLE_LATENCY: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
-                      __func__, code, IPCThreadState::self()->getCallingPid(),
+                      __func__, static_cast<int>(code),
+                      IPCThreadState::self()->getCallingPid(),
                       IPCThreadState::self()->getCallingUid());
                 // return status only for non-void methods
                 switch (code) {
@@ -4867,8 +5212,8 @@
         } else {
             getIAudioFlingerStatistics().event(code, elapsedMs);
         }
-    }, mediautils::TimeCheck::kDefaultTimeoutDuration,
-    mediautils::TimeCheck::kDefaultSecondChanceDuration,
+    }, mediautils::TimeCheck::getDefaultTimeoutDuration(),
+    mediautils::TimeCheck::getDefaultSecondChanceDuration(),
     true /* crashOnTimeout */);
 
     return delegate();
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 0f75d6e..b57a355 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -33,6 +33,7 @@
 #include <audio_utils/FdToString.h>
 #include <audio_utils/SimpleLog.h>
 #include <media/IAudioFlinger.h>
+#include <media/IAudioPolicyServiceLocal.h>
 #include <media/MediaMetricsItem.h>
 #include <media/audiohal/DevicesFactoryHalInterface.h>
 #include <mediautils/ServiceUtilities.h>
@@ -61,6 +62,8 @@
 public:
     static void instantiate() ANDROID_API;
 
+    status_t resetReferencesForTest();
+
 private:
 
     // ---- begin IAudioFlinger interface
@@ -93,10 +96,6 @@
     status_t setStreamMute(audio_stream_type_t stream, bool muted) final
             EXCLUDES_AudioFlinger_Mutex;
 
-    float streamVolume(audio_stream_type_t stream,
-            audio_io_handle_t output) const final EXCLUDES_AudioFlinger_Mutex;
-    bool streamMute(audio_stream_type_t stream) const final EXCLUDES_AudioFlinger_Mutex;
-
     status_t setMode(audio_mode_t mode) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t setMicMute(bool state) final EXCLUDES_AudioFlinger_Mutex;
@@ -259,6 +258,10 @@
     status_t getAudioMixPort(const struct audio_port_v7* devicePort,
                              struct audio_port_v7* mixPort) const final EXCLUDES_AudioFlinger_Mutex;
 
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) final
+            EXCLUDES_AudioFlinger_Mutex;
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
             const std::function<status_t()>& delegate) final EXCLUDES_AudioFlinger_Mutex;
 
@@ -330,9 +333,12 @@
             audio_config_base_t* mixerConfig,
             audio_devices_t deviceType,
             const String8& address,
-            audio_output_flags_t flags) final REQUIRES(mutex());
+            audio_output_flags_t flags,
+            audio_attributes_t attributes) final REQUIRES(mutex());
     const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
-            getAudioHwDevs_l() const final REQUIRES(mutex()) { return mAudioHwDevs; }
+            getAudioHwDevs_l() const final REQUIRES(mutex(), hardwareMutex()) {
+                return mAudioHwDevs;
+            }
     void updateDownStreamPatches_l(const struct audio_patch* patch,
             const std::set<audio_io_handle_t>& streams) final REQUIRES(mutex());
     void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices) final
@@ -373,7 +379,8 @@
             EXCLUDES_AudioFlinger_Mutex;
 
     status_t moveEffectChain_ll(audio_session_t sessionId,
-            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread) final
+            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread,
+            IAfEffectChain* srcChain = nullptr) final
             REQUIRES(mutex(), audio_utils::ThreadBase_Mutex);
 
     // This is a helper that is called during incoming binder calls.
@@ -396,6 +403,10 @@
     void onSupportedLatencyModesChanged(
             audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) final
             EXCLUDES_AudioFlinger_ClientMutex;
+    void onHardError(std::set<audio_port_handle_t>& trackPortIds) final
+            EXCLUDES_AudioFlinger_ClientMutex;
+
+    const ::com::android::media::permission::IPermissionProvider& getPermissionProvider() final;
 
     // ---- end of IAfThreadCallback interface
 
@@ -419,6 +430,13 @@
                             const sp<MmapStreamCallback>& callback,
                             sp<MmapStreamInterface>& interface,
             audio_port_handle_t *handle) EXCLUDES_AudioFlinger_Mutex;
+
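+    // Sets the local AudioPolicyService interface; only the first call has any effect.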
+    void initAudioPolicyLocal(sp<media::IAudioPolicyServiceLocal> audioPolicyLocal) {
+        if (mAudioPolicyServiceLocal.load() == nullptr) {
+            mAudioPolicyServiceLocal = std::move(audioPolicyLocal);
+        }
+    }
+
 private:
     // FIXME The 400 is temporarily too high until a leak of writers in media.log is fixed.
     static const size_t kLogMemorySize = 400 * 1024;
@@ -461,12 +479,14 @@
                             NotificationClient(const sp<AudioFlinger>& audioFlinger,
                                                 const sp<media::IAudioFlingerClient>& client,
                                                 pid_t pid,
-                                                uid_t uid);
+                                                uid_t uid,
+                                                std::string_view packageName);
         virtual             ~NotificationClient();
 
                 sp<media::IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
                 pid_t getPid() const { return mPid; }
                 uid_t getUid() const { return mUid; }
+                const std::string& getPackageName() const { return mPackageName; }
 
                 // IBinder::DeathRecipient
                 virtual     void        binderDied(const wp<IBinder>& who);
@@ -477,6 +497,7 @@
         const sp<AudioFlinger>  mAudioFlinger;
         const pid_t             mPid;
         const uid_t             mUid;
+        const std::string       mPackageName;
         const sp<media::IAudioFlingerClient> mAudioFlingerClient;
     };
 
@@ -629,6 +650,10 @@
     DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*> mAudioHwDevs
             GUARDED_BY(hardwareMutex()) {nullptr /* defValue */};
 
+    static bool inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs);
+    std::set<AudioHwDevice*, decltype(&inputBufferSizeDevsCmp)>
+            mInputBufferSizeOrderedDevs GUARDED_BY(hardwareMutex()) {inputBufferSizeDevsCmp};
+
      const sp<DevicesFactoryHalInterface> mDevicesFactoryHal =
              DevicesFactoryHalInterface::create();
      /* const */ sp<DevicesFactoryHalCallback> mDevicesFactoryHalCallback;  // set onFirstRef().
@@ -672,8 +697,7 @@
 
     DefaultKeyedVector<audio_io_handle_t, sp<IAfRecordThread>> mRecordThreads GUARDED_BY(mutex());
 
-    DefaultKeyedVector<pid_t, sp<NotificationClient>> mNotificationClients
-            GUARDED_BY(clientMutex());
+    std::map<pid_t, sp<NotificationClient>> mNotificationClients GUARDED_BY(clientMutex());
 
                 // updated by atomic_fetch_add_explicit
     volatile atomic_uint_fast32_t mNextUniqueIds[AUDIO_UNIQUE_ID_USE_MAX];  // ctor init
@@ -702,6 +726,16 @@
 
     sp<Client> registerPid(pid_t pid) EXCLUDES_AudioFlinger_ClientMutex; // always returns non-0
 
+    sp<IAfEffectHandle> createOrphanEffect_l(const sp<Client>& client,
+                                          const sp<media::IEffectClient>& effectClient,
+                                          int32_t priority,
+                                          audio_session_t sessionId,
+                                          effect_descriptor_t *desc,
+                                          int *enabled,
+                                          status_t *status /*non-NULL*/,
+                                          bool pinned,
+                                          bool notifyFramesProcessed) REQUIRES(mutex());
+
     // for use from destructor
     status_t closeOutput_nonvirtual(audio_io_handle_t output) EXCLUDES_AudioFlinger_Mutex;
     status_t closeInput_nonvirtual(audio_io_handle_t input) EXCLUDES_AudioFlinger_Mutex;
@@ -763,6 +797,9 @@
 
     // Bluetooth Variable latency control logic is enabled or disabled
     std::atomic<bool> mBluetoothLatencyModesEnabled = true;
+
+    // Local interface to AudioPolicyService: initialized late, but logically const
+    mediautils::atomic_sp<media::IAudioPolicyServiceLocal> mAudioPolicyServiceLocal;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 201d147..7cb9329 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -71,10 +71,15 @@
 
 void DeviceEffectManager::onReleaseAudioPatch(audio_patch_handle_t handle) {
     ALOGV("%s", __func__);
+    // Keep references to disconnected handles so they are destroyed after the lock is released.
+    std::vector<sp<IAfEffectHandle>> disconnectedHandles{};
     audio_utils::lock_guard _l(mutex());
     for (auto& effectProxies : mDeviceEffects) {
         for (auto& effect : effectProxies.second) {
-            effect->onReleasePatch(handle);
+            sp<IAfEffectHandle> disconnectedHandle = effect->onReleasePatch(handle);
+            if (disconnectedHandle != nullptr) {
+                disconnectedHandles.push_back(std::move(disconnectedHandle));
+            }
         }
     }
 }
@@ -143,7 +148,7 @@
         if (lStatus == NO_ERROR) {
             lStatus = effect->addHandle(handle.get());
             if (lStatus == NO_ERROR) {
-                lStatus = effect->init(patches);
+                lStatus = effect->init_l(patches);
                 if (lStatus == NAME_NOT_FOUND) {
                     lStatus = NO_ERROR;
                 }
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 7045c8b..287d838 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -139,7 +139,7 @@
     // check if effects should be suspended or restored when a given effect is enable or disabled
     void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect __unused,
                           bool enabled __unused, bool threadLocked __unused) final {}
-    void resetVolume() final {}
+    void resetVolume_l() final REQUIRES(audio_utils::EffectChain_Mutex) {}
     product_strategy_t strategy() const final { return static_cast<product_strategy_t>(0); }
     int32_t activeTrackCnt() const final { return 0; }
     void onEffectEnable(const sp<IAfEffectBase>& effect __unused) final {}
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 3147433..ec8d135 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -31,6 +31,7 @@
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioEffect.h>
+#include <media/EffectClientAsyncProxy.h>
 #include <media/ShmemCompat.h>
 #include <media/TypeConverter.h>
 #include <media/audiohal/EffectHalInterface.h>
@@ -38,6 +39,7 @@
 #include <mediautils/MethodStatistics.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/TimeCheck.h>
+#include <system/audio_effects/audio_effects_utils.h>
 #include <system/audio_effects/effect_aec.h>
 #include <system/audio_effects/effect_downmix.h>
 #include <system/audio_effects/effect_dynamicsprocessing.h>
@@ -69,6 +71,7 @@
 namespace android {
 
 using aidl_utils::statusTFromBinderStatus;
+using android::effect::utils::EffectParamWriter;
 using audioflinger::EffectConfiguration;
 using binder::Status;
 
@@ -561,24 +564,21 @@
 #undef LOG_TAG
 #define LOG_TAG "EffectModule"
 
-EffectModule::EffectModule(const sp<EffectCallbackInterface>& callback,
-                                         effect_descriptor_t *desc,
-                                         int id,
-                                         audio_session_t sessionId,
-                                         bool pinned,
-                                         audio_port_handle_t deviceId)
+EffectModule::EffectModule(const sp<EffectCallbackInterface>& callback, effect_descriptor_t* desc,
+                           int id, audio_session_t sessionId, bool pinned,
+                           audio_port_handle_t deviceId)
     : EffectBase(callback, desc, id, sessionId, pinned),
       // clear mConfig to ensure consistent initial value of buffer framecount
       // in case buffers are associated by setInBuffer() or setOutBuffer()
-      // prior to configure().
+      // prior to configure_l().
       mConfig{{}, {}},
       mStatus(NO_INIT),
-      mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
+      mMaxDisableWaitCnt(1), // set by configure_l(), should be >= 1
       mDisableWaitCnt(0),    // set by process() and updateState()
       mOffloaded(false),
-      mIsOutput(false)
-      , mSupportsFloat(false)
-{
+      mIsOutput(false),
+      mSupportsFloat(false),
+      mEffectInterfaceDebug(desc->name) {
     ALOGV("Constructor %p pinned %d", this, pinned);
     int lStatus;
 
@@ -586,21 +586,24 @@
     mStatus = callback->createEffectHal(
             &desc->uuid, sessionId, deviceId, &mEffectInterface);
     if (mStatus != NO_ERROR) {
+        ALOGE("%s createEffectHal failed: %d", __func__, mStatus);
         return;
     }
-    lStatus = init();
+    lStatus = init_l();
     if (lStatus < 0) {
         mStatus = lStatus;
         goto Error;
     }
 
-    setOffloaded(callback->isOffload(), callback->io());
-    ALOGV("Constructor success name %s, Interface %p", mDescriptor.name, mEffectInterface.get());
+    setOffloaded_l(callback->isOffload(), callback->io());
+    ALOGV("%s Constructor success name %s, Interface %p", __func__, mDescriptor.name,
+          mEffectInterface.get());
 
     return;
 Error:
     mEffectInterface.clear();
-    ALOGV("Constructor Error %d", mStatus);
+    mEffectInterfaceDebug += " init failed:" + std::to_string(lStatus);
+    ALOGE("%s Constructor Error %d", __func__, mStatus);
 }
 
 EffectModule::~EffectModule()
@@ -611,15 +614,16 @@
         AudioEffect::guidToString(&mDescriptor.uuid, uuidStr, sizeof(uuidStr));
         ALOGW("EffectModule %p destructor called with unreleased interface, effect %s",
                 this, uuidStr);
-        release_l();
+        release_l("~EffectModule");
     }
 
 }
 
-bool EffectModule::updateState() {
+// return true if any effect started or stopped
+bool EffectModule::updateState_l() {
     audio_utils::lock_guard _l(mutex());
 
-    bool started = false;
+    bool startedOrStopped = false;
     switch (mState) {
     case RESTART:
         reset_l();
@@ -632,17 +636,17 @@
                    0,
                    mConfig.inputCfg.buffer.frameCount*sizeof(int32_t));
         }
-        if (start_l() == NO_ERROR) {
+        if (start_ll() == NO_ERROR) {
             mState = ACTIVE;
-            started = true;
+            startedOrStopped = true;
         } else {
             mState = IDLE;
         }
         break;
     case STOPPING:
         // volume control for offload and direct threads must take effect immediately.
-        if (stop_l() == NO_ERROR
-            && !(isVolumeControl() && isOffloadedOrDirect())) {
+        if (stop_ll() == NO_ERROR
+            && !(isVolumeControl() && isOffloadedOrDirect_l())) {
             mDisableWaitCnt = mMaxDisableWaitCnt;
         } else {
             mDisableWaitCnt = 1; // will cause immediate transition to IDLE
@@ -654,6 +658,7 @@
         // turn off sequence.
         if (--mDisableWaitCnt == 0) {
             reset_l();
+            startedOrStopped = true;
             mState = IDLE;
         }
         break;
@@ -668,7 +673,7 @@
         break;
     }
 
-    return started;
+    return startedOrStopped;
 }
 
 void EffectModule::process()
@@ -836,9 +841,9 @@
     mEffectInterface->command(EFFECT_CMD_RESET, 0, NULL, &replySize, &reply);
 }
 
-status_t EffectModule::configure()
+status_t EffectModule::configure_l()
 {
-    ALOGVV("configure() started");
+    ALOGVV("%s started", __func__);
     status_t status;
     uint32_t size;
     audio_channel_mask_t channelMask;
@@ -879,7 +884,7 @@
     mConfig.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
 
     // Don't use sample rate for thread if effect isn't offloadable.
-    if (callback->isOffloadOrDirect() && !isOffloaded()) {
+    if (callback->isOffloadOrDirect() && !isOffloaded_l()) {
         mConfig.inputCfg.samplingRate = DEFAULT_OUTPUT_SAMPLE_RATE;
         ALOGV("Overriding effect input as 48kHz");
     } else {
@@ -909,9 +914,9 @@
     mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
     mIsOutput = callback->isOutput();
 
-    ALOGV("configure() %p chain %p buffer %p framecount %zu",
-          this, callback->chain().promote().get(),
-          mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
+    ALOGV("%s %p chain %p buffer %p framecount %zu", __func__, this,
+          callback->chain().promote().get(), mConfig.inputCfg.buffer.raw,
+          mConfig.inputCfg.buffer.frameCount);
 
     status_t cmdStatus;
     size = sizeof(int);
@@ -1012,11 +1017,11 @@
 exit:
     // TODO: consider clearing mConfig on error.
     mStatus = status;
-    ALOGVV("configure ended");
+    ALOGVV("%s ended", __func__);
     return status;
 }
 
-status_t EffectModule::init()
+status_t EffectModule::init_l()
 {
     audio_utils::lock_guard _l(mutex());
     if (mEffectInterface == 0) {
@@ -1043,26 +1048,39 @@
             return;
         }
 
-        (void)getCallback()->addEffectToHal(mEffectInterface);
-        mCurrentHalStream = getCallback()->io();
+        status_t status = getCallback()->addEffectToHal(mEffectInterface);
+        if (status == NO_ERROR) {
+            mCurrentHalStream = getCallback()->io();
+        }
     }
 }
 
-// start() must be called with PlaybackThread::mutex() or EffectChain::mutex() held
-status_t EffectModule::start()
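+// Adds the effect to the HAL at most once; mAddedToHal records whether the add succeeded.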
+void HwAccDeviceEffectModule::addEffectToHal_l()
+{
+    if (mAddedToHal) {
+        return;
+    }
+    status_t status = getCallback()->addEffectToHal(mEffectInterface);
+    if (status == NO_ERROR) {
+        mAddedToHal = true;
+    }
+}
+
+// start_l() must be called with EffectChain::mutex() held
+status_t EffectModule::start_l()
 {
     status_t status;
     {
         audio_utils::lock_guard _l(mutex());
-        status = start_l();
+        status = start_ll();
     }
     if (status == NO_ERROR) {
-        getCallback()->resetVolume();
+        getCallback()->resetVolume_l();
     }
     return status;
 }
 
-status_t EffectModule::start_l()
+status_t EffectModule::start_ll()
 {
     if (mEffectInterface == 0) {
         return NO_INIT;
@@ -1086,13 +1104,13 @@
     return status;
 }
 
-status_t EffectModule::stop()
+status_t EffectModule::stop_l()
 {
     audio_utils::lock_guard _l(mutex());
-    return stop_l();
+    return stop_ll();
 }
 
-status_t EffectModule::stop_l()
+status_t EffectModule::stop_ll()
 {
     if (mEffectInterface == 0) {
         return NO_INIT;
@@ -1103,11 +1121,11 @@
     status_t cmdStatus = NO_ERROR;
     uint32_t size = sizeof(status_t);
 
-    if (isVolumeControl() && isOffloadedOrDirect()) {
+    if (isVolumeControl() && isOffloadedOrDirect_l()) {
         // We have the EffectChain and EffectModule lock, permit a reentrant call to setVolume:
         // resetVolume_l --> setVolume_l --> EffectModule::setVolume
         mSetVolumeReentrantTid = gettid();
-        getCallback()->resetVolume();
+        getCallback()->resetVolume_l();
         mSetVolumeReentrantTid = INVALID_PID;
     }
 
@@ -1126,13 +1144,14 @@
 }
 
 // must be called with EffectChain::mutex() held
-void EffectModule::release_l()
+void EffectModule::release_l(const std::string& from)
 {
     if (mEffectInterface != 0) {
         removeEffectFromHal_l();
         // release effect engine
         mEffectInterface->close();
         mEffectInterface.clear();
+        mEffectInterfaceDebug += " released by: " + from;
     }
 }
 
@@ -1149,6 +1168,16 @@
     return NO_ERROR;
 }
 
+status_t HwAccDeviceEffectModule::removeEffectFromHal_l()
+{
+    if (!mAddedToHal) {
+        return NO_ERROR;
+    }
+    getCallback()->removeEffectFromHal(mEffectInterface);
+    mAddedToHal = false;
+    return NO_ERROR;
+}
+
 // round up delta valid if value and divisor are positive.
 template <typename T>
 static T roundUpDelta(const T &value, const T &divisor) {
@@ -1162,7 +1191,7 @@
                      std::vector<uint8_t>* reply)
 {
     audio_utils::lock_guard _l(mutex());
-    ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface.get());
+    ALOGVV("%s, cmdCode: %d, mEffectInterface: %p", __func__, cmdCode, mEffectInterface.get());
 
     if (mState == DESTROYED || mEffectInterface == 0) {
         return NO_INIT;
@@ -1258,20 +1287,20 @@
     }
 }
 
-bool EffectModule::isOffloadedOrDirect() const
+bool EffectModule::isOffloadedOrDirect_l() const
 {
     return getCallback()->isOffloadOrDirect();
 }
 
-bool EffectModule::isVolumeControlEnabled() const
+bool EffectModule::isVolumeControlEnabled_l() const
 {
-    return (isVolumeControl() && (isOffloadedOrDirect() ? isEnabled() : isProcessEnabled()));
+    return (isVolumeControl() && (isOffloadedOrDirect_l() ? isEnabled() : isProcessEnabled()));
 }
 
 void EffectModule::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
     ALOGVV("setInBuffer %p",(&buffer));
 
-    // mConfig.inputCfg.buffer.frameCount may be zero if configure() is not called yet.
+    // mConfig.inputCfg.buffer.frameCount may be zero if configure_l() is not called yet.
     if (buffer != 0) {
         mConfig.inputCfg.buffer.raw = buffer->audioBuffer()->raw;
         buffer->setFrameCount(mConfig.inputCfg.buffer.frameCount);
@@ -1317,7 +1346,7 @@
 void EffectModule::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
     ALOGVV("setOutBuffer %p",(&buffer));
 
-    // mConfig.outputCfg.buffer.frameCount may be zero if configure() is not called yet.
+    // mConfig.outputCfg.buffer.frameCount may be zero if configure_l() is not called yet.
     if (buffer != 0) {
         mConfig.outputCfg.buffer.raw = buffer->audioBuffer()->raw;
         buffer->setFrameCount(mConfig.outputCfg.buffer.frameCount);
@@ -1356,37 +1385,49 @@
     }
 }
 
-status_t EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
-{
+status_t EffectModule::setVolume_l(uint32_t* left, uint32_t* right, bool controller, bool force) {
     AutoLockReentrant _l(mutex(), mSetVolumeReentrantTid);
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
     status_t status = NO_ERROR;
     // Send volume indication if EFFECT_FLAG_VOLUME_IND is set and read back altered volume
-    // if controller flag is set (Note that controller == TRUE => EFFECT_FLAG_VOLUME_CTRL set)
-    if (isProcessEnabled() &&
+    // if the controller flag is set (controller == TRUE means this effect is the volume
+    // controller of the effect chain)
+    if (((isOffloadedOrDirect_l() ? isEnabled() : isProcessEnabled()) || force) &&
             ((mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_CTRL ||
              (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_IND ||
              (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_MONITOR)) {
-        status = setVolumeInternal(left, right, controller);
+        status = setVolumeInternal_ll(left, right, controller);
     }
     return status;
 }
 
-status_t EffectModule::setVolumeInternal(
+status_t EffectModule::setVolumeInternal_ll(
         uint32_t *left, uint32_t *right, bool controller) {
+    if (mVolume.has_value() && *left == mVolume.value()[0] && *right == mVolume.value()[1] &&
+            !controller) {
+        LOG_ALWAYS_FATAL_IF(
+                !mReturnedVolume.has_value(),
+                "The cached returned volume must not be null when the cached volume has a value");
+        *left = mReturnedVolume.value()[0];
+        *right = mReturnedVolume.value()[1];
+        return NO_ERROR;
+    }
+    LOG_ALWAYS_FATAL_IF(mEffectInterface == nullptr, "%s", mEffectInterfaceDebug.c_str());
     uint32_t volume[2] = {*left, *right};
-    uint32_t *pVolume = controller ? volume : nullptr;
+    uint32_t* pVolume = isVolumeControl() ? volume : nullptr;
     uint32_t size = sizeof(volume);
     status_t status = mEffectInterface->command(EFFECT_CMD_SET_VOLUME,
                                                 size,
                                                 volume,
                                                 &size,
                                                 pVolume);
-    if (controller && status == NO_ERROR && size == sizeof(volume)) {
+    if (pVolume && status == NO_ERROR && size == sizeof(volume)) {
+        mVolume = {*left, *right}; // Cache the value that has been set
         *left = volume[0];
         *right = volume[1];
+        mReturnedVolume = {*left, *right};
     }
     return status;
 }
@@ -1480,7 +1521,7 @@
     return status;
 }
 
-status_t EffectModule::setOffloaded(bool offloaded, audio_io_handle_t io)
+status_t EffectModule::setOffloaded_l(bool offloaded, audio_io_handle_t io)
 {
     audio_utils::lock_guard _l(mutex());
     if (mStatus != NO_ERROR) {
@@ -1509,11 +1550,11 @@
         }
         mOffloaded = false;
     }
-    ALOGV("setOffloaded() offloaded %d io %d status %d", offloaded, io, status);
+    ALOGV("%s offloaded %d io %d status %d", __func__, offloaded, io, status);
     return status;
 }
 
-bool EffectModule::isOffloaded() const
+bool EffectModule::isOffloaded_l() const
 {
     audio_utils::lock_guard _l(mutex());
     return mOffloaded;
@@ -1537,8 +1578,7 @@
     return IAfEffectModule::isSpatializer(&mDescriptor.type);
 }
 
-status_t EffectModule::setHapticIntensity(int id, os::HapticScale intensity)
-{
+status_t EffectModule::setHapticScale_l(int id, os::HapticScale hapticScale) {
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1547,13 +1587,20 @@
         return INVALID_OPERATION;
     }
 
-    std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t));
-    effect_param_t *param = (effect_param_t*) request.data();
-    param->psize = sizeof(int32_t);
-    param->vsize = sizeof(int32_t) * 2;
-    *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
-    *((int32_t*)param->data + 1) = id;
-    *((int32_t*)param->data + 2) = static_cast<int32_t>(intensity);
+    size_t psize = sizeof(int32_t); // HG_PARAM_HAPTIC_INTENSITY
+    size_t vsize = sizeof(int32_t) + sizeof(os::HapticScale); // id + hapticScale
+    std::vector<uint8_t> request(sizeof(effect_param_t) + psize + vsize);
+    effect_param_t *effectParam = (effect_param_t*) request.data();
+    effectParam->psize = psize;
+    effectParam->vsize = vsize;
+
+    int32_t intensityParam = static_cast<int32_t>(HG_PARAM_HAPTIC_INTENSITY);
+    EffectParamWriter writer(*effectParam);
+    writer.writeToParameter(&intensityParam);
+    writer.writeToValue(&id);
+    writer.writeToValue(&hapticScale);
+    writer.finishValueWrite();
+
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
@@ -1563,8 +1610,7 @@
     return status;
 }
 
-status_t EffectModule::setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo)
-{
+status_t EffectModule::setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo) {
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1573,17 +1619,21 @@
         return INVALID_OPERATION;
     }
 
-    const size_t paramCount = 3;
-    std::vector<uint8_t> request(
-            sizeof(effect_param_t) + sizeof(int32_t) + paramCount * sizeof(float));
-    effect_param_t *param = (effect_param_t*) request.data();
-    param->psize = sizeof(int32_t);
-    param->vsize = paramCount * sizeof(float);
-    *(int32_t*)param->data = HG_PARAM_VIBRATOR_INFO;
-    float* vibratorInfoPtr = reinterpret_cast<float*>(param->data + sizeof(int32_t));
-    vibratorInfoPtr[0] = vibratorInfo.resonantFrequency;
-    vibratorInfoPtr[1] = vibratorInfo.qFactor;
-    vibratorInfoPtr[2] = vibratorInfo.maxAmplitude;
+    size_t psize = sizeof(int32_t); // HG_PARAM_VIBRATOR_INFO
+    size_t vsize = 3 * sizeof(float); // resonantFrequency + qFactor + maxAmplitude
+    std::vector<uint8_t> request(sizeof(effect_param_t) + psize + vsize);
+    effect_param_t *effectParam = (effect_param_t*) request.data();
+    effectParam->psize = psize;
+    effectParam->vsize = vsize;
+
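+    // Pack HG_PARAM_VIBRATOR_INFO followed by the resonant frequency, Q factor and max amplitude
+    // using EffectParamWriter.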
+    int32_t infoParam = static_cast<int32_t>(HG_PARAM_VIBRATOR_INFO);
+    EffectParamWriter writer(*effectParam);
+    writer.writeToParameter(&infoParam);
+    writer.writeToValue(&vibratorInfo.resonantFrequency);
+    writer.writeToValue(&vibratorInfo.qFactor);
+    writer.writeToValue(&vibratorInfo.maxAmplitude);
+    writer.finishValueWrite();
+
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
@@ -1593,8 +1643,8 @@
     return status;
 }
 
-status_t EffectModule::getConfigs(
-        audio_config_base_t* inputCfg, audio_config_base_t* outputCfg, bool* isOutput) const {
+status_t EffectModule::getConfigs_l(audio_config_base_t* inputCfg, audio_config_base_t* outputCfg,
+                                    bool* isOutput) const {
     audio_utils::lock_guard _l(mutex());
     if (mConfig.inputCfg.mask == 0 || mConfig.outputCfg.mask == 0) {
         return NO_INIT;
@@ -1609,7 +1659,7 @@
     return NO_ERROR;
 }
 
-status_t EffectModule::sendMetadata(const std::vector<playback_track_metadata_v7_t>& metadata) {
+status_t EffectModule::sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata) {
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1720,6 +1770,9 @@
         const sp<media::IEffectClient>& effectClient,
         int32_t priority, bool notifyFramesProcessed)
 {
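+    // A handle with neither a binder client nor a callback is created by AudioFlinger for its own
+    // use; give it the dedicated InternalEffectHandle type with its own lock order.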
+    if (client == nullptr && effectClient == nullptr) {
+        return sp<InternalEffectHandle>::make(effect, notifyFramesProcessed);
+    }
     return sp<EffectHandle>::make(
             effect, client, effectClient, priority, notifyFramesProcessed);
 }
@@ -1727,14 +1780,18 @@
 EffectHandle::EffectHandle(const sp<IAfEffectBase>& effect,
                                          const sp<Client>& client,
                                          const sp<media::IEffectClient>& effectClient,
-                                         int32_t priority, bool notifyFramesProcessed)
-    : BnEffect(),
-    mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL),
+                                         int32_t priority, bool notifyFramesProcessed,
+                                         bool isInternal,
+                                         audio_utils::MutexOrder mutexOrder)
+    : BnEffect(), mMutex(mutexOrder),
+    mEffect(effect), mEffectClient(media::EffectClientAsyncProxy::makeIfNeeded(effectClient)),
+    mClient(client), mCblk(nullptr),
     mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false),
-    mNotifyFramesProcessed(notifyFramesProcessed)
+    mNotifyFramesProcessed(notifyFramesProcessed), mIsInternal(isInternal)
 {
     ALOGV("constructor %p client %p", this, client.get());
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    setInheritRt(true);
 
     if (client == 0) {
         return;
@@ -1897,7 +1954,7 @@
 
 void EffectHandle::disconnect(bool unpinIfLast)
 {
-    audio_utils::lock_guard _l(mutex());
+    audio_utils::unique_lock _l(mutex());
     ALOGV("disconnect(%s) %p", unpinIfLast ? "true" : "false", this);
     if (mDisconnected) {
         if (unpinIfLast) {
@@ -1909,11 +1966,19 @@
     {
         sp<IAfEffectBase> effect = mEffect.promote();
         if (effect != 0) {
+            // Unlock for internal handles: a device effect may need to acquire the AudioFlinger
+            // lock, and an internal effect handle may require the proxy lock (and vice versa).
+            if (isInternal()) {
+                _l.unlock();
+            }
             if (effect->disconnectHandle(this, unpinIfLast) > 0) {
                 ALOGW("%s Effect handle %p disconnected after thread destruction",
                     __func__, this);
             }
             effect->updatePolicyState();
+            if (isInternal()) {
+                _l.lock();
+            }
         }
     }
 
@@ -1948,7 +2013,7 @@
     audio_config_base_t inputCfg = AUDIO_CONFIG_BASE_INITIALIZER;
     audio_config_base_t outputCfg = AUDIO_CONFIG_BASE_INITIALIZER;
     bool isOutput;
-    status_t status = effectModule->getConfigs(&inputCfg, &outputCfg, &isOutput);
+    status_t status = effectModule->getConfigs_l(&inputCfg, &outputCfg, &isOutput);
     if (status == NO_ERROR) {
         constexpr bool isInput = false; // effects always use 'OUT' channel masks.
         _config->inputCfg = VALUE_OR_RETURN_STATUS_AS_OUT(
@@ -2155,27 +2220,31 @@
 /* static */
 sp<IAfEffectChain> IAfEffectChain::create(
         const sp<IAfThreadBase>& thread,
-        audio_session_t sessionId)
+        audio_session_t sessionId,
+        const sp<IAfThreadCallback>& afThreadCallback)
 {
-    return sp<EffectChain>::make(thread, sessionId);
+    return sp<EffectChain>::make(thread, sessionId, afThreadCallback);
 }
 
-EffectChain::EffectChain(const sp<IAfThreadBase>& thread,
-                                       audio_session_t sessionId)
+EffectChain::EffectChain(const sp<IAfThreadBase>& thread, audio_session_t sessionId,
+                         const sp<IAfThreadCallback>& afThreadCallback)
     : mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0),
       mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
       mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX),
-      mEffectCallback(new EffectCallback(wp<EffectChain>(this), thread))
+      mEffectCallback(new EffectCallback(wp<EffectChain>(this), thread, afThreadCallback))
 {
-    mStrategy = thread->getStrategyForStream(AUDIO_STREAM_MUSIC);
-    mMaxTailBuffers = ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
-                                    thread->frameCount();
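+    // The thread may be null when the chain is created before being attached to a thread; in that
+    // case the strategy and tail buffer count are set later in setThread().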
+    if (thread != nullptr) {
+        mStrategy = thread->getStrategyForStream(AUDIO_STREAM_MUSIC);
+        mMaxTailBuffers =
+            ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
+                thread->frameCount();
+    }
 }
 
-// getEffectFromDesc_l() must be called with IAfThreadBase::mutex() held
-sp<IAfEffectModule> EffectChain::getEffectFromDesc_l(
+sp<IAfEffectModule> EffectChain::getEffectFromDesc(
         effect_descriptor_t *descriptor) const
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
 
     for (size_t i = 0; i < size; i++) {
@@ -2189,6 +2258,7 @@
 // getEffectFromId_l() must be called with IAfThreadBase::mutex() held
 sp<IAfEffectModule> EffectChain::getEffectFromId_l(int id) const
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
 
     for (size_t i = 0; i < size; i++) {
@@ -2204,6 +2274,7 @@
 sp<IAfEffectModule> EffectChain::getEffectFromType_l(
         const effect_uuid_t *type) const
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
 
     for (size_t i = 0; i < size; i++) {
@@ -2214,7 +2285,7 @@
     return 0;
 }
 
-std::vector<int> EffectChain::getEffectIds() const
+std::vector<int> EffectChain::getEffectIds_l() const
 {
     std::vector<int> ids;
     audio_utils::lock_guard _l(mutex());
@@ -2244,8 +2315,7 @@
 }
 
 // Must be called with EffectChain::mutex() locked
-void EffectChain::process_l()
-{
+void EffectChain::process_l() {
     // never process effects when:
     // - on an OFFLOAD thread
     // - no more tracks are on the session and the effect tail has been rendered
@@ -2288,15 +2358,17 @@
     }
     bool doResetVolume = false;
     for (size_t i = 0; i < size; i++) {
-        doResetVolume = mEffects[i]->updateState() || doResetVolume;
+        // Reset the volume when any effect has just started or stopped.
+        // resetVolume_l checks whether the volume controller effect in the chain needs an update
+        // and applies the correct volume.
+        doResetVolume = mEffects[i]->updateState_l() || doResetVolume;
     }
     if (doResetVolume) {
         resetVolume_l();
     }
 }
 
-// createEffect_l() must be called with IAfThreadBase::mutex() held
-status_t EffectChain::createEffect_l(sp<IAfEffectModule>& effect,
+status_t EffectChain::createEffect(sp<IAfEffectModule>& effect,
                                                    effect_descriptor_t *desc,
                                                    int id,
                                                    audio_session_t sessionId,
@@ -2306,7 +2378,7 @@
     effect = new EffectModule(mEffectCallback, desc, id, sessionId, pinned, AUDIO_PORT_HANDLE_NONE);
     status_t lStatus = effect->status();
     if (lStatus == NO_ERROR) {
-        lStatus = addEffect_ll(effect);
+        lStatus = addEffect_l(effect);
     }
     if (lStatus != NO_ERROR) {
         effect.clear();
@@ -2314,22 +2386,22 @@
     return lStatus;
 }
 
-// addEffect_l() must be called with IAfThreadBase::mutex() held
-status_t EffectChain::addEffect_l(const sp<IAfEffectModule>& effect)
+status_t EffectChain::addEffect(const sp<IAfEffectModule>& effect)
 {
     audio_utils::lock_guard _l(mutex());
-    return addEffect_ll(effect);
+    return addEffect_l(effect);
 }
-// addEffect_l() must be called with IAfThreadBase::mutex() and EffectChain::mutex() held
-status_t EffectChain::addEffect_ll(const sp<IAfEffectModule>& effect)
+// addEffect_l() must be called with EffectChain::mutex() held
+status_t EffectChain::addEffect_l(const sp<IAfEffectModule>& effect)
 {
     effect->setCallback(mEffectCallback);
 
     effect_descriptor_t desc = effect->desc();
+    ssize_t idx_insert = 0;
     if ((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
         // Auxiliary effects are inserted at the beginning of mEffects vector as
         // they are processed first and accumulated in chain input buffer
-        mEffects.insertAt(effect, 0);
+        mEffects.insertAt(effect, idx_insert);
 
         // the input buffer for auxiliary effect contains mono samples in
         // 32 bit format. This is to avoid saturation in AudioMixer
@@ -2342,14 +2414,14 @@
                 numSamples * sizeof(float), &halBuffer);
         if (result != OK) return result;
 
-        effect->configure();
+        effect->configure_l();
 
         effect->setInBuffer(halBuffer);
         // auxiliary effects output samples to chain input buffer for further processing
         // by insert effects
         effect->setOutBuffer(mInBuffer);
     } else {
-        ssize_t idx_insert = getInsertIndex(desc);
+        idx_insert = getInsertIndex_l(desc);
         if (idx_insert < 0) {
             return INVALID_OPERATION;
         }
@@ -2357,7 +2429,7 @@
         size_t previousSize = mEffects.size();
         mEffects.insertAt(effect, idx_insert);
 
-        effect->configure();
+        effect->configure_l();
 
         // - By default:
         //   All effects read samples from chain input buffer.
@@ -2372,9 +2444,9 @@
             effect->setOutBuffer(mOutBuffer);
             if (idx_insert == 0) {
                 if (previousSize != 0) {
-                    mEffects[1]->configure();
+                    mEffects[1]->configure_l();
                     mEffects[1]->setInBuffer(mOutBuffer);
-                    mEffects[1]->updateAccessMode();      // reconfig if neeeded.
+                    mEffects[1]->updateAccessMode_l();  // reconfig if needed.
                 }
                 effect->setInBuffer(mInBuffer);
             } else {
@@ -2384,9 +2456,9 @@
             effect->setInBuffer(mInBuffer);
             if (idx_insert == static_cast<ssize_t>(previousSize)) {
                 if (idx_insert != 0) {
-                    mEffects[idx_insert-1]->configure();
+                    mEffects[idx_insert-1]->configure_l();
                     mEffects[idx_insert-1]->setOutBuffer(mInBuffer);
-                    mEffects[idx_insert - 1]->updateAccessMode();      // reconfig if neeeded.
+                    mEffects[idx_insert - 1]->updateAccessMode_l();  // reconfig if needed.
                 }
                 effect->setOutBuffer(mOutBuffer);
             } else {
@@ -2396,21 +2468,33 @@
         ALOGV("%s effect %p, added in chain %p at rank %zu",
                 __func__, effect.get(), this, idx_insert);
     }
-    effect->configure();
+    effect->configure_l();
+
+    if (effect->isVolumeControl()) {
+        const auto volumeControlIndex = findVolumeControl_l(0, mEffects.size());
+        if (!volumeControlIndex.has_value() || (ssize_t)volumeControlIndex.value() < idx_insert) {
+            // If this effect will become the volume control effect once it is enabled, force its
+            // initial volume to 0 for safer ramping. The actual volume will be set from
+            // setVolume_l.
+            uint32_t left = 0;
+            uint32_t right = 0;
+            effect->setVolume_l(&left, &right, true /*controller*/, true /*force*/);
+        }
+    }
 
     return NO_ERROR;
 }
 
 std::optional<size_t> EffectChain::findVolumeControl_l(size_t from, size_t to) const {
     for (size_t i = std::min(to, mEffects.size()); i > from; i--) {
-        if (mEffects[i - 1]->isVolumeControlEnabled()) {
+        if (mEffects[i - 1]->isVolumeControlEnabled_l()) {
             return i - 1;
         }
     }
     return std::nullopt;
 }
 
-ssize_t EffectChain::getInsertIndex(const effect_descriptor_t& desc) {
+ssize_t EffectChain::getInsertIndex_l(const effect_descriptor_t& desc) {
     // Insert effects are inserted at the end of mEffects vector as they are processed
     //  after track and auxiliary effects.
     // Insert effect order as a function of indicated preference:
@@ -2483,14 +2567,14 @@
     return idx_insert;
 }
 
-// removeEffect_l() must be called with IAfThreadBase::mutex() held
-size_t EffectChain::removeEffect_l(const sp<IAfEffectModule>& effect,
+size_t EffectChain::removeEffect(const sp<IAfEffectModule>& effect,
                                                  bool release)
 {
     audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     uint32_t type = effect->desc().flags & EFFECT_FLAG_TYPE_MASK;
 
+    const bool hasThreadAttached = mEffectCallback->hasThreadAttached();
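+    // Remember whether a thread is attached; buffer reconfiguration below is skipped otherwise.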
     for (size_t i = 0; i < size; i++) {
         if (effect == mEffects[i]) {
             // calling stop here will remove pre-processing effect from the audio HAL.
@@ -2498,17 +2582,17 @@
             // the middle of a read from audio HAL
             if (mEffects[i]->state() == EffectModule::ACTIVE ||
                     mEffects[i]->state() == EffectModule::STOPPING) {
-                mEffects[i]->stop();
+                mEffects[i]->stop_l();
             }
             if (release) {
-                mEffects[i]->release_l();
+                mEffects[i]->release_l("EffectChain::removeEffect");
             }
-
-            if (type != EFFECT_FLAG_TYPE_AUXILIARY) {
+            // Skip reconfiguration when no thread is attached (could lead to SIGFPE as the frame count is 0).
+            if (hasThreadAttached && type != EFFECT_FLAG_TYPE_AUXILIARY) {
                 if (i == size - 1 && i != 0) {
-                    mEffects[i - 1]->configure();
+                    mEffects[i - 1]->configure_l();
                     mEffects[i - 1]->setOutBuffer(mOutBuffer);
-                    mEffects[i - 1]->updateAccessMode();      // reconfig if neeeded.
+                    mEffects[i - 1]->updateAccessMode_l();      // reconfig if needed.
                 }
             }
             mEffects.removeAt(i);
@@ -2516,10 +2600,10 @@
             // make sure the input buffer configuration for the new first effect in the chain
             // is updated if needed (can switch from HAL channel mask to mixer channel mask)
             if (type != EFFECT_FLAG_TYPE_AUXILIARY // TODO(b/284522658) breaks for aux FX, why?
-                    && i == 0 && size > 1) {
-                mEffects[0]->configure();
+                    && hasThreadAttached && i == 0 && size > 1) {
+                mEffects[0]->configure_l();
                 mEffects[0]->setInBuffer(mInBuffer);
-                mEffects[0]->updateAccessMode();      // reconfig if neeeded.
+                mEffects[0]->updateAccessMode_l();      // reconfig if needed.
             }
 
             ALOGV("removeEffect_l() effect %p, removed from chain %p at rank %zu", effect.get(),
@@ -2534,6 +2618,7 @@
 // setDevices_l() must be called with IAfThreadBase::mutex() held
 void EffectChain::setDevices_l(const AudioDeviceTypeAddrVector &devices)
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
         mEffects[i]->setDevices(devices);
@@ -2543,6 +2628,7 @@
 // setInputDevice_l() must be called with IAfThreadBase::mutex() held
 void EffectChain::setInputDevice_l(const AudioDeviceTypeAddr &device)
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
         mEffects[i]->setInputDevice(device);
@@ -2552,6 +2638,7 @@
 // setMode_l() must be called with IAfThreadBase::mutex() held
 void EffectChain::setMode_l(audio_mode_t mode)
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
         mEffects[i]->setMode(mode);
@@ -2561,6 +2648,7 @@
 // setAudioSource_l() must be called with IAfThreadBase::mutex() held
 void EffectChain::setAudioSource_l(audio_source_t source)
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
         mEffects[i]->setAudioSource(source);
@@ -2569,20 +2657,26 @@
 
 bool EffectChain::hasVolumeControlEnabled_l() const {
     for (const auto &effect : mEffects) {
-        if (effect->isVolumeControlEnabled()) return true;
+        if (effect->isVolumeControlEnabled_l()) return true;
     }
     return false;
 }
 
-// setVolume_l() must be called with IAfThreadBase::mutex() or EffectChain::mutex() held
-bool EffectChain::setVolume_l(uint32_t *left, uint32_t *right, bool force)
-{
+// setVolume() must be called without EffectChain::mutex() held
+bool EffectChain::setVolume(uint32_t* left, uint32_t* right, bool force) {
+    audio_utils::lock_guard _l(mutex());
+    return setVolume_l(left, right, force);
+}
+
+// setVolume_l() must be called with EffectChain::mutex() held
+bool EffectChain::setVolume_l(uint32_t* left, uint32_t* right, bool force) {
     uint32_t newLeft = *left;
     uint32_t newRight = *right;
     const size_t size = mEffects.size();
 
     // first update volume controller
     const auto volumeControlIndex = findVolumeControl_l(0, size);
+    // index of the effect chain volume controller
     const int ctrlIdx = volumeControlIndex.value_or(-1);
     const sp<IAfEffectModule> volumeControlEffect =
             volumeControlIndex.has_value() ? mEffects[ctrlIdx] : nullptr;
@@ -2596,30 +2690,33 @@
         }
         return volumeControlIndex.has_value();
     }
+    mVolumeControlEffect = volumeControlEffect;
 
-    if (volumeControlEffect != cachedVolumeControlEffect) {
-        // The volume control effect is a new one. Set the old one as full volume. Set the new onw
-        // as zero for safe ramping.
-        if (cachedVolumeControlEffect != nullptr) {
+    for (int i = 0; i < ctrlIdx; ++i) {
+        // Effects placed before the volume controller do not control the effect chain volume.
+        // If any of them has the volume control capability, set its volume to maximum to avoid
+        // double attenuation.
+        if (mEffects[i]->isVolumeControl()) {
             uint32_t leftMax = 1 << 24;
             uint32_t rightMax = 1 << 24;
-            cachedVolumeControlEffect->setVolume(&leftMax, &rightMax, true /*controller*/);
+            mEffects[i]->setVolume_l(&leftMax, &rightMax,
+                                     false /* not an effect chain volume controller */,
+                                     true /* force */);
         }
-        if (volumeControlEffect != nullptr) {
-            uint32_t leftZero = 0;
-            uint32_t rightZero = 0;
-            volumeControlEffect->setVolume(&leftZero, &rightZero, true /*controller*/);
-        }
-        mVolumeControlEffect = volumeControlEffect;
     }
+
     mLeftVolume = newLeft;
     mRightVolume = newRight;
 
     // second get volume update from volume controller
     if (ctrlIdx >= 0) {
-        mEffects[ctrlIdx]->setVolume(&newLeft, &newRight, true);
+        mEffects[ctrlIdx]->setVolume_l(&newLeft, &newRight,
+                                       true /* effect chain volume controller */);
         mNewLeftVolume = newLeft;
         mNewRightVolume = newRight;
+        ALOGD("%s sessionId %d volume controller effect %s set (%d, %d), ret (%d, %d)", __func__,
+              mSessionId, mEffects[ctrlIdx]->desc().name, mLeftVolume, mRightVolume, newLeft,
+              newRight);
     }
     // then indicate volume to all other effects in chain.
     // Pass altered volume to effects before volume controller
@@ -2638,9 +2735,11 @@
         }
         // Pass requested volume directly if this is volume monitor module
         if (mEffects[i]->isVolumeMonitor()) {
-            mEffects[i]->setVolume(left, right, false);
+            mEffects[i]->setVolume_l(left, right,
+                                     false /* not an effect chain volume controller */);
         } else {
-            mEffects[i]->setVolume(&lVol, &rVol, false);
+            mEffects[i]->setVolume_l(&lVol, &rVol,
+                                     false /* not an effect chain volume controller */);
         }
     }
     *left = newLeft;
@@ -2651,7 +2750,7 @@
     return volumeControlIndex.has_value();
 }
 
-// resetVolume_l() must be called with IAfThreadBase::mutex() or EffectChain::mutex() held
+// resetVolume_l() must be called with EffectChain::mutex() held
 void EffectChain::resetVolume_l()
 {
     if ((mLeftVolume != UINT_MAX) && (mRightVolume != UINT_MAX)) {
@@ -2661,8 +2760,12 @@
     }
 }
 
-// containsHapticGeneratingEffect_l must be called with
-// IAfThreadBase::mutex() or EffectChain::mutex() held
+bool EffectChain::containsHapticGeneratingEffect()
+{
+    audio_utils::lock_guard _l(mutex());
+    return containsHapticGeneratingEffect_l();
+}
+// containsHapticGeneratingEffect_l must be called with EffectChain::mutex() held
 bool EffectChain::containsHapticGeneratingEffect_l()
 {
     for (size_t i = 0; i < mEffects.size(); ++i) {
@@ -2673,15 +2776,15 @@
     return false;
 }
 
-void EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
+void EffectChain::setHapticScale_l(int id, os::HapticScale hapticScale)
 {
     audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); ++i) {
-        mEffects[i]->setHapticIntensity(id, intensity);
+        mEffects[i]->setHapticScale_l(id, hapticScale);
     }
 }
 
-void EffectChain::syncHalEffectsState()
+void EffectChain::syncHalEffectsState_l()
 {
     audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); i++) {
@@ -2750,7 +2853,7 @@
         }
 
         if (desc->mRefCount++ == 0) {
-            sp<IAfEffectModule> effect = getEffectIfEnabled(type);
+            sp<IAfEffectModule> effect = getEffectIfEnabled_l(type);
             if (effect != 0) {
                 desc->mEffect = effect;
                 effect->setSuspended(true);
@@ -2844,8 +2947,7 @@
 #endif //OPENSL_ES_H_
 
 /* static */
-bool EffectChain::isEffectEligibleForBtNrecSuspend(const effect_uuid_t *type)
-{
+bool EffectChain::isEffectEligibleForBtNrecSuspend_l(const effect_uuid_t* type) {
     // Only NS and AEC are suspended when BtNRec is off
     if ((memcmp(type, FX_IID_AEC, sizeof(effect_uuid_t)) == 0) ||
         (memcmp(type, FX_IID_NS, sizeof(effect_uuid_t)) == 0)) {
@@ -2871,6 +2973,7 @@
         Vector< sp<IAfEffectModule> > &effects)
 {
     effects.clear();
+    audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); i++) {
         if (isEffectEligibleForSuspend(mEffects[i]->desc())) {
             effects.add(mEffects[i]);
@@ -2878,15 +2981,13 @@
     }
 }
 
-sp<IAfEffectModule> EffectChain::getEffectIfEnabled(const effect_uuid_t *type)
+sp<IAfEffectModule> EffectChain::getEffectIfEnabled_l(const effect_uuid_t *type)
 {
     sp<IAfEffectModule> effect = getEffectFromType_l(type);
     return effect != 0 && effect->isEnabled() ? effect : 0;
 }
 
-void EffectChain::checkSuspendOnEffectEnabled(const sp<IAfEffectModule>& effect,
-                                                            bool enabled)
-{
+void EffectChain::checkSuspendOnEffectEnabled_l(const sp<IAfEffectModule>& effect, bool enabled) {
     ssize_t index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow);
     if (enabled) {
         if (index < 0) {
@@ -2901,12 +3002,11 @@
             setEffectSuspended_l(&effect->desc().type, enabled);
             index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow);
             if (index < 0) {
-                ALOGW("checkSuspendOnEffectEnabled() Fx should be suspended here!");
+                ALOGW("%s Fx should be suspended here!", __func__);
                 return;
             }
         }
-        ALOGV("checkSuspendOnEffectEnabled() enable suspending fx %08x",
-            effect->desc().type.timeLow);
+        ALOGV("%s enable suspending fx %08x", __func__, effect->desc().type.timeLow);
         sp<SuspendedEffectDesc> desc = mSuspendedEffects.valueAt(index);
         // if effect is requested to suspended but was not yet enabled, suspend it now.
         if (desc->mEffect == 0) {
@@ -2918,8 +3018,7 @@
         if (index < 0) {
             return;
         }
-        ALOGV("checkSuspendOnEffectEnabled() disable restoring fx %08x",
-            effect->desc().type.timeLow);
+        ALOGV("%s disable restoring fx %08x", __func__, effect->desc().type.timeLow);
         sp<SuspendedEffectDesc> desc = mSuspendedEffects.valueAt(index);
         desc->mEffect.clear();
         effect->setSuspended(false);
@@ -2945,6 +3044,12 @@
 
 void EffectChain::setThread(const sp<IAfThreadBase>& thread)
 {
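+    // Late-bind the stream strategy and tail buffer count when the chain was created without a
+    // thread attached.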
+    if (thread != nullptr) {
+        mStrategy = thread->getStrategyForStream(AUDIO_STREAM_MUSIC);
+        mMaxTailBuffers =
+            ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
+                thread->frameCount();
+    }
     audio_utils::lock_guard _l(mutex());
     mEffectCallback->setThread(thread);
 }
@@ -3028,9 +3133,9 @@
     for (const auto& effect : mEffects) {
         if (spatializedMetadata.has_value()
                 && IAfEffectModule::isSpatializer(&effect->desc().type)) {
-            effect->sendMetadata(spatializedMetadata.value());
+            effect->sendMetadata_ll(spatializedMetadata.value());
         } else {
-            effect->sendMetadata(allMetadata);
+            effect->sendMetadata_ll(allMetadata);
         }
     }
 }
@@ -3134,7 +3239,7 @@
 uint32_t EffectChain::EffectCallback::sampleRate() const {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
-        return 0;
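+        // No thread attached: fall back to the default output sample rate instead of 0.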
+        return DEFAULT_OUTPUT_SAMPLE_RATE;
     }
     return t->sampleRate();
 }
@@ -3142,19 +3247,20 @@
 audio_channel_mask_t EffectChain::EffectCallback::inChannelMask(int id) const
 NO_THREAD_SAFETY_ANALYSIS
 // calling function 'hasAudioSession_l' requires holding mutex 'ThreadBase_Mutex' exclusively
+// calling function 'isFirstEffect_l' requires holding mutex 'EffectChain_Mutex' exclusively
 {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
-        return AUDIO_CHANNEL_NONE;
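+        // Fall back to a stereo channel mask rather than AUDIO_CHANNEL_NONE when the thread or
+        // chain is gone.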
+        return AUDIO_CHANNEL_OUT_STEREO;
     }
     sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
-        return AUDIO_CHANNEL_NONE;
+        return AUDIO_CHANNEL_OUT_STEREO;
     }
 
     if (mThreadType == IAfThreadBase::SPATIALIZER) {
         if (c->sessionId() == AUDIO_SESSION_OUTPUT_STAGE) {
-            if (c->isFirstEffect(id)) {
+            if (c->isFirstEffect_l(id)) {
                 return t->mixerChannelMask();
             } else {
                 return t->channelMask();
@@ -3184,11 +3290,11 @@
 {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
-        return AUDIO_CHANNEL_NONE;
+        return AUDIO_CHANNEL_OUT_STEREO;
     }
     sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
-        return AUDIO_CHANNEL_NONE;
+        return AUDIO_CHANNEL_OUT_STEREO;
     }
 
     if (mThreadType == IAfThreadBase::SPATIALIZER) {
@@ -3222,7 +3328,8 @@
 size_t EffectChain::EffectCallback::frameCount() const {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
-        return 0;
+        // frameCount cannot be zero.
+        return 1;
     }
     return t->frameCount();
 }
@@ -3248,8 +3355,9 @@
     t->setVolumeForOutput_l(left, right);
 }
 
-void EffectChain::EffectCallback::checkSuspendOnEffectEnabled(
-        const sp<IAfEffectBase>& effect, bool enabled, bool threadLocked) {
+void EffectChain::EffectCallback::checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect,
+                                                              bool enabled, bool threadLocked)
+        NO_THREAD_SAFETY_ANALYSIS {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
@@ -3261,7 +3369,7 @@
         return;
     }
     // in EffectChain context, an EffectBase is always from an EffectModule so static cast is safe
-    c->checkSuspendOnEffectEnabled(effect->asEffectModule(), enabled);
+    c->checkSuspendOnEffectEnabled_l(effect->asEffectModule(), enabled);
 }
 
 void EffectChain::EffectCallback::onEffectEnable(const sp<IAfEffectBase>& effect) {
@@ -3293,7 +3401,7 @@
     return true;
 }
 
-void EffectChain::EffectCallback::resetVolume() {
+void EffectChain::EffectCallback::resetVolume_l() {
     sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
         return;
@@ -3354,7 +3462,7 @@
     return status;
 }
 
-status_t DeviceEffectProxy::init(
+status_t DeviceEffectProxy::init_l(
         const std::map <audio_patch_handle_t, IAfPatchPanel::Patch>& patches) {
 //For all audio patches
 //If src or sink device match
@@ -3449,19 +3557,17 @@
             ALOGV("%s reusing HAL effect", __func__);
         } else {
             mDevicePort = *port;
-            mHalEffect = new EffectModule(mMyCallback,
-                                      const_cast<effect_descriptor_t *>(&mDescriptor),
-                                      mMyCallback->newEffectId(), AUDIO_SESSION_DEVICE,
-                                      false /* pinned */, port->id);
+            mHalEffect = sp<HwAccDeviceEffectModule>::make(mMyCallback,
+                    const_cast<effect_descriptor_t *>(&mDescriptor), mMyCallback->newEffectId(),
+                    port->id);
+            mHalEffect->configure_l();
             if (audio_is_input_device(mDevice.mType)) {
                 mHalEffect->setInputDevice(mDevice);
             } else {
                 mHalEffect->setDevices({mDevice});
             }
-            mHalEffect->configure();
         }
-        *handle = new EffectHandle(mHalEffect, nullptr, nullptr, 0 /*priority*/,
-                                   mNotifyFramesProcessed);
+        *handle = sp<InternalEffectHandle>::make(mHalEffect, mNotifyFramesProcessed);
         status = (*handle)->initCheck();
         if (status == OK) {
             status = mHalEffect->addHandle((*handle).get());
@@ -3508,15 +3614,16 @@
     return status;
 }
 
-void DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
-    sp<IAfEffectHandle> effect;
+sp<IAfEffectHandle> DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
+    sp<IAfEffectHandle> disconnectedHandle;
     {
         audio_utils::lock_guard _l(proxyMutex());
         if (mEffectHandles.find(patchHandle) != mEffectHandles.end()) {
-            effect = mEffectHandles.at(patchHandle);
+            disconnectedHandle = std::move(mEffectHandles.at(patchHandle));
             mEffectHandles.erase(patchHandle);
         }
     }
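+    // Return the removed handle (possibly null) so the caller decides when to release it.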
+    return disconnectedHandle;
 }
 
 
@@ -3524,7 +3631,7 @@
 {
     audio_utils::lock_guard _l(proxyMutex());
     if (effect == mHalEffect) {
-        mHalEffect->release_l();
+        mHalEffect->release_l("DeviceEffectProxy::removeEffect");
         mHalEffect.clear();
         mDevicePort.id = AUDIO_PORT_HANDLE_NONE;
     }
@@ -3747,7 +3854,7 @@
     if (effect == nullptr) {
         return;
     }
-    effect->start();
+    effect->start_l();
 }
 
 void DeviceEffectProxy::ProxyCallback::onEffectDisable(
@@ -3756,7 +3863,7 @@
     if (effect == nullptr) {
         return;
     }
-    effect->stop();
+    effect->stop_l();
 }
 
 } // namespace android
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 8583d47..9ecf89e 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -25,6 +25,8 @@
 #include <private/media/AudioEffectShared.h>
 
 #include <map>  // avoid transitive dependency
+#include <optional>
+#include <vector>
 
 namespace android {
 
@@ -78,11 +80,11 @@
                         { return (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK)
                             == EFFECT_FLAG_VOLUME_MONITOR; }
 
-    status_t setEnabled(bool enabled, bool fromHandle) override;
-    status_t setEnabled_l(bool enabled) final;
+    status_t setEnabled(bool enabled, bool fromHandle) override EXCLUDES_EffectBase_Mutex;
+    status_t setEnabled_l(bool enabled) final REQUIRES(audio_utils::EffectBase_Mutex);
     bool isEnabled() const final;
-    void setSuspended(bool suspended) final;
-    bool suspended() const final;
+    void setSuspended(bool suspended) final EXCLUDES_EffectBase_Mutex;
+    bool suspended() const final EXCLUDES_EffectBase_Mutex;
 
     status_t command(int32_t __unused,
                              const std::vector<uint8_t>& __unused,
@@ -99,36 +101,40 @@
         return mCallback.load();
     }
 
-    status_t addHandle(IAfEffectHandle *handle) final;
-    ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) final;
-    ssize_t removeHandle(IAfEffectHandle *handle) final;
-    ssize_t removeHandle_l(IAfEffectHandle *handle) final;
-    IAfEffectHandle* controlHandle_l() final;
-    bool purgeHandles() final;
+    status_t addHandle(IAfEffectHandle* handle) final EXCLUDES_EffectBase_Mutex;
+    ssize_t disconnectHandle(IAfEffectHandle* handle,
+                             bool unpinIfLast) final EXCLUDES_EffectBase_Mutex;
+    ssize_t removeHandle(IAfEffectHandle* handle) final EXCLUDES_EffectBase_Mutex;
+    ssize_t removeHandle_l(IAfEffectHandle* handle) final REQUIRES(audio_utils::EffectBase_Mutex);
+    IAfEffectHandle* controlHandle_l() final REQUIRES(audio_utils::EffectBase_Mutex);
+    bool purgeHandles() final EXCLUDES_EffectBase_Mutex;
 
-    void             checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) final;
+    void checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) final;
 
-    bool             isPinned() const final { return mPinned; }
-    void             unPin() final { mPinned = false; }
+    bool isPinned() const final { return mPinned; }
+    void unPin() final { mPinned = false; }
 
-    audio_utils::mutex& mutex() const final { return mMutex; }
+    audio_utils::mutex& mutex() const final
+            RETURN_CAPABILITY(android::audio_utils::EffectBase_Mutex) {
+        return mMutex;
+    }
 
-    status_t         updatePolicyState() final;
+    status_t updatePolicyState() final EXCLUDES_EffectBase_Mutex;
 
     sp<IAfEffectModule> asEffectModule() override { return nullptr; }
     sp<IAfDeviceEffectProxy> asDeviceEffectProxy() override { return nullptr; }
 
-    void             dump(int fd, const Vector<String16>& args) const override;
+    void dump(int fd, const Vector<String16>& args) const override;
 
 protected:
-    bool             isInternal_l() const {
-                         for (auto handle : mHandles) {
-                            if (handle->client() != nullptr) {
-                                return false;
-                            }
-                         }
-                         return true;
-                     }
+    bool isInternal_l() const REQUIRES(audio_utils::EffectBase_Mutex) {
+        for (auto handle : mHandles) {
+            if (handle->client() != nullptr) {
+                return false;
+            }
+        }
+        return true;
+    }
 
     bool             mPinned = false;
 
@@ -150,7 +156,10 @@
     // Audio policy effect state management
     // Mutex protecting transactions with audio policy manager as mutex() cannot
     // be held to avoid cross deadlocks with audio policy mutex
-    audio_utils::mutex& policyMutex() const { return mPolicyMutex; }
+    audio_utils::mutex& policyMutex() const
+            RETURN_CAPABILITY(android::audio_utils::EffectBase_PolicyMutex) {
+        return mPolicyMutex;
+    }
     mutable audio_utils::mutex mPolicyMutex{audio_utils::MutexOrder::kEffectBase_PolicyMutex};
     // Effect is registered in APM or not
     bool                      mPolicyRegistered = false;
@@ -170,30 +179,28 @@
 // the attached track(s) to accumulate their auxiliary channel.
 class EffectModule : public IAfEffectModule, public EffectBase {
 public:
-    EffectModule(const sp<EffectCallbackInterface>& callabck,
+    EffectModule(const sp<EffectCallbackInterface>& callback,
                     effect_descriptor_t *desc,
                     int id,
                     audio_session_t sessionId,
                     bool pinned,
-                    audio_port_handle_t deviceId);
-    ~EffectModule() override;
+                    audio_port_handle_t deviceId) REQUIRES(audio_utils::EffectChain_Mutex);
+    ~EffectModule() override REQUIRES(audio_utils::EffectChain_Mutex);
 
-    void process() final;
-    bool updateState() final;
-    status_t command(int32_t cmdCode,
-                     const std::vector<uint8_t>& cmdData,
-                     int32_t maxReplySize,
-                     std::vector<uint8_t>* reply) final;
+    void process() final EXCLUDES_EffectBase_Mutex;
+    bool updateState_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t command(int32_t cmdCode, const std::vector<uint8_t>& cmdData, int32_t maxReplySize,
+                     std::vector<uint8_t>* reply) final EXCLUDES_EffectBase_Mutex;
 
-    void reset_l() final;
-    status_t configure() final;
-    status_t init() final;
+    void reset_l() final REQUIRES(audio_utils::EffectBase_Mutex);
+    status_t configure_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    status_t init_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
     uint32_t status() const final {
         return mStatus;
     }
     bool isProcessEnabled() const final;
-    bool isOffloadedOrDirect() const final;
-    bool isVolumeControlEnabled() const final;
+    bool isOffloadedOrDirect_l() const final REQUIRES(audio_utils::EffectChain_Mutex);
+    bool isVolumeControlEnabled_l() const final REQUIRES(audio_utils::EffectChain_Mutex);
     void setInBuffer(const sp<EffectBufferHalInterface>& buffer) final;
     int16_t *inBuffer() const final {
         return mInBuffer != 0 ? reinterpret_cast<int16_t*>(mInBuffer->ptr()) : NULL;
@@ -203,39 +210,49 @@
         return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
     }
     // Updates the access mode if it is out of date.  May issue a new effect configure.
-    void updateAccessMode() final {
-                    if (requiredEffectBufferAccessMode() != mConfig.outputCfg.accessMode) {
-                        configure();
-                    }
-                }
-    status_t setDevices(const AudioDeviceTypeAddrVector &devices) final;
-    status_t setInputDevice(const AudioDeviceTypeAddr &device) final;
-    status_t setVolume(uint32_t *left, uint32_t *right, bool controller) final;
-    status_t setMode(audio_mode_t mode) final;
-    status_t setAudioSource(audio_source_t source) final;
-    status_t start() final;
-    status_t stop() final;
+    void updateAccessMode_l() final REQUIRES(audio_utils::EffectChain_Mutex) {
+        if (requiredEffectBufferAccessMode() != mConfig.outputCfg.accessMode) {
+            configure_l();
+        }
+    }
+    status_t setDevices(const AudioDeviceTypeAddrVector& devices) final EXCLUDES_EffectBase_Mutex;
+    status_t setInputDevice(const AudioDeviceTypeAddr& device) final EXCLUDES_EffectBase_Mutex;
+    status_t setVolume_l(uint32_t* left, uint32_t* right, bool controller, bool force) final
+            REQUIRES(audio_utils::EffectChain_Mutex);
+    status_t setMode(audio_mode_t mode) final EXCLUDES_EffectBase_Mutex;
+    status_t setAudioSource(audio_source_t source) final EXCLUDES_EffectBase_Mutex;
+    status_t start_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t stop_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
 
-    status_t setOffloaded(bool offloaded, audio_io_handle_t io) final;
-    bool isOffloaded() const final;
-    void addEffectToHal_l() final;
-    void release_l() final;
+    status_t setOffloaded_l(bool offloaded, audio_io_handle_t io) final
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    bool isOffloaded_l() const final
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    void addEffectToHal_l() override REQUIRES(audio_utils::EffectChain_Mutex);
+    void release_l(const std::string& from = "") final REQUIRES(audio_utils::EffectChain_Mutex);
 
     sp<IAfEffectModule> asEffectModule() final { return this; }
 
     bool isHapticGenerator() const final;
     bool isSpatializer() const final;
 
-    status_t setHapticIntensity(int id, os::HapticScale intensity) final;
-    status_t setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo) final;
-    status_t sendMetadata(const std::vector<playback_track_metadata_v7_t>& metadata) final;
+    status_t setHapticScale_l(int id, os::HapticScale hapticScale) final
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo) final
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata) final
+            REQUIRES(audio_utils::ThreadBase_Mutex,
+                     audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
 
-    status_t getConfigs(audio_config_base_t* inputCfg,
-                                audio_config_base_t* outputCfg,
-                                bool* isOutput) const final;
+    status_t getConfigs_l(audio_config_base_t* inputCfg, audio_config_base_t* outputCfg,
+                          bool* isOutput) const final
+            REQUIRES(audio_utils::EffectHandle_Mutex) EXCLUDES_EffectBase_Mutex;
 
     void dump(int fd, const Vector<String16>& args) const final;
 
+protected:
+    sp<EffectHalInterface> mEffectInterface; // Effect module HAL
+
 private:
 
     // Maximum time allocated to effect engines to complete the turn off sequence
@@ -243,20 +260,20 @@
 
     DISALLOW_COPY_AND_ASSIGN(EffectModule);
 
-    status_t start_l();
-    status_t stop_l();
-    status_t removeEffectFromHal_l();
+    status_t start_ll() REQUIRES(audio_utils::EffectChain_Mutex, audio_utils::EffectBase_Mutex);
+    status_t stop_ll() REQUIRES(audio_utils::EffectChain_Mutex, audio_utils::EffectBase_Mutex);
+    status_t removeEffectFromHal_l() override REQUIRES(audio_utils::EffectChain_Mutex);
     status_t sendSetAudioDevicesCommand(const AudioDeviceTypeAddrVector &devices, uint32_t cmdCode);
     effect_buffer_access_e requiredEffectBufferAccessMode() const {
         return mConfig.inputCfg.buffer.raw == mConfig.outputCfg.buffer.raw
                 ? EFFECT_BUFFER_ACCESS_WRITE : EFFECT_BUFFER_ACCESS_ACCUMULATE;
     }
 
-    status_t setVolumeInternal(uint32_t *left, uint32_t *right, bool controller);
-
+    status_t setVolumeInternal_ll(uint32_t* left, uint32_t* right,
+                                  bool controller /* the volume controller effect of the chain */)
+            REQUIRES(audio_utils::EffectChain_Mutex, audio_utils::EffectBase_Mutex);
 
     effect_config_t     mConfig;    // input and output audio configuration
-    sp<EffectHalInterface> mEffectInterface; // Effect module HAL
     sp<EffectBufferHalInterface> mInBuffer;  // Buffers for interacting with HAL
     sp<EffectBufferHalInterface> mOutBuffer;
     status_t            mStatus;    // initialization status
@@ -278,12 +295,12 @@
     template <typename MUTEX>
     class AutoLockReentrant {
     public:
-        AutoLockReentrant(MUTEX& mutex, pid_t allowedTid)
+        AutoLockReentrant(MUTEX& mutex, pid_t allowedTid) ACQUIRE(audio_utils::EffectBase_Mutex)
             : mMutex(gettid() == allowedTid ? nullptr : &mutex)
         {
             if (mMutex != nullptr) mMutex->lock();
         }
-        ~AutoLockReentrant() {
+        ~AutoLockReentrant() RELEASE(audio_utils::EffectBase_Mutex) {
             if (mMutex != nullptr) mMutex->unlock();
         }
     private:
@@ -293,6 +310,26 @@
     static constexpr pid_t INVALID_PID = (pid_t)-1;
     // this tid is allowed to call setVolume() without acquiring the mutex.
     pid_t mSetVolumeReentrantTid = INVALID_PID;
+
+    // Cache the volume that has been set successfully.
+    std::optional<std::vector<uint32_t>> mVolume;
+    // Cache the volume returned by the effect when the volume was set successfully. The value
+    // indicates the volume to apply before this effect.
+    std::optional<std::vector<uint32_t>> mReturnedVolume;
+    // TODO: b/315995877, remove this debugging string after root cause
+    std::string mEffectInterfaceDebug GUARDED_BY(audio_utils::EffectChain_Mutex);
+};
+
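+// EffectModule attached to an audio device port (session AUDIO_SESSION_DEVICE) for
+// hardware-accelerated device effects; keeps track of whether the effect was added to the HAL.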
+class HwAccDeviceEffectModule : public EffectModule {
+public:
+    HwAccDeviceEffectModule(const sp<EffectCallbackInterface>& callback, effect_descriptor_t *desc,
+            int id, audio_port_handle_t deviceId) :
+        EffectModule(callback, desc, id, AUDIO_SESSION_DEVICE, /* pinned */ false, deviceId) {}
+    void addEffectToHal_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+
+private:
+    status_t removeEffectFromHal_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    bool mAddedToHal = false;
 };
 
 // The EffectHandle class implements the IEffect interface. It provides resources
@@ -307,7 +344,8 @@
     EffectHandle(const sp<IAfEffectBase>& effect,
             const sp<Client>& client,
             const sp<media::IEffectClient>& effectClient,
-            int32_t priority, bool notifyFramesProcessed);
+            int32_t priority, bool notifyFramesProcessed, bool isInternal = false,
+            audio_utils::MutexOrder mutexOrder = audio_utils::MutexOrder::kEffectHandle_Mutex);
     ~EffectHandle() override;
     status_t onTransact(
             uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) final;
@@ -327,6 +365,11 @@
                                       int32_t* _aidl_return) final;
 
     const sp<Client>& client() const final { return mClient; }
+    /**
+     * Checks whether the handle is internal, i.e. created by AudioFlinger for its own needs
+     * (e.g. a device effect HAL handle or a device effect thread handle).
+     */
+    virtual bool isInternal() const { return mIsInternal; }
 
     sp<android::media::IEffect> asIEffect() final {
         return sp<android::media::IEffect>::fromExisting(this);
@@ -364,13 +407,18 @@
 
     void dumpToBuffer(char* buffer, size_t size) const final;
 
+protected:
+    // protects IEffect method calls
+    mutable audio_utils::mutex mMutex;
 
 private:
     DISALLOW_COPY_AND_ASSIGN(EffectHandle);
 
-    audio_utils::mutex& mutex() const { return mMutex; }
-    // protects IEffect method calls
-    mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectHandle_Mutex};
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(android::audio_utils::EffectHandle_Mutex) {
+        return mMutex;
+    }
+
     const wp<IAfEffectBase> mEffect;               // pointer to controlled EffectModule
     const sp<media::IEffectClient> mEffectClient;  // callback interface for client notifications
     /*const*/ sp<Client> mClient;            // client for shared memory allocation, see
@@ -386,6 +434,28 @@
     bool mDisconnected;                      // Set to true by disconnect()
     const bool mNotifyFramesProcessed;       // true if the client callback event
                                              // EVENT_FRAMES_PROCESSED must be generated
+    const bool mIsInternal;
+};
+
+/**
+ * There are two types of effects:
+ * - Session effect: the handle is called directly from the client, without the AudioFlinger lock.
+ * - Device effect: a device effect proxy aggregates a collection of internal effect handles that
+ *   control the same effect added on all audio patches involving the port selected for the device
+ *   effect, as requested either by a client or by AudioPolicyEffects. These internal effect
+ *   handles have no client. Their call sequence implies a different locking order, hence the lock
+ *   is specialized.
+ */
+class InternalEffectHandle : public EffectHandle {
+public:
+    InternalEffectHandle(const sp<IAfEffectBase>& effect, bool notifyFramesProcessed) :
+            EffectHandle(effect, /* client= */ nullptr, /* effectClient= */ nullptr,
+                         /* priority= */ 0, notifyFramesProcessed, /* isInternal */ true,
+                         audio_utils::MutexOrder::kDeviceEffectHandle_Mutex) {}
+
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(android::audio_utils::DeviceEffectHandle_Mutex) {
+        return mMutex;
+    }
 };
 
 // the EffectChain class represents a group of effects associated to one audio session.
@@ -399,36 +469,49 @@
 // it also provide it's own input buffer used by the track as accumulation buffer.
 class EffectChain : public IAfEffectChain {
 public:
-    EffectChain(const sp<IAfThreadBase>& thread, audio_session_t sessionId);
+    EffectChain(const sp<IAfThreadBase>& thread,
+                audio_session_t sessionId,
+                const sp<IAfThreadCallback>& afThreadCallback);
 
-    void process_l() final;
+    void process_l() final REQUIRES(audio_utils::EffectChain_Mutex);
 
-    audio_utils::mutex& mutex() const final { return mMutex; }
+    audio_utils::mutex& mutex() const final RETURN_CAPABILITY(audio_utils::EffectChain_Mutex) {
+        return mMutex;
+    }
 
-    status_t createEffect_l(sp<IAfEffectModule>& effect,
-                            effect_descriptor_t *desc,
-                            int id,
-                            audio_session_t sessionId,
-                            bool pinned) final;
-    status_t addEffect_l(const sp<IAfEffectModule>& handle) final;
-    status_t addEffect_ll(const sp<IAfEffectModule>& handle) final;
-    size_t removeEffect_l(const sp<IAfEffectModule>& handle, bool release = false) final;
+    status_t createEffect(sp<IAfEffectModule>& effect, effect_descriptor_t* desc, int id,
+                            audio_session_t sessionId, bool pinned) final
+            EXCLUDES_EffectChain_Mutex;
+    status_t addEffect(const sp<IAfEffectModule>& handle) final
+            EXCLUDES_EffectChain_Mutex;
+    status_t addEffect_l(const sp<IAfEffectModule>& handle) final
+            REQUIRES(audio_utils::EffectChain_Mutex);
+    size_t removeEffect(const sp<IAfEffectModule>& handle, bool release = false) final
+            EXCLUDES_EffectChain_Mutex;
 
     audio_session_t sessionId() const final { return mSessionId; }
     void setSessionId(audio_session_t sessionId) final { mSessionId = sessionId; }
 
-    sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t *descriptor) const final;
-    sp<IAfEffectModule> getEffectFromId_l(int id) const final;
-    sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t *type) const final;
-    std::vector<int> getEffectIds() const final;
+    sp<IAfEffectModule> getEffectFromDesc(effect_descriptor_t* descriptor) const final
+            EXCLUDES_EffectChain_Mutex;
+    sp<IAfEffectModule> getEffectFromId_l(int id) const final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t* type) const final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    std::vector<int> getEffectIds_l() const final REQUIRES(audio_utils::ThreadBase_Mutex);
     // FIXME use float to improve the dynamic range
 
-    bool setVolume_l(uint32_t *left, uint32_t *right, bool force = false) final;
-    void resetVolume_l() final;
-    void setDevices_l(const AudioDeviceTypeAddrVector &devices) final;
-    void setInputDevice_l(const AudioDeviceTypeAddr &device) final;
-    void setMode_l(audio_mode_t mode) final;
-    void setAudioSource_l(audio_source_t source) final;
+    bool setVolume(uint32_t* left, uint32_t* right,
+                   bool force = false) final EXCLUDES_EffectChain_Mutex;
+    void resetVolume_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    void setDevices_l(const AudioDeviceTypeAddrVector& devices) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    void setInputDevice_l(const AudioDeviceTypeAddr& device) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    void setMode_l(audio_mode_t mode) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    void setAudioSource_l(audio_source_t source) final
+            REQUIRES(audio_utils::ThreadBase_Mutex)  EXCLUDES_EffectChain_Mutex;
 
     void setInBuffer(const sp<EffectBufferHalInterface>& buffer) final {
         mInBuffer = buffer;
@@ -459,21 +542,22 @@
 
     // suspend or restore effects of the specified type. The number of suspend requests is counted
     // and restore occurs once all suspend requests are cancelled.
-    void setEffectSuspended_l(const effect_uuid_t *type,
-                              bool suspend) final;
+    void setEffectSuspended_l(const effect_uuid_t* type, bool suspend) final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
     // suspend all eligible effects
-    void setEffectSuspendedAll_l(bool suspend) final;
+    void setEffectSuspendedAll_l(bool suspend) final REQUIRES(audio_utils::ThreadBase_Mutex);
     // check if effects should be suspended or restored when a given effect is enable or disabled
-    void checkSuspendOnEffectEnabled(
-            const sp<IAfEffectModule>& effect, bool enabled) final;
+    void checkSuspendOnEffectEnabled_l(const sp<IAfEffectModule>& effect, bool enabled) final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
 
-    void clearInputBuffer() final;
+    void clearInputBuffer() final EXCLUDES_EffectChain_Mutex;
 
     // At least one non offloadable effect in the chain is enabled
-    bool isNonOffloadableEnabled() const final;
-    bool isNonOffloadableEnabled_l() const final;
+    bool isNonOffloadableEnabled() const final EXCLUDES_EffectChain_Mutex;
+    bool isNonOffloadableEnabled_l() const final REQUIRES(audio_utils::EffectChain_Mutex);
 
-    void syncHalEffectsState() final;
+    void syncHalEffectsState_l()
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex final;
 
     // flags is an ORed set of audio_output_flags_t which is updated on return.
     void checkOutputFlagCompatibility(audio_output_flags_t *flags) const final;
@@ -492,26 +576,34 @@
 
     // isCompatibleWithThread_l() must be called with thread->mutex() held
     bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const final
-            REQUIRES(audio_utils::ThreadBase_Mutex);
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
 
-    // Requires either IAfThreadBase::mutex() or EffectChain::mutex() held
-    bool containsHapticGeneratingEffect_l() final;
+    bool containsHapticGeneratingEffect() final
+            EXCLUDES_EffectChain_Mutex;
 
-    void setHapticIntensity_l(int id, os::HapticScale intensity) final;
+    bool containsHapticGeneratingEffect_l() final
+            REQUIRES(audio_utils::EffectChain_Mutex);
+
+    void setHapticScale_l(int id, os::HapticScale hapticScale) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
 
     sp<EffectCallbackInterface> effectCallback() const final { return mEffectCallback; }
 
     wp<IAfThreadBase> thread() const final { return mEffectCallback->thread(); }
 
-    bool isFirstEffect(int id) const final {
+    bool isFirstEffect_l(int id) const final REQUIRES(audio_utils::EffectChain_Mutex) {
         return !mEffects.isEmpty() && id == mEffects[0]->id();
     }
 
     void dump(int fd, const Vector<String16>& args) const final;
 
-    size_t numberOfEffects() const final { return mEffects.size(); }
+    size_t numberOfEffects() const final {
+        audio_utils::lock_guard _l(mutex());
+        return mEffects.size();
+    }
 
     sp<IAfEffectModule> getEffectModule(size_t index) const final {
+        audio_utils::lock_guard _l(mutex());
         return mEffects[index];
     }
 
@@ -519,9 +611,11 @@
         const std::optional<const std::vector<playback_track_metadata_v7_t>> spatializedMetadata)
             final REQUIRES(audio_utils::ThreadBase_Mutex);
 
-    void setThread(const sp<IAfThreadBase>& thread) final;
+    void setThread(const sp<IAfThreadBase>& thread) final EXCLUDES_EffectChain_Mutex;
 
-private:
+  private:
+    bool setVolume_l(uint32_t* left, uint32_t* right, bool force = false)
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
     // For transaction consistency, please consider holding the EffectChain lock before
     // calling the EffectChain::EffectCallback methods, excepting
@@ -534,11 +628,13 @@
         // Note: ctors taking a weak pointer to their owner must not promote it
         // during construction (but may keep a reference for later promotion).
         EffectCallback(const wp<EffectChain>& owner,
-                const sp<IAfThreadBase>& thread)  // we take a sp<> but store a wp<>.
+                const sp<IAfThreadBase>& thread,
+                const sp<IAfThreadCallback>& afThreadCallback)  // we take a sp<> but store a wp<>.
             : mChain(owner)
-            , mThread(thread) {
-            mThreadType = thread->type();
-            mAfThreadCallback = thread->afThreadCallback();
+            , mThread(thread), mAfThreadCallback(afThreadCallback) {
+            if (thread != nullptr) {
+                mThreadType = thread->type();
+            }
         }
 
         status_t createEffectHal(const effect_uuid_t *pEffectUuid,
@@ -568,9 +664,10 @@
         void setVolumeForOutput(float left, float right) const override;
 
         // check if effects should be suspended/restored when a given effect is enabled/disabled
-        void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect,
-                              bool enabled, bool threadLocked) override;
-        void resetVolume() override;
+        void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect, bool enabled,
+                                         bool threadLocked) override;
+        void resetVolume_l() override
+                REQUIRES(audio_utils::ThreadBase_Mutex, audio_utils::EffectChain_Mutex);
         product_strategy_t strategy() const override;
         int32_t activeTrackCnt() const override;
         void onEffectEnable(const sp<IAfEffectBase>& effect) override;
@@ -579,6 +676,9 @@
         wp<IAfEffectChain> chain() const final { return mChain; }
 
         bool isAudioPolicyReady() const final {
+            if (mAfThreadCallback == nullptr) {
+                return false;
+            }
             return mAfThreadCallback->isAudioPolicyReady();
         }
 
@@ -586,15 +686,19 @@
 
         void setThread(const sp<IAfThreadBase>& thread) {
             mThread = thread;
-            mThreadType = thread->type();
-            mAfThreadCallback = thread->afThreadCallback();
+            if (thread != nullptr) {
+                mThreadType = thread->type();
+                mAfThreadCallback = thread->afThreadCallback();
+            }
         }
-
+        bool hasThreadAttached() const {
+            return thread().promote() != nullptr;
+        }
     private:
         const wp<IAfEffectChain> mChain;
         mediautils::atomic_wp<IAfThreadBase> mThread;
         sp<IAfThreadCallback> mAfThreadCallback;
-        IAfThreadBase::type_t mThreadType;
+        IAfThreadBase::type_t mThreadType = IAfThreadBase::MIXER;
     };
 
     DISALLOW_COPY_AND_ASSIGN(EffectChain);
@@ -610,31 +714,37 @@
 
     // get a list of effect modules to suspend when an effect of the type
     // passed is enabled.
-    void  getSuspendEligibleEffects(Vector<sp<IAfEffectModule>> &effects);
+    void getSuspendEligibleEffects(Vector<sp<IAfEffectModule>>& effects)
+            EXCLUDES_EffectChain_Mutex;
 
     // get an effect module if it is currently enabled
-    sp<IAfEffectModule> getEffectIfEnabled(const effect_uuid_t *type);
+    sp<IAfEffectModule> getEffectIfEnabled_l(const effect_uuid_t* type)
+            REQUIRES(audio_utils::ThreadBase_Mutex);
     // true if the effect whose descriptor is passed can be suspended
     // OEMs can modify the rules implemented in this method to exclude specific effect
     // types or implementations from the suspend/restore mechanism.
     bool isEffectEligibleForSuspend(const effect_descriptor_t& desc);
 
-    static bool isEffectEligibleForBtNrecSuspend(const effect_uuid_t *type);
+    static bool isEffectEligibleForBtNrecSuspend_l(const effect_uuid_t* type)
+            REQUIRES(audio_utils::ThreadBase_Mutex);
 
-    void clearInputBuffer_l();
+    void clearInputBuffer_l() REQUIRES(audio_utils::EffectChain_Mutex);
 
     // true if any effect module within the chain has volume control
-    bool hasVolumeControlEnabled_l() const;
+    bool hasVolumeControlEnabled_l() const REQUIRES(audio_utils::EffectChain_Mutex);
 
-    void setVolumeForOutput_l(uint32_t left, uint32_t right);
+    void setVolumeForOutput_l(uint32_t left, uint32_t right)
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
-    ssize_t getInsertIndex(const effect_descriptor_t& desc);
+    ssize_t getInsertIndex_l(const effect_descriptor_t& desc)
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
-    std::optional<size_t> findVolumeControl_l(size_t from, size_t to) const;
+    std::optional<size_t> findVolumeControl_l(size_t from, size_t to) const
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
     // mutex protecting effect list
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectChain_Mutex};
-             Vector<sp<IAfEffectModule>> mEffects; // list of effect modules
+             Vector<sp<IAfEffectModule>> mEffects  GUARDED_BY(mutex()); // list of effect modules
              audio_session_t mSessionId; // audio session ID
              sp<EffectBufferHalInterface> mInBuffer;  // chain input buffer
              sp<EffectBufferHalInterface> mOutBuffer; // chain output buffer
@@ -649,7 +759,7 @@
              uint32_t mRightVolume;      // previous volume on right channel
              uint32_t mNewLeftVolume;       // new volume on left channel
              uint32_t mNewRightVolume;      // new volume on right channel
-             product_strategy_t mStrategy; // strategy for this effect chain
+             product_strategy_t mStrategy = PRODUCT_STRATEGY_NONE; // strategy for this effect chain
              // mSuspendedEffects lists all effects currently suspended in the chain.
              // Use effect type UUID timelow field as key. There is no real risk of identical
              // timeLow fields among effect type UUIDs.
@@ -674,16 +784,16 @@
     status_t setEnabled(bool enabled, bool fromHandle) final;
     sp<IAfDeviceEffectProxy> asDeviceEffectProxy() final { return this; }
 
-    status_t init(const std::map<audio_patch_handle_t,
-            IAfPatchPanel::Patch>& patches) final;
+    status_t init_l(const std::map<audio_patch_handle_t, IAfPatchPanel::Patch>& patches) final
+            REQUIRES(audio_utils::DeviceEffectManager_Mutex) EXCLUDES_EffectBase_Mutex;
 
     status_t onCreatePatch(audio_patch_handle_t patchHandle,
-            const IAfPatchPanel::Patch& patch) final;
+                           const IAfPatchPanel::Patch& patch) final;
 
     status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle, audio_patch_handle_t newPatchHandle,
            const IAfPatchPanel::Patch& patch) final;
 
-    void onReleasePatch(audio_patch_handle_t patchHandle) final;
+    sp<IAfEffectHandle> onReleasePatch(audio_patch_handle_t patchHandle) final;
 
     size_t removeEffect(const sp<IAfEffectModule>& effect) final;
 
@@ -696,10 +806,8 @@
     audio_channel_mask_t channelMask() const final;
     uint32_t channelCount() const final;
 
-    status_t command(int32_t cmdCode,
-                     const std::vector<uint8_t>& cmdData,
-                     int32_t maxReplySize,
-                     std::vector<uint8_t>* reply) final;
+    status_t command(int32_t cmdCode, const std::vector<uint8_t>& cmdData, int32_t maxReplySize,
+                     std::vector<uint8_t>* reply) final EXCLUDES_DeviceEffectProxy_ProxyMutex;
 
     void dump2(int fd, int spaces) const final;
 
@@ -734,7 +842,10 @@
         audio_channel_mask_t outChannelMask() const override;
         uint32_t outChannelCount() const override;
         audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
-        size_t frameCount() const override  { return 0; }
+        /**
+         * frameCount cannot be zero.
+         */
+        size_t frameCount() const override  { return 1; }
         uint32_t latency() const override  { return 0; }
 
         status_t addEffectToHal(const sp<EffectHalInterface>& effect) override;
@@ -745,8 +856,8 @@
 
         void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect __unused,
                               bool enabled __unused, bool threadLocked __unused) override {}
-        void resetVolume() override {}
-        product_strategy_t strategy() const override  { return static_cast<product_strategy_t>(0); }
+        void resetVolume_l() override REQUIRES(audio_utils::EffectChain_Mutex) {}
+        product_strategy_t strategy() const override  { return PRODUCT_STRATEGY_NONE; }
         int32_t activeTrackCnt() const override { return 0; }
         void onEffectEnable(const sp<IAfEffectBase>& effect __unused) override;
         void onEffectDisable(const sp<IAfEffectBase>& effect __unused) override;
@@ -765,13 +876,16 @@
     };
 
     status_t checkPort(const IAfPatchPanel::Patch& patch,
-            const struct audio_port_config *port, sp<IAfEffectHandle> *handle);
+            const struct audio_port_config* port, sp<IAfEffectHandle>* handle);
 
     const AudioDeviceTypeAddr mDevice;
     const sp<DeviceEffectManagerCallback> mManagerCallback;
     const sp<ProxyCallback> mMyCallback;
 
-    audio_utils::mutex& proxyMutex() const { return mProxyMutex; }
+    audio_utils::mutex& proxyMutex() const
+            RETURN_CAPABILITY(android::audio_utils::DeviceEffectProxy_ProxyMutex) {
+        return mProxyMutex;
+    }
     mutable audio_utils::mutex mProxyMutex{
             audio_utils::MutexOrder::kDeviceEffectProxy_ProxyMutex};
     std::map<audio_patch_handle_t, sp<IAfEffectHandle>> mEffectHandles; // protected by mProxyMutex
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index 56076a3..3452e94 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -80,7 +80,7 @@
     // Methods usually implemented with help from EffectChain: pay attention to mutex locking order
     virtual product_strategy_t strategy() const = 0;
     virtual int32_t activeTrackCnt() const = 0;
-    virtual void resetVolume() = 0;
+    virtual void resetVolume_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual wp<IAfEffectChain> chain() const = 0;
     virtual bool isAudioPolicyReady() const = 0;
 };
@@ -106,43 +106,45 @@
     virtual bool isOffloadable() const = 0;
     virtual bool isImplementationSoftware() const = 0;
     virtual bool isProcessImplemented() const = 0;
-    virtual bool isVolumeControl() const = 0;
+    virtual bool isVolumeControl() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual bool isVolumeMonitor() const = 0;
     virtual bool isEnabled() const = 0;
     virtual bool isPinned() const = 0;
     virtual void unPin() = 0;
-    virtual status_t updatePolicyState() = 0;
-    virtual bool purgeHandles() = 0;
+    virtual status_t updatePolicyState() EXCLUDES_EffectBase_Mutex = 0;
+    virtual bool purgeHandles() EXCLUDES_EffectBase_Mutex = 0;
     virtual void checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) = 0;
 
     // mCallback is atomic so this can be lock-free.
     virtual void setCallback(const sp<EffectCallbackInterface>& callback) = 0;
     virtual sp<EffectCallbackInterface> getCallback() const = 0;
 
-    virtual status_t addHandle(IAfEffectHandle *handle) = 0;
-    virtual ssize_t removeHandle(IAfEffectHandle *handle) = 0;
+    virtual status_t addHandle(IAfEffectHandle* handle) EXCLUDES_EffectBase_Mutex = 0;
+    virtual ssize_t removeHandle(IAfEffectHandle* handle) EXCLUDES_EffectBase_Mutex = 0;
 
     virtual sp<IAfEffectModule> asEffectModule() = 0;
     virtual sp<IAfDeviceEffectProxy> asDeviceEffectProxy() = 0;
 
-    virtual status_t command(int32_t cmdCode,
-            const std::vector<uint8_t>& cmdData,
-            int32_t maxReplySize,
-            std::vector<uint8_t>* reply) = 0;
+    virtual status_t command(int32_t cmdCode, const std::vector<uint8_t>& cmdData,
+                             int32_t maxReplySize, std::vector<uint8_t>* reply)
+            EXCLUDES(audio_utils::EffectBase_Mutex) = 0;
 
     virtual void dump(int fd, const Vector<String16>& args) const = 0;
 
 private:
-    virtual status_t setEnabled(bool enabled, bool fromHandle) = 0;
-    virtual status_t setEnabled_l(bool enabled) = 0;
-    virtual void setSuspended(bool suspended) = 0;
-    virtual bool suspended() const = 0;
+    virtual status_t setEnabled(bool enabled, bool fromHandle) EXCLUDES_EffectBase_Mutex = 0;
+    virtual status_t setEnabled_l(bool enabled) REQUIRES(audio_utils::EffectBase_Mutex) = 0;
+    virtual void setSuspended(bool suspended) EXCLUDES_EffectBase_Mutex = 0;
+    virtual bool suspended() const EXCLUDES_EffectBase_Mutex = 0;
 
-    virtual ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) = 0;
-    virtual ssize_t removeHandle_l(IAfEffectHandle *handle) = 0;
-    virtual IAfEffectHandle* controlHandle_l() = 0;
+    virtual ssize_t disconnectHandle(IAfEffectHandle* handle,
+                                     bool unpinIfLast) EXCLUDES_EffectBase_Mutex = 0;
+    virtual ssize_t removeHandle_l(IAfEffectHandle* handle)
+            REQUIRES(audio_utils::EffectBase_Mutex) = 0;
+    virtual IAfEffectHandle* controlHandle_l() REQUIRES(audio_utils::EffectBase_Mutex) = 0;
 
-    virtual audio_utils::mutex& mutex() const = 0;
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(android::audio_utils::EffectBase_Mutex) = 0;
 };
 
 class IAfEffectModule : public virtual IAfEffectBase {
@@ -151,7 +153,7 @@
 
 public:
     static sp<IAfEffectModule> create(
-            const sp<EffectCallbackInterface>& callabck,
+            const sp<EffectCallbackInterface>& callback,
             effect_descriptor_t *desc,
             int id,
             audio_session_t sessionId,
@@ -161,48 +163,59 @@
     virtual int16_t *inBuffer() const = 0;
     virtual status_t setDevices(const AudioDeviceTypeAddrVector &devices) = 0;
     virtual status_t setInputDevice(const AudioDeviceTypeAddr &device) = 0;
-    virtual status_t setVolume(uint32_t *left, uint32_t *right, bool controller) = 0;
-    virtual status_t setOffloaded(bool offloaded, audio_io_handle_t io) = 0;
-    virtual bool isOffloaded() const = 0;
+    virtual status_t setVolume_l(uint32_t* left, uint32_t* right,
+                                 bool controller /* effect controlling chain volume */,
+                                 bool force = false) REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual status_t setOffloaded_l(bool offloaded, audio_io_handle_t io) = 0;
+    virtual bool isOffloaded_l() const = 0;
 
     virtual status_t setAudioSource(audio_source_t source) = 0;
     virtual status_t setMode(audio_mode_t mode) = 0;
 
-    virtual status_t start() = 0;
-    virtual status_t getConfigs(audio_config_base_t* inputCfg,
-            audio_config_base_t* outputCfg,
-            bool* isOutput) const = 0;
+    virtual status_t start_l() = 0;
+    virtual status_t getConfigs_l(audio_config_base_t* inputCfg, audio_config_base_t* outputCfg,
+                                  bool* isOutput) const
+            REQUIRES(audio_utils::EffectHandle_Mutex) EXCLUDES_EffectBase_Mutex = 0;
 
     static bool isHapticGenerator(const effect_uuid_t* type);
     virtual bool isHapticGenerator() const = 0;
     static bool isSpatializer(const effect_uuid_t* type);
     virtual bool isSpatializer() const = 0;
 
-    virtual status_t setHapticIntensity(int id, os::HapticScale intensity) = 0;
-    virtual status_t setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo) = 0;
-    virtual status_t sendMetadata(const std::vector<playback_track_metadata_v7_t>& metadata) = 0;
+    virtual status_t setHapticScale_l(int id, os::HapticScale hapticScale)
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    virtual status_t setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo)
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    virtual status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata)
+            REQUIRES(audio_utils::ThreadBase_Mutex,
+                     audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    // Return true if there was a state change from STARTING to ACTIVE, or from STOPPED to IDLE;
+    // the effect chain will do a volume reset in these two cases.
+    virtual bool updateState_l()
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
 
 private:
     virtual void process() = 0;
-    virtual bool updateState() = 0;
-    virtual void reset_l() = 0;
-    virtual status_t configure() = 0;
-    virtual status_t init() = 0;
+    virtual void reset_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual status_t configure_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual status_t init_l()
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
     virtual uint32_t status() const = 0;
     virtual bool isProcessEnabled() const = 0;
-    virtual bool isOffloadedOrDirect() const = 0;
-    virtual bool isVolumeControlEnabled() const = 0;
+    virtual bool isOffloadedOrDirect_l() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual bool isVolumeControlEnabled_l() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
     virtual void setInBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
     virtual void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
     virtual int16_t *outBuffer() const = 0;
 
     // Updates the access mode if it is out of date.  May issue a new effect configure.
-    virtual void updateAccessMode() = 0;
+    virtual void updateAccessMode_l() = 0;
 
-    virtual status_t stop() = 0;
+    virtual status_t stop_l() = 0;
     virtual void addEffectToHal_l() = 0;
-    virtual void release_l() = 0;
+    virtual status_t removeEffectFromHal_l() = 0;
+    virtual void release_l(const std::string& from) = 0;
 };
 
 class IAfEffectChain : public RefBase {
@@ -210,7 +223,8 @@
 public:
     static sp<IAfEffectChain> create(
             const sp<IAfThreadBase>& thread,
-            audio_session_t sessionId);
+            audio_session_t sessionId,
+            const sp<IAfThreadCallback>& afThreadCallback);
 
     // special key used for an entry in mSuspendedEffects keyed vector
     // corresponding to a suspend all request.
@@ -220,33 +234,42 @@
     // a session is stopped or removed to allow effect tail to be rendered
     static constexpr int kProcessTailDurationMs = 1000;
 
-    virtual void process_l() = 0;
+    virtual void process_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
-    virtual audio_utils::mutex& mutex() const = 0;
+    virtual audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::EffectChain_Mutex) = 0;
 
-    virtual status_t createEffect_l(sp<IAfEffectModule>& effect,
-                            effect_descriptor_t *desc,
-                            int id,
-                            audio_session_t sessionId,
-                            bool pinned) = 0;
+    virtual status_t createEffect(sp<IAfEffectModule>& effect, effect_descriptor_t* desc, int id,
+                                    audio_session_t sessionId, bool pinned)
+            EXCLUDES_EffectChain_Mutex = 0;
 
-    virtual status_t addEffect_l(const sp<IAfEffectModule>& handle) = 0;
-    virtual status_t addEffect_ll(const sp<IAfEffectModule>& handle) = 0;
-    virtual size_t removeEffect_l(const sp<IAfEffectModule>& handle, bool release = false) = 0;
+    virtual status_t addEffect(const sp<IAfEffectModule>& handle)
+            EXCLUDES_EffectChain_Mutex = 0;
+    virtual status_t addEffect_l(const sp<IAfEffectModule>& handle)
+            REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual size_t removeEffect(const sp<IAfEffectModule>& handle,
+                                  bool release = false) EXCLUDES_EffectChain_Mutex = 0;
 
     virtual audio_session_t sessionId() const = 0;
     virtual void setSessionId(audio_session_t sessionId) = 0;
 
-    virtual sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t *descriptor) const = 0;
-    virtual sp<IAfEffectModule> getEffectFromId_l(int id) const = 0;
-    virtual sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t *type) const = 0;
-    virtual std::vector<int> getEffectIds() const = 0;
-    virtual bool setVolume_l(uint32_t *left, uint32_t *right, bool force = false) = 0;
-    virtual void resetVolume_l() = 0;
-    virtual void setDevices_l(const AudioDeviceTypeAddrVector &devices) = 0;
-    virtual void setInputDevice_l(const AudioDeviceTypeAddr &device) = 0;
-    virtual void setMode_l(audio_mode_t mode) = 0;
-    virtual void setAudioSource_l(audio_source_t source) = 0;
+    virtual sp<IAfEffectModule> getEffectFromDesc(effect_descriptor_t* descriptor) const
+            EXCLUDES_EffectChain_Mutex = 0;
+    virtual sp<IAfEffectModule> getEffectFromId_l(int id) const
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t* type) const
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual std::vector<int> getEffectIds_l() const = 0;
+    virtual bool setVolume(uint32_t* left, uint32_t* right,
+                           bool force = false) EXCLUDES_EffectChain_Mutex = 0;
+    virtual void resetVolume_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual void setDevices_l(const AudioDeviceTypeAddrVector& devices)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual void setInputDevice_l(const AudioDeviceTypeAddr& device)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual void setMode_l(audio_mode_t mode)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual void setAudioSource_l(audio_source_t source)
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
 
     virtual void setInBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
     virtual float *inBuffer() const = 0;
@@ -266,20 +289,21 @@
 
     // suspend or restore effects of the specified type. The number of suspend requests is counted
     // and restore occurs once all suspend requests are cancelled.
-    virtual void setEffectSuspended_l(
-            const effect_uuid_t *type, bool suspend) = 0;
+    virtual void setEffectSuspended_l(const effect_uuid_t* type, bool suspend) = 0;
     // suspend all eligible effects
     virtual void setEffectSuspendedAll_l(bool suspend) = 0;
     // check if effects should be suspended or restored when a given effect is enabled or disabled
-    virtual void checkSuspendOnEffectEnabled(const sp<IAfEffectModule>& effect, bool enabled) = 0;
+    virtual void checkSuspendOnEffectEnabled_l(const sp<IAfEffectModule>& effect, bool enabled)
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
 
-    virtual void clearInputBuffer() = 0;
+    virtual void clearInputBuffer() EXCLUDES_EffectChain_Mutex = 0;
 
     // At least one non offloadable effect in the chain is enabled
-    virtual bool isNonOffloadableEnabled() const = 0;
-    virtual bool isNonOffloadableEnabled_l() const = 0;
+    virtual bool isNonOffloadableEnabled() const EXCLUDES_EffectChain_Mutex = 0;
+    virtual bool isNonOffloadableEnabled_l() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
-    virtual void syncHalEffectsState() = 0;
+    virtual void syncHalEffectsState_l()
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
 
     // flags is an ORed set of audio_output_flags_t which is updated on return.
     virtual void checkOutputFlagCompatibility(audio_output_flags_t *flags) const = 0;
@@ -297,18 +321,24 @@
     virtual bool isBitPerfectCompatible() const = 0;
 
     // isCompatibleWithThread_l() must be called with thread->mLock held
-    virtual bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const = 0;
+    virtual bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
 
-    virtual bool containsHapticGeneratingEffect_l() = 0;
+    virtual bool containsHapticGeneratingEffect()
+            EXCLUDES_EffectChain_Mutex = 0;
 
-    virtual void setHapticIntensity_l(int id, os::HapticScale intensity) = 0;
+    virtual bool containsHapticGeneratingEffect_l()
+            REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+
+    virtual void setHapticScale_l(int id, os::HapticScale hapticScale)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
 
     virtual sp<EffectCallbackInterface> effectCallback() const = 0;
 
     virtual wp<IAfThreadBase> thread() const = 0;
-    virtual void setThread(const sp<IAfThreadBase>& thread) = 0;
+    virtual void setThread(const sp<IAfThreadBase>& thread) EXCLUDES_EffectChain_Mutex = 0;
 
-    virtual bool isFirstEffect(int id) const = 0;
+    virtual bool isFirstEffect_l(int id) const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
     virtual size_t numberOfEffects() const = 0;
     virtual sp<IAfEffectModule> getEffectModule(size_t index) const = 0;
@@ -360,9 +390,8 @@
                 const sp<DeviceEffectManagerCallback>& callback,
                 effect_descriptor_t *desc, int id, bool notifyFramesProcessed);
 
-    virtual status_t init(
-            const std::map<audio_patch_handle_t,
-            IAfPatchPanel::Patch>& patches) = 0;
+    virtual status_t init_l(const std::map<audio_patch_handle_t, IAfPatchPanel::Patch>& patches)
+            REQUIRES(audio_utils::DeviceEffectManager_Mutex) EXCLUDES_EffectBase_Mutex = 0;
     virtual const AudioDeviceTypeAddr& device() const = 0;
 
     virtual status_t onCreatePatch(
@@ -371,7 +400,14 @@
     virtual status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle,
             audio_patch_handle_t newPatchHandle,
             const IAfPatchPanel::Patch& patch) = 0;
-    virtual void onReleasePatch(audio_patch_handle_t patchHandle) = 0;
+    /**
+     * Checks whether an effect handle is linked to the given released patch handle and,
+     * if so, releases it.
+     *
+     * @param patchHandle handle of the released patch
+     * @return a reference to the released effect handle if any, nullptr otherwise.
+     * Returning the handle allows the caller to delay its destruction.
+     */
+    virtual sp<IAfEffectHandle> onReleasePatch(audio_patch_handle_t patchHandle) = 0;
 
     virtual void dump2(int fd, int spaces) const = 0; // TODO(b/291319101) naming?
 
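A hedged sketch of the call pattern enabled by onReleasePatch() now returning the effect
handle: the caller keeps the returned sp<> alive until after its own mutex is dropped, so the
handle is destroyed outside the lock. The surrounding names (proxy, managerMutex) are
assumptions for illustration only:

    sp<IAfEffectHandle> released;
    {
        audio_utils::lock_guard _l(managerMutex);
        released = proxy->onReleasePatch(patchHandle);
    }  // lock dropped here; 'released' is destroyed later, outside the lock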
diff --git a/services/audioflinger/IAfPatchPanel.h b/services/audioflinger/IAfPatchPanel.h
index 6110e4c..37dce3a 100644
--- a/services/audioflinger/IAfPatchPanel.h
+++ b/services/audioflinger/IAfPatchPanel.h
@@ -82,7 +82,8 @@
             audio_config_base_t* mixerConfig,
             audio_devices_t deviceType,
             const String8& address,
-            audio_output_flags_t flags) REQUIRES(mutex()) = 0;
+            audio_output_flags_t flags,
+            audio_attributes_t attributes) REQUIRES(mutex()) = 0;
     virtual audio_utils::mutex& mutex() const
             RETURN_CAPABILITY(audio_utils::AudioFlinger_Mutex) = 0;
     virtual const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 46a67e8..4d26aa0 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -19,8 +19,9 @@
 #include <android/media/IAudioTrackCallback.h>
 #include <android/media/IEffectClient.h>
 #include <audiomanager/IAudioManager.h>
-#include <audio_utils/mutex.h>
+#include <audio_utils/DeferredExecutor.h>
 #include <audio_utils/MelProcessor.h>
+#include <audio_utils/mutex.h>
 #include <binder/MemoryDealer.h>
 #include <datapath/AudioStreamIn.h>
 #include <datapath/AudioStreamOut.h>
@@ -36,6 +37,10 @@
 
 #include <optional>
 
+namespace com::android::media::permission {
+    class IPermissionProvider;
+}
+
 namespace android {
 
 class IAfDirectOutputThread;
@@ -95,7 +100,8 @@
     virtual bool updateOrphanEffectChains(const sp<IAfEffectModule>& effect)
             EXCLUDES_AudioFlinger_Mutex = 0;
     virtual status_t moveEffectChain_ll(audio_session_t sessionId,
-            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread)
+            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread,
+            IAfEffectChain* srcChain = nullptr)
             REQUIRES(mutex(), audio_utils::ThreadBase_Mutex) = 0;
 
     virtual void requestLogMerge() = 0;
@@ -115,9 +121,14 @@
             const sp<AudioIoDescriptor>& ioDesc,
             pid_t pid = 0) EXCLUDES_AudioFlinger_ClientMutex = 0;
     virtual void onNonOffloadableGlobalEffectEnable() EXCLUDES_AudioFlinger_Mutex = 0;
-    virtual void onSupportedLatencyModesChanged(
-            audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes)
+    virtual void onSupportedLatencyModesChanged(audio_io_handle_t output,
+                                                const std::vector<audio_latency_mode_t>& modes)
             EXCLUDES_AudioFlinger_ClientMutex = 0;
+
+    virtual void onHardError(std::set<audio_port_handle_t>& trackPortIds) = 0;
+
+    virtual const ::com::android::media::permission::IPermissionProvider&
+            getPermissionProvider() = 0;
 };
 
 class IAfThreadBase : public virtual RefBase {
@@ -279,7 +290,7 @@
     // integrity of the chains during the process.
     // Also sets the parameter 'effectChains' to current value of mEffectChains.
     virtual void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains)
-            REQUIRES(mutex()) = 0;
+            REQUIRES(mutex()) EXCLUDES_EffectChain_Mutex = 0;
     // unlock effect chains after process
     virtual void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains)
             EXCLUDES_ThreadBase_Mutex = 0;
@@ -392,6 +403,12 @@
     // avoid races.
     virtual void waitWhileThreadBusy_l(audio_utils::unique_lock& ul)
             REQUIRES(mutex()) = 0;
+
+    // The ThreadloopExecutor is used to defer functors or dtors
+    // to when the Threadloop does not hold any mutexes (at the end of the
+    // processing period cycle).
+    virtual audio_utils::DeferredExecutor& getThreadloopExecutor() = 0;
+
     // Dynamic cast to derived interface
     virtual sp<IAfDirectOutputThread> asIAfDirectOutputThread() { return nullptr; }
     virtual sp<IAfDuplicatingThread> asIAfDuplicatingThread() { return nullptr; }
@@ -535,6 +552,9 @@
     virtual const std::atomic<int64_t>& framesWritten() const = 0;
 
     virtual bool usesHwAvSync() const = 0;
+
+    virtual void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMute)
+            EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfDirectOutputThread : public virtual IAfPlaybackThread {
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index 2302e13..a9c87ad 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -18,6 +18,7 @@
 
 #include <android/media/BnAudioRecord.h>
 #include <android/media/BnAudioTrack.h>
+#include <audio_utils/mutex.h>
 #include <audiomanager/IAudioManager.h>
 #include <binder/IMemory.h>
 #include <fastpath/FastMixerDumpState.h>
@@ -338,12 +339,12 @@
     /** Set haptic playback of the track is enabled or not, should be
      * set after query or get callback from vibrator service */
     virtual void setHapticPlaybackEnabled(bool hapticPlaybackEnabled) = 0;
-    /** Return at what intensity to play haptics, used in mixer. */
-    virtual os::HapticScale getHapticIntensity() const = 0;
+    /** Return the haptics scale, used in mixer. */
+    virtual os::HapticScale getHapticScale() const = 0;
     /** Return the maximum amplitude allowed for haptics data, used in mixer. */
     virtual float getHapticMaxAmplitude() const = 0;
-    /** Set intensity of haptic playback, should be set after querying vibrator service. */
-    virtual void setHapticIntensity(os::HapticScale hapticIntensity) = 0;
+    /** Set scale for haptic playback, should be set after querying vibrator service. */
+    virtual void setHapticScale(os::HapticScale hapticScale) = 0;
     /** Set maximum amplitude allowed for haptic data, should be set after querying
      *  vibrator service.
      */
@@ -351,7 +352,8 @@
     virtual sp<os::ExternalVibration> getExternalVibration() const = 0;
 
     // This function should be called while holding the thread lock.
-    virtual void updateTeePatches_l() = 0;
+    virtual void updateTeePatches_l() REQUIRES(audio_utils::ThreadBase_Mutex)
+            EXCLUDES_BELOW_ThreadBase_Mutex = 0;
 
     // Argument teePatchesToUpdate is by value, use std::move to optimize.
     virtual void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) = 0;
@@ -373,6 +375,8 @@
     virtual void triggerEvents(AudioSystem::sync_event_t type) = 0;
 
     virtual void disable() = 0;
+    virtual bool isDisabled() const = 0;
+
     virtual int& fastIndex() = 0;
     virtual bool isPlaybackRestricted() const = 0;
 
@@ -423,6 +427,10 @@
     virtual FillingStatus& fillingStatus() = 0;
     virtual int8_t& retryCount() = 0;
     virtual FastTrackUnderruns& fastTrackUnderruns() = 0;
+
+    // Internal mute, this is currently only used for bit-perfect playback
+    virtual bool getInternalMute() const = 0;
+    virtual void setInternalMute(bool muted) = 0;
 };
 
 // playback track, used by DuplicatingThread
@@ -571,10 +579,11 @@
             size_t bufferSize,
             audio_output_flags_t flags,
             const Timeout& timeout = {},
-            size_t frameCountToBeReady = 1 /** Default behaviour is to start
+            size_t frameCountToBeReady = 1, /** Default behaviour is to start
                                              *  as soon as possible to have
                                              *  the lowest possible latency
-                                             *  even if it might glitch. */);
+                                             *  even if it might glitch. */
+            float speed = 1.0f);
 };
 
 class IAfPatchRecord : public virtual IAfRecordTrack, public virtual IAfPatchTrackBase {
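The internal-mute additions above (getInternalMute()/setInternalMute() on the track interface,
setTracksInternalMute() on the playback thread) key the mute state by audio_port_handle_t.
A minimal sketch of applying such a map to a set of tracks; the helper and the container it
iterates are hypothetical:

    // Illustrative only, not part of this change.
    void applyInternalMutes(const std::map<audio_port_handle_t, bool>& mutes,
                            const std::vector<sp<IAfTrack>>& tracks) {
        for (const auto& track : tracks) {
            if (const auto it = mutes.find(track->portId()); it != mutes.end()) {
                track->setInternalMute(it->second);  // currently only used for bit-perfect playback
            }
        }
    }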
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index 1d38306..57f4ff6 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -117,6 +117,11 @@
     }
 }
 
+void MelReporter::resetReferencesForTest() {
+    mAfMelReporterCallback.clear();
+    mSoundDoseManager->resetReferencesForTest();
+}
+
 void MelReporter::onCreateAudioPatch(audio_patch_handle_t handle,
         const IAfPatchPanel::Patch& patch) {
     if (!mSoundDoseManager->isCsdEnabled()) {
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 0aeb225..8b062f3 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -103,6 +103,8 @@
             const std::vector<playback_track_metadata_v7_t>& metadataVec)
             EXCLUDES_AudioFlinger_Mutex;
 
+    void resetReferencesForTest();
+
 private:
     struct ActiveMelPatch {
         audio_io_handle_t streamHandle{AUDIO_IO_HANDLE_NONE};
@@ -131,7 +133,7 @@
 
     bool useHalSoundDoseInterface_l() REQUIRES(mutex());
 
-    const sp<IAfMelReporterCallback> mAfMelReporterCallback;
+    sp<IAfMelReporterCallback> mAfMelReporterCallback;
     const sp<IAfPatchPanel> mAfPatchPanel;
 
     /* const */ sp<SoundDoseManager> mSoundDoseManager;  // set onFirstRef
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 4333cc8..35f17c1 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -260,6 +260,7 @@
                     if (patch->sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS) {
                         flags = patch->sinks[0].flags.output;
                     }
+                    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
                     const sp<IAfThreadBase> thread = mAfPatchPanelCallback->openOutput_l(
                                                             patch->sinks[0].ext.device.hw_module,
                                                             &output,
@@ -267,7 +268,8 @@
                                                             &mixerConfig,
                                                             outputDevice,
                                                             outputDeviceAddress,
-                                                            flags);
+                                                            flags,
+                                                            attributes);
                     ALOGV("mAfPatchPanelCallback->openOutput_l() returned %p", thread.get());
                     if (thread == 0) {
                         status = NO_MEMORY;
@@ -646,7 +648,8 @@
                                            tempRecordTrack->bufferSize(),
                                            outputFlags,
                                            {} /*timeout*/,
-                                           frameCountToBeReady);
+                                           frameCountToBeReady,
+                                           1.0f);
     status = mPlayback.checkTrack(tempPatchTrack.get());
     if (status != NO_ERROR) {
         return status;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index b4cb805..2cc6236 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -174,15 +174,15 @@
     void setHapticPlaybackEnabled(bool hapticPlaybackEnabled) final {
                 mHapticPlaybackEnabled = hapticPlaybackEnabled;
             }
-            /** Return at what intensity to play haptics, used in mixer. */
-    os::HapticScale getHapticIntensity() const final { return mHapticIntensity; }
+            /** Return the haptics scale, used in mixer. */
+    os::HapticScale getHapticScale() const final { return mHapticScale; }
             /** Return the maximum amplitude allowed for haptics data, used in mixer. */
     float getHapticMaxAmplitude() const final { return mHapticMaxAmplitude; }
             /** Set intensity of haptic playback, should be set after querying vibrator service. */
-    void setHapticIntensity(os::HapticScale hapticIntensity) final {
-                if (os::isValidHapticScale(hapticIntensity)) {
-                    mHapticIntensity = hapticIntensity;
-                    setHapticPlaybackEnabled(mHapticIntensity != os::HapticScale::MUTE);
+    void setHapticScale(os::HapticScale hapticScale) final {
+                if (os::isValidHapticScale(hapticScale)) {
+                    mHapticScale = hapticScale;
+                    setHapticPlaybackEnabled(!mHapticScale.isScaleMute());
                 }
             }
             /** Set maximum amplitude allowed for haptic data, should be set after querying
@@ -194,7 +194,8 @@
     sp<os::ExternalVibration> getExternalVibration() const final { return mExternalVibration; }
 
             // This function should be called while holding the thread lock.
-    void updateTeePatches_l() final;
+    void updateTeePatches_l() final REQUIRES(audio_utils::ThreadBase_Mutex)
+            EXCLUDES_BELOW_ThreadBase_Mutex;
     void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) final;
 
     void tallyUnderrunFrames(size_t frames) final {
@@ -219,6 +220,8 @@
      */
     void processMuteEvent_l(const sp<IAudioManager>& audioManager, mute_state_t muteState) final;
 
+    bool getInternalMute() const final { return mInternalMute; }
+    void setInternalMute(bool muted) final { mInternalMute = muted; }
 protected:
 
     DISALLOW_COPY_AND_ASSIGN(Track);
@@ -274,6 +277,8 @@
     void triggerEvents(AudioSystem::sync_event_t type) final;
     void invalidate() final;
     void disable() final;
+    bool isDisabled() const final;
+
     int& fastIndex() final { return mFastIndex; }
     bool isPlaybackRestricted() const final {
         // The monitor is only created for tracks that can be silenced.
@@ -328,8 +333,8 @@
     sp<OpPlayAudioMonitor>  mOpPlayAudioMonitor;
 
     bool                mHapticPlaybackEnabled = false; // indicates haptic playback enabled or not
-    // intensity to play haptic data
-    os::HapticScale mHapticIntensity = os::HapticScale::MUTE;
+    // scale to play haptic data
+    os::HapticScale mHapticScale = os::HapticScale::mute();
     // max amplitude allowed for haptic data
     float mHapticMaxAmplitude = NAN;
     class AudioVibrationController : public os::BnExternalVibrationController {
@@ -398,6 +403,8 @@
     // access these two variables only when holding player thread lock.
     std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
     mute_state_t        mMuteState;
+
+    bool                mInternalMute = false;
 };  // end of Track
 
 
@@ -448,7 +455,7 @@
     void                queueBuffer(Buffer& inBuffer);
     void                clearBufferQueue();
 
-    void                restartIfDisabled();
+    void restartIfDisabled() override;
 
     // Maximum number of pending buffers allocated by OutputTrack::write()
     static const uint8_t kMaxOverFlowBuffers = 10;
@@ -490,10 +497,11 @@
                                    size_t bufferSize,
                                    audio_output_flags_t flags,
                                    const Timeout& timeout = {},
-                                   size_t frameCountToBeReady = 1 /** Default behaviour is to start
+                                   size_t frameCountToBeReady = 1, /** Default behaviour is to start
                                                                     *  as soon as possible to have
                                                                     *  the lowest possible latency
-                                                                    *  even if it might glitch. */);
+                                                                    *  even if it might glitch. */
+                                   float speed = 1.0f);
     ~PatchTrack() override;
 
     size_t framesReady() const final;
@@ -511,7 +519,7 @@
     void releaseBuffer(Proxy::Buffer* buffer) final;
 
 private:
-            void restartIfDisabled();
+    void restartIfDisabled() override;
 };  // end of PatchTrack
 
 } // namespace android
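The haptics plumbing now passes an os::HapticScale object instead of an intensity enum, so the
mute state is queried via isScaleMute() and the default comes from os::HapticScale::mute(), as
shown above. A small sketch of the validate-then-store pattern used by setHapticScale(); the
free function is hypothetical:

    // Illustrative only; mirrors Track::setHapticScale() above.
    void storeHapticScale(os::HapticScale& stored, const os::HapticScale& scale,
                          bool& hapticPlaybackEnabled) {
        if (os::isValidHapticScale(scale)) {
            stored = scale;
            hapticPlaybackEnabled = !stored.isScaleMute();  // a muted scale disables haptic playback
        }
    }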
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 244a262..583552a 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -33,6 +33,7 @@
 #include <afutils/Vibrator.h>
 #include <audio_utils/MelProcessor.h>
 #include <audio_utils/Metadata.h>
+#include <com_android_media_audioserver.h>
 #ifdef DEBUG_CPU_USAGE
 #include <audio_utils/Statistics.h>
 #include <cpustats/ThreadCpuUsage.h>
@@ -71,6 +72,7 @@
 #include <media/nbaio/Pipe.h>
 #include <media/nbaio/PipeReader.h>
 #include <media/nbaio/SourceAudioBufferProvider.h>
+#include <media/ValidatedAttributionSourceState.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/Process.h>
 #include <mediautils/SchedulingPolicyService.h>
@@ -119,6 +121,8 @@
     return a < b ? a : b;
 }
 
+using com::android::media::permission::ValidatedAttributionSourceState;
+
 namespace android {
 
 using audioflinger::SyncEvent;
@@ -161,6 +165,9 @@
 
 // maximum time to wait in sendConfigEvent_l() for a status to be received
 static const nsecs_t kConfigEventTimeoutNs = seconds(2);
+// longer timeout for create audio patch to account for specific scenarios
+// with Bluetooth devices
+static const nsecs_t kCreatePatchEventTimeoutNs = seconds(4);
 
 // minimum sleep time for the mixer thread loop when tracks are active but in underrun
 static const uint32_t kMinThreadSleepTimeUs = 5000;
@@ -223,6 +230,8 @@
 static const int kPriorityAudioApp = 2;
 static const int kPriorityFastMixer = 3;
 static const int kPriorityFastCapture = 3;
+// Request real-time priority for PlaybackThread in ARC
+static const int kPriorityPlaybackThreadArc = 1;
 
 // IAudioFlinger::createTrack() has an in/out parameter 'pFrameCount' for the total size of the
 // track buffer in shared memory.  Zero on input means to use a default value.  For fast tracks,
@@ -692,6 +701,10 @@
     }
     // When Thread::requestExitAndWait is made virtual and this method is renamed to
     // "virtual status_t requestExitAndWait()", replace by "return Thread::requestExitAndWait();"
+
+    // For TimeCheck: track waiting on the thread join of getTid().
+    audio_utils::mutex::scoped_join_wait_check sjw(getTid());
+
     requestExitAndWait();
 }
 
@@ -721,9 +734,12 @@
     mutex().unlock();
     {
         audio_utils::unique_lock _l(event->mutex());
+        nsecs_t timeoutNs = event->mType == CFG_EVENT_CREATE_AUDIO_PATCH ?
+              kCreatePatchEventTimeoutNs : kConfigEventTimeoutNs;
         while (event->mWaitStatus) {
-            if (event->mCondition.wait_for(_l, std::chrono::nanoseconds(kConfigEventTimeoutNs))
-                        == std::cv_status::timeout) {
+            if (event->mCondition.wait_for(
+                    _l, std::chrono::nanoseconds(timeoutNs), getTid())
+                            == std::cv_status::timeout) {
                 event->mStatus = TIMED_OUT;
                 event->mWaitStatus = false;
             }
@@ -1479,8 +1495,8 @@
     }
 
     if (IAfEffectModule::isHapticGenerator(&desc->type) && mHapticChannelCount == 0) {
-        ALOGW("%s: thread doesn't support haptic playback while the effect is HapticGenerator",
-                __func__);
+        ALOGW("%s: thread (%s) doesn't support haptic playback while the effect is HapticGenerator",
+              __func__, threadTypeToString(mType));
         return BAD_VALUE;
     }
 
@@ -1659,12 +1675,12 @@
         if (chain == 0) {
             // create a new chain for this session
             ALOGV("createEffect_l() new effect chain for session %d", sessionId);
-            chain = IAfEffectChain::create(this, sessionId);
+            chain = IAfEffectChain::create(this, sessionId, mAfThreadCallback);
             addEffectChain_l(chain);
             chain->setStrategy(getStrategyForSession_l(sessionId));
             chainCreated = true;
         } else {
-            effect = chain->getEffectFromDesc_l(desc);
+            effect = chain->getEffectFromDesc(desc);
         }
 
         ALOGV("createEffect_l() got effect %p on chain %p", effect.get(), chain.get());
@@ -1672,7 +1688,7 @@
         if (effect == 0) {
             effectId = mAfThreadCallback->nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
             // create a new effect module if none present in the chain
-            lStatus = chain->createEffect_l(effect, desc, effectId, sessionId, pinned);
+            lStatus = chain->createEffect(effect, desc, effectId, sessionId, pinned);
             if (lStatus != NO_ERROR) {
                 goto Exit;
             }
@@ -1688,10 +1704,11 @@
             // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
             // for the HapticGenerator.
             const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
-                    std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
+                    mAfThreadCallback->getDefaultVibratorInfo_l();
             if (defaultVibratorInfo) {
+                audio_utils::lock_guard _cl(chain->mutex());
                 // Only set the vibrator info when it is a valid one.
-                effect->setVibratorInfo(*defaultVibratorInfo);
+                effect->setVibratorInfo_l(*defaultVibratorInfo);
             }
         }
         // create effect handle and connect it to effect module
@@ -1711,7 +1728,7 @@
     if (!probe && lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
         audio_utils::lock_guard _l(mutex());
         if (effectCreated) {
-            chain->removeEffect_l(effect);
+            chain->removeEffect(effect);
         }
         if (chainCreated) {
             removeEffectChain_l(chain);
@@ -1793,7 +1810,7 @@
 std::vector<int> ThreadBase::getEffectIds_l(audio_session_t sessionId) const
 {
     sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
-    return chain != nullptr ? chain->getEffectIds() : std::vector<int>{};
+    return chain != nullptr ? chain->getEffectIds_l() : std::vector<int>{};
 }
 
 // PlaybackThread::addEffect_ll() must be called with AudioFlinger::mutex() and
@@ -1812,7 +1829,7 @@
     if (chain == 0) {
         // create a new chain for this session
         ALOGV("%s: new effect chain for session %d", __func__, sessionId);
-        chain = IAfEffectChain::create(this, sessionId);
+        chain = IAfEffectChain::create(this, sessionId, mAfThreadCallback);
         addEffectChain_l(chain);
         chain->setStrategy(getStrategyForSession_l(sessionId));
         chainCreated = true;
@@ -1825,9 +1842,9 @@
         return BAD_VALUE;
     }
 
-    effect->setOffloaded(mType == OFFLOAD, mId);
+    effect->setOffloaded_l(mType == OFFLOAD, mId);
 
-    status_t status = chain->addEffect_l(effect);
+    status_t status = chain->addEffect(effect);
     if (status != NO_ERROR) {
         if (chainCreated) {
             removeEffectChain_l(chain);
@@ -1854,7 +1871,7 @@
     sp<IAfEffectChain> chain = effect->getCallback()->chain().promote();
     if (chain != 0) {
         // remove effect chain if removing last effect
-        if (chain->removeEffect_l(effect, release) == 0) {
+        if (chain->removeEffect(effect, release) == 0) {
             removeEffectChain_l(chain);
         }
     } else {
@@ -1862,22 +1879,20 @@
     }
 }
 
-void ThreadBase::lockEffectChains_l(
-        Vector<sp<IAfEffectChain>>& effectChains)
-NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::lock()
+void ThreadBase::lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains)
+        NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::lock()
 {
     effectChains = mEffectChains;
-    for (size_t i = 0; i < mEffectChains.size(); i++) {
-        mEffectChains[i]->mutex().lock();
+    for (const auto& effectChain : effectChains) {
+        effectChain->mutex().lock();
     }
 }
 
-void ThreadBase::unlockEffectChains(
-        const Vector<sp<IAfEffectChain>>& effectChains)
-NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::unlock()
+void ThreadBase::unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains)
+        NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::unlock()
 {
-    for (size_t i = 0; i < effectChains.size(); i++) {
-        effectChains[i]->mutex().unlock();
+    for (const auto& effectChain : effectChains) {
+        effectChain->mutex().unlock();
     }
 }
 
@@ -2690,14 +2705,17 @@
             }
         }
 
-        // Set DIRECT flag if current thread is DirectOutputThread. This can
-        // happen when the playback is rerouted to direct output thread by
+        // Set DIRECT/OFFLOAD flag if current thread is DirectOutputThread/OffloadThread.
+        // This can happen when the playback is rerouted to direct output/offload thread by
         // dynamic audio policy.
         // Do NOT report the flag changes back to client, since the client
-        // doesn't explicitly request a direct flag.
+        // doesn't explicitly request a direct/offload flag.
         audio_output_flags_t trackFlags = *flags;
         if (mType == DIRECT) {
             trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
+        } else if (mType == OFFLOAD) {
+            trackFlags = static_cast<audio_output_flags_t>(trackFlags |
+                                   AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT);
         }
         *afTrackFlags = trackFlags;
 
@@ -2896,21 +2914,21 @@
         sp<IAfEffectChain> chain = getEffectChain_l(track->sessionId());
         if (mHapticChannelMask != AUDIO_CHANNEL_NONE
                 && ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                        || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
+                        || (chain != nullptr && chain->containsHapticGeneratingEffect()))) {
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             mutex().unlock();
-            const os::HapticScale intensity = afutils::onExternalVibrationStart(
+            const os::HapticScale hapticScale = afutils::onExternalVibrationStart(
                     track->getExternalVibration());
             std::optional<media::AudioVibratorInfo> vibratorInfo;
             {
                 // TODO(b/184194780): Use the vibrator information from the vibrator that will be
                 // used to play this track.
                  audio_utils::lock_guard _l(mAfThreadCallback->mutex());
-                vibratorInfo = std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
+                vibratorInfo = mAfThreadCallback->getDefaultVibratorInfo_l();
             }
             mutex().lock();
-            track->setHapticIntensity(intensity);
+            track->setHapticScale(hapticScale);
             if (vibratorInfo) {
                 track->setHapticMaxAmplitude(vibratorInfo->maxAmplitude);
             }
@@ -2926,7 +2944,7 @@
 
             // Set haptic intensity for effect
             if (chain != nullptr) {
-                chain->setHapticIntensity_l(track->id(), intensity);
+                chain->setHapticScale_l(track->id(), hapticScale);
             }
         }
 
@@ -2997,6 +3015,23 @@
     }
 }
 
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds_l()
+{
+    std::set<audio_port_handle_t> result;
+    for (const auto& t : mTracks) {
+        if (t->isExternalTrack()) {
+            result.insert(t->portId());
+        }
+    }
+    return result;
+}
+
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds()
+{
+    audio_utils::lock_guard _l(mutex());
+    return getTrackPortIds_l();
+}
+
 String8 PlaybackThread::getParameters(const String8& keys)
 {
     audio_utils::lock_guard _l(mutex());
@@ -3050,13 +3085,13 @@
     mCallbackThread->resetDraining();
 }
 
-void PlaybackThread::onError()
+void PlaybackThread::onError(bool isHardError)
 {
-    mCallbackThread->setAsyncError();
+    mCallbackThread->setAsyncError(isHardError);
 }
 
 void PlaybackThread::onCodecFormatChanged(
-        const std::basic_string<uint8_t>& metadataBs)
+        const std::vector<uint8_t>& metadataBs)
 {
     const auto weakPointerThis = wp<PlaybackThread>::fromExisting(this);
     std::thread([this, metadataBs, weakPointerThis]() {
@@ -3319,7 +3354,11 @@
         return {}; // nothing to do
     }
     StreamOutHalInterface::SourceMetadata metadata;
-    if (com_android_media_audio_stereo_spatialization()) {
+    static const bool stereo_spatialization_property =
+            property_get_bool("ro.audio.stereo_spatialization_enabled", false);
+    const bool stereo_spatialization_enabled =
+            stereo_spatialization_property && com_android_media_audio_stereo_spatialization();
+    if (stereo_spatialization_enabled) {
         std::map<audio_session_t, std::vector<playback_track_metadata_v7_t> >allSessionsMetadata;
         for (const sp<IAfTrack>& track : mActiveTracks) {
             std::vector<playback_track_metadata_v7_t>& sessionMetadata =
@@ -3391,9 +3430,9 @@
         return NO_ERROR;
     } else {
         status_t status;
-        uint32_t frames;
+        uint64_t frames = 0;
         status = mOutput->getRenderPosition(&frames);
-        *dspFrames = (size_t)frames;
+        *dspFrames = (uint32_t)frames;
         return status;
     }
 }
@@ -3503,7 +3542,7 @@
             char *endptr;
             unsigned long ul = strtoul(value, &endptr, 0);
             if (*endptr == '\0' && ul != 0) {
-                ALOGD("Silence is golden");
+                ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
                 // The setprop command will not allow a property to be changed after
                 // the first time it is set, so we don't have to worry about un-muting.
                 setMasterMute_l(true);
@@ -3951,6 +3990,27 @@
                 stream()->setHalThreadPriority(priorityBoost);
             }
         }
+    } else if (property_get_bool("ro.boot.container", false /* default_value */)) {
+        // In ARC experiments (b/73091832), the latency when using the CFS scheduler at any
+        // priority is too high for PlaybackThread to process audio data in time. We therefore
+        // request the lowest real-time priority, SCHED_FIFO=1, for PlaybackThread on ARC.
+        // ro.boot.container is true only on ARC.
+        const pid_t tid = getTid();
+        if (tid == -1) {
+            ALOGW("%s: Cannot update PlaybackThread priority for ARC, no tid", __func__);
+        } else {
+            const status_t status = requestPriority(getpid(),
+                                                    tid,
+                                                    kPriorityPlaybackThreadArc,
+                                                    false /* isForApp */,
+                                                    true /* asynchronous */);
+            if (status != OK) {
+                ALOGW("%s: Cannot update PlaybackThread priority for ARC, status %d", __func__,
+                        status);
+            } else {
+                stream()->setHalThreadPriority(kPriorityPlaybackThreadArc);
+            }
+        }
     }
 
     Vector<sp<IAfTrack>> tracksToRemove;
@@ -4162,6 +4222,30 @@
 
             metadataUpdate = updateMetadata_l();
 
+            // Acquire a local copy of active tracks with lock (release w/o lock).
+            //
+            // Control methods on the track acquire the ThreadBase lock (e.g. start(),
+            // stop(), pause(), etc.), but the threadLoop is entitled to call audio
+            // data / buffer methods on tracks from activeTracks without the ThreadBase lock.
+            activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
+
+            setHalLatencyMode_l();
+
+            // updateTeePatches_l will acquire the ThreadBase_Mutex of other threads,
+            // so this is done before we lock our effect chains.
+            for (const auto& track : mActiveTracks) {
+                track->updateTeePatches_l();
+            }
+
+            // signal actual start of output stream when the render position reported by
+            // the kernel starts moving.
+            if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
+                    && (mKernelPositionOnStandby
+                            != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
+                mHalStarted = true;
+                mWaitHalStartCV.notify_all();
+            }
+
             // prevent any changes in effect chain list and in each effect chain
             // during mixing and effect process as the audio buffers could be deleted
             // or modified if an effect is created or deleted
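
The block moved above keeps the long-standing idiom of taking reference-counted copies of the active tracks while the ThreadBase mutex is held and dropping those references only after the mutex is released, so a track destructor can never run under the thread lock. A minimal sketch of that idiom with standard types (Track, processOnce and the shared_ptr usage are illustrative stand-ins, not the audioflinger classes):

    #include <memory>
    #include <mutex>
    #include <vector>

    struct Track { /* audio data / buffer methods; destructor may be expensive */ };

    void processOnce(std::mutex& threadMutex,
                     std::vector<std::shared_ptr<Track>>& mActiveTracks) {
        std::vector<std::shared_ptr<Track>> activeTracks;  // outlives the critical section
        {
            std::lock_guard<std::mutex> lock(threadMutex);
            // Copy the references while the lock protects the container.
            activeTracks.insert(activeTracks.end(),
                                mActiveTracks.begin(), mActiveTracks.end());
        }
        // ... call data/buffer methods on activeTracks without holding threadMutex ...
        activeTracks.clear();  // last references may drop here, outside the lock
    }
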
@@ -4189,28 +4273,6 @@
                     }
                 }
             }
-
-            // Acquire a local copy of active tracks with lock (release w/o lock).
-            //
-            // Control methods on the track acquire the ThreadBase lock (e.g. start()
-            // stop(), pause(), etc.), but the threadLoop is entitled to call audio
-            // data / buffer methods on tracks from activeTracks without the ThreadBase lock.
-            activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
-
-            setHalLatencyMode_l();
-
-            for (const auto &track : mActiveTracks ) {
-                track->updateTeePatches_l();
-            }
-
-            // signal actual start of output stream when the render position reported by the kernel
-            // starts moving.
-            if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
-                    && (mKernelPositionOnStandby
-                            != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
-                mHalStarted = true;
-                mWaitHalStartCV.notify_all();
-            }
         } // mutex() scope ends
 
         if (mBytesRemaining == 0) {
@@ -4592,7 +4654,11 @@
 
         // FIXME Note that the above .clear() is no longer necessary since effectChains
         // is now local to this block, but will keep it for now (at least until merge done).
+
+        mThreadloopExecutor.process();
     }
+    mThreadloopExecutor.process(); // process any remaining deferred actions.
+    // deferred actions after this point are ignored.
 
     threadLoop_exit();
 
@@ -4772,7 +4838,7 @@
         }
         if (mHapticChannelCount > 0 &&
                 ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                        || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
+                        || (chain != nullptr && chain->containsHapticGeneratingEffect()))) {
             mutex().unlock();
             // Unlock because VibratorService will lock for this call and will
             // call Tracks.mute/unmute, which also require the thread's lock.
@@ -4782,7 +4848,7 @@
             // When the track is stopped, set the haptic intensity as MUTE
             // for the HapticGenerator effect.
             if (chain != nullptr) {
-                chain->setHapticIntensity_l(track->id(), os::HapticScale::MUTE);
+                chain->setHapticScale_l(track->id(), os::HapticScale::mute());
             }
         }
 
@@ -5035,7 +5101,6 @@
         // mPipeSink below
         // mNormalSink below
 {
-    setMasterBalance(afThreadCallback->getMasterBalance_l());
     ALOGV("MixerThread() id=%d type=%d", id, type);
     ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%u, mFormat=%#x, mFrameSize=%zu, "
             "mFrameCount=%zu, mNormalFrameCount=%zu",
@@ -5047,6 +5112,8 @@
         // The Duplicating thread uses the AudioMixer and delivers data to OutputTracks
         // (downstream MixerThreads) in DuplicatingThread::threadLoop_write().
         // Do not create or use mFastMixer, mOutputSink, mPipeSink, or mNormalSink.
+        // Balance is *not* set in the DuplicatingThread here (or from AudioFlinger),
+        // as the downstream MixerThreads implement it.
         return;
     }
     // create an NBAIO sink for the HAL output stream, and negotiate
@@ -5142,7 +5209,7 @@
                                                     // audio to FastMixer
         fastTrack->mFormat = mFormat; // mPipeSink format for audio to FastMixer
         fastTrack->mHapticPlaybackEnabled = mHapticChannelMask != AUDIO_CHANNEL_NONE;
-        fastTrack->mHapticIntensity = os::HapticScale::NONE;
+        fastTrack->mHapticScale = os::HapticScale::none();
         fastTrack->mHapticMaxAmplitude = NAN;
         fastTrack->mGeneration++;
         state->mFastTracksGen++;
@@ -5206,6 +5273,9 @@
         mNormalSink = initFastMixer ? mPipeSink : mOutputSink;
         break;
     }
+    // setMasterBalance needs to be called after the FastMixer
+    // (if any) is set up, in order to deliver the balance settings to it.
+    setMasterBalance(afThreadCallback->getMasterBalance_l());
 }
 
 MixerThread::~MixerThread()
@@ -5391,11 +5461,15 @@
     broadcast_l();
 }
 
-void PlaybackThread::onAsyncError()
+void PlaybackThread::onAsyncError(bool isHardError)
 {
+    auto allTrackPortIds = getTrackPortIds();
     for (int i = AUDIO_STREAM_SYSTEM; i < (int)AUDIO_STREAM_CNT; i++) {
         invalidateTracks((audio_stream_type_t)i);
     }
+    if (isHardError) {
+        mAfThreadCallback->onHardError(allTrackPortIds);
+    }
 }
 
 void MixerThread::threadLoop_mix()
@@ -5501,7 +5575,7 @@
     sp<IAfEffectChain> chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
     if (chain != 0) {
         uint32_t v = (uint32_t)(masterVolume * (1 << 24));
-        chain->setVolume_l(&v, &v);
+        chain->setVolume(&v, &v);
         masterVolume = (float)((v + (1 << 23)) >> 24);
         chain.clear();
     }
@@ -5700,7 +5774,7 @@
                     fastTrack->mChannelMask = track->channelMask();
                     fastTrack->mFormat = track->format();
                     fastTrack->mHapticPlaybackEnabled = track->getHapticPlaybackEnabled();
-                    fastTrack->mHapticIntensity = track->getHapticIntensity();
+                    fastTrack->mHapticScale = track->getHapticScale();
                     fastTrack->mHapticMaxAmplitude = track->getHapticMaxAmplitude();
                     fastTrack->mGeneration++;
                     state->mTrackMask |= 1 << j;
@@ -5738,6 +5812,11 @@
                 vlf *= volume;
                 vrf *= volume;
 
+                if (track->getInternalMute()) {
+                    vlf = 0.f;
+                    vrf = 0.f;
+                }
+
                 track->setFinalVolume(vlf, vrf);
                 ++fastTracks;
             } else {
@@ -5836,7 +5915,7 @@
 
             mixedTracks++;
 
-            // track->mainBuffer() != mSinkBuffer or mMixerBuffer means
+            // track->mainBuffer() != mSinkBuffer and mMixerBuffer means
             // there is an effect chain connected to the track
             chain.clear();
             if (track->mainBuffer() != mSinkBuffer &&
@@ -5937,10 +6016,15 @@
                 vaf = v * sendLevel * (1. / MAX_GAIN_INT);
             }
 
-            track->setFinalVolume(vrf, vlf);
+            if (track->getInternalMute()) {
+                vrf = 0.f;
+                vlf = 0.f;
+            }
+
+            track->setFinalVolume(vlf, vrf);
 
             // Delegate volume control to effect in track effect chain if needed
-            if (chain != 0 && chain->setVolume_l(&vl, &vr)) {
+            if (chain != 0 && chain->setVolume(&vl, &vr)) {
                 // Do not ramp volume if volume is controlled by effect
                 param = AudioMixer::VOLUME;
                 // Update remaining floating point volume levels
@@ -6061,10 +6145,11 @@
                 trackId,
                 AudioMixer::TRACK,
                 AudioMixer::HAPTIC_ENABLED, (void *)(uintptr_t)track->getHapticPlaybackEnabled());
+            const os::HapticScale hapticScale = track->getHapticScale();
             mAudioMixer->setParameter(
-                trackId,
-                AudioMixer::TRACK,
-                AudioMixer::HAPTIC_INTENSITY, (void *)(uintptr_t)track->getHapticIntensity());
+                    trackId,
+                    AudioMixer::TRACK,
+                    AudioMixer::HAPTIC_SCALE, (void *)&hapticScale);
             const float hapticMaxAmplitude = track->getHapticMaxAmplitude();
             mAudioMixer->setParameter(
                 trackId,
@@ -6125,8 +6210,8 @@
                 // No buffers for this track. Give it a few chances to
                 // fill a buffer, then remove it from active list.
                 if (--(track->retryCount()) <= 0) {
-                    ALOGI("BUFFER TIMEOUT: remove(%d) from active list on thread %p",
-                            trackId, this);
+                    ALOGI("%s BUFFER TIMEOUT: remove track(%d) from active list due to underrun"
+                          " on thread %d", __func__, trackId, mId);
                     tracksToRemove->add(track);
                     // indicate to client process that the track was disabled because of underrun;
                     // it will then automatically call start() when data is available
@@ -6680,8 +6765,8 @@
                 // Convert volumes from float to 8.24
                 uint32_t vl = (uint32_t)(left * (1 << 24));
                 uint32_t vr = (uint32_t)(right * (1 << 24));
-                // Direct/Offload effect chains set output volume in setVolume_l().
-                (void)mEffectChains[0]->setVolume_l(&vl, &vr);
+                // Direct/Offload effect chains set output volume in setVolume().
+                (void)mEffectChains[0]->setVolume(&vl, &vr);
             } else {
                 // otherwise we directly set the volume.
                 setVolumeForOutput_l(left, right);
@@ -6883,7 +6968,8 @@
                     if (isTimestampAdvancing) { // HAL is still playing audio, give us more time.
                         track->retryCount() = kMaxTrackRetriesOffload;
                     } else {
-                        ALOGV("BUFFER TIMEOUT: remove track(%d) from active list", trackId);
+                        ALOGI("%s BUFFER TIMEOUT: remove track(%d) from active list due to"
+                              " underrun on thread %d", __func__, trackId, mId);
                         tracksToRemove->add(track);
                         // indicate to client process that the track was disabled because of
                         // underrun; it will then automatically call start() when data is available
@@ -7003,16 +7089,20 @@
 {
     bool trackPaused = false;
     bool trackStopped = false;
+    bool trackDisabled = false;
 
-    // do not put the HAL in standby when paused. AwesomePlayer clear the offloaded AudioTrack
+    // do not put the HAL in standby when paused. NuPlayer clears the offloaded AudioTrack
     // after a timeout and we will enter standby then.
+    // On offload threads, do not enter standby if the main track is still underrunning.
     if (mTracks.size() > 0) {
-        trackPaused = mTracks[mTracks.size() - 1]->isPaused();
-        trackStopped = mTracks[mTracks.size() - 1]->isStopped() ||
-                           mTracks[mTracks.size() - 1]->state() == IAfTrackBase::IDLE;
+        const auto& mainTrack = mTracks[mTracks.size() - 1];
+
+        trackPaused = mainTrack->isPaused();
+        trackStopped = mainTrack->isStopped() || mainTrack->state() == IAfTrackBase::IDLE;
+        trackDisabled = (mType == OFFLOAD) && mainTrack->isDisabled();
     }
 
-    return !mStandby && !(trackPaused || (mHwPaused && !trackStopped));
+    return !mStandby && !(trackPaused || (mHwPaused && !trackStopped) || trackDisabled);
 }
 
 // checkForNewParameter_l() must be called with ThreadBase::mutex() held
@@ -7061,7 +7151,7 @@
 uint32_t DirectOutputThread::activeSleepTimeUs() const
 {
     uint32_t time;
-    if (audio_has_proportional_frames(mFormat)) {
+    if (audio_has_proportional_frames(mFormat) && mType != OFFLOAD) {
         time = PlaybackThread::activeSleepTimeUs();
     } else {
         time = kDirectMinSleepTimeUs;
@@ -7072,7 +7162,7 @@
 uint32_t DirectOutputThread::idleSleepTimeUs() const
 {
     uint32_t time;
-    if (audio_has_proportional_frames(mFormat)) {
+    if (audio_has_proportional_frames(mFormat) && mType != OFFLOAD) {
         time = (uint32_t)(((mFrameCount * 1000) / mSampleRate) * 1000) / 2;
     } else {
         time = kDirectMinSleepTimeUs;
@@ -7083,7 +7173,7 @@
 uint32_t DirectOutputThread::suspendSleepTimeUs() const
 {
     uint32_t time;
-    if (audio_has_proportional_frames(mFormat)) {
+    if (audio_has_proportional_frames(mFormat) && mType != OFFLOAD) {
         time = (uint32_t)(((mFrameCount * 1000) / mSampleRate) * 1000);
     } else {
         time = kDirectMinSleepTimeUs;
@@ -7100,7 +7190,7 @@
     // no delay on outputs with HW A/V sync
     if (usesHwAvSync()) {
         mStandbyDelayNs = 0;
-    } else if ((mType == OFFLOAD) && !audio_has_proportional_frames(mFormat)) {
+    } else if (mType == OFFLOAD) {
         mStandbyDelayNs = kOffloadStandbyDelayNs;
     } else {
         mStandbyDelayNs = microseconds(mActiveSleepTimeUs*2);
@@ -7133,7 +7223,7 @@
         mPlaybackThread(playbackThread),
         mWriteAckSequence(0),
         mDrainSequence(0),
-        mAsyncError(false)
+        mAsyncError(ASYNC_ERROR_NONE)
 {
 }
 
@@ -7147,7 +7237,7 @@
     while (!exitPending()) {
         uint32_t writeAckSequence;
         uint32_t drainSequence;
-        bool asyncError;
+        AsyncError asyncError;
 
         {
             audio_utils::unique_lock _l(mutex());
@@ -7168,7 +7258,7 @@
             drainSequence = mDrainSequence;
             mDrainSequence &= ~1;
             asyncError = mAsyncError;
-            mAsyncError = false;
+            mAsyncError = ASYNC_ERROR_NONE;
         }
         {
             const sp<PlaybackThread> playbackThread = mPlaybackThread.promote();
@@ -7179,8 +7269,8 @@
                 if (drainSequence & 1) {
                     playbackThread->resetDraining(drainSequence >> 1);
                 }
-                if (asyncError) {
-                    playbackThread->onAsyncError();
+                if (asyncError != ASYNC_ERROR_NONE) {
+                    playbackThread->onAsyncError(asyncError == ASYNC_ERROR_HARD);
                 }
             }
         }
@@ -7230,10 +7320,10 @@
     }
 }
 
-void AsyncCallbackThread::setAsyncError()
+void AsyncCallbackThread::setAsyncError(bool isHardError)
 {
     audio_utils::lock_guard _l(mutex());
-    mAsyncError = true;
+    mAsyncError = isHardError ? ASYNC_ERROR_HARD : ASYNC_ERROR_SOFT;
     mWaitWorkCV.notify_one();
 }
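
Taken together, these AsyncCallbackThread hunks replace the boolean error flag with a three-state value so that a hard error can be told apart from a recoverable one by the time it reaches PlaybackThread::onAsyncError(). A condensed sketch of the flow the hunks add, with the locking and condition variable omitted and the callback reduced to a std::function (simplifications, not the real class layout):

    #include <functional>

    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };

    struct CallbackThreadSketch {
        AsyncError mAsyncError = ASYNC_ERROR_NONE;
        std::function<void(bool /* isHardError */)> onAsyncError;  // PlaybackThread::onAsyncError

        // Reached from the HAL via PlaybackThread::onError(isHardError).
        void setAsyncError(bool isHardError) {
            mAsyncError = isHardError ? ASYNC_ERROR_HARD : ASYNC_ERROR_SOFT;
        }

        // One pass of the callback thread loop: consume the flag, then report it.
        void service() {
            const AsyncError asyncError = mAsyncError;
            mAsyncError = ASYNC_ERROR_NONE;
            if (asyncError != ASYNC_ERROR_NONE && onAsyncError) {
                onAsyncError(asyncError == ASYNC_ERROR_HARD);  // hard errors also reach onHardError()
            }
        }
    };
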
 
@@ -7477,8 +7567,8 @@
                     if (isTimestampAdvancing) { // HAL is still playing audio, give us more time.
                         track->retryCount() = kMaxTrackRetriesOffload;
                     } else {
-                        ALOGV("OffloadThread: BUFFER TIMEOUT: remove track(%d) from active list",
-                                track->id());
+                        ALOGI("%s BUFFER TIMEOUT: remove track(%d) from active list due to"
+                              " underrun on thread %d", __func__, track->id(), mId);
                         tracksToRemove->add(track);
                         // tell client process that the track was disabled because of underrun;
                         // it will then automatically call start() when data is available
@@ -7865,6 +7955,11 @@
     if (mSupportedLatencyModes.empty()) {
         return;
     }
+    // Do not update the HAL latency mode if no track is active
+    if (mActiveTracks.isEmpty()) {
+        return;
+    }
+
     audio_latency_mode_t latencyMode = AUDIO_LATENCY_MODE_FREE;
     if (mSupportedLatencyModes.size() == 1) {
         // If the HAL only support one latency mode currently, confirm the choice
@@ -7875,16 +7970,12 @@
         //   (mRequestedLatencyMode = AUDIO_LATENCY_MODE_LOW)
         //      AND
         // - At least one active track is spatialized
-        bool hasSpatializedActiveTrack = false;
         for (const auto& track : mActiveTracks) {
             if (track->isSpatialized()) {
-                hasSpatializedActiveTrack = true;
+                latencyMode = mRequestedLatencyMode;
                 break;
             }
         }
-        if (hasSpatializedActiveTrack && mRequestedLatencyMode == AUDIO_LATENCY_MODE_LOW) {
-            latencyMode = AUDIO_LATENCY_MODE_LOW;
-        }
     }
 
     if (latencyMode != mSetLatencyMode) {
@@ -7898,7 +7989,7 @@
 }
 
 status_t SpatializerThread::setRequestedLatencyMode(audio_latency_mode_t mode) {
-    if (mode != AUDIO_LATENCY_MODE_LOW && mode != AUDIO_LATENCY_MODE_FREE) {
+    if (mode < 0 || mode >= AUDIO_LATENCY_MODE_CNT) {
         return BAD_VALUE;
     }
     audio_utils::lock_guard _l(mutex());
@@ -8187,7 +8278,6 @@
     inputStandBy();
 
 reacquire_wakelock:
-    sp<IAfRecordTrack> activeTrack;
     {
         audio_utils::lock_guard _l(mutex());
         acquireWakeLock_l();
@@ -8203,6 +8293,9 @@
 
     // loop while there is work to do
     for (int64_t loopCount = 0;; ++loopCount) {  // loopCount used for statistics tracking
+        // Note: these sp<> are released at the end of the for loop outside of the mutex() lock.
+        sp<IAfRecordTrack> activeTrack;
+        std::vector<sp<IAfRecordTrack>> oldActiveTracks;
         Vector<sp<IAfEffectChain>> effectChains;
 
         // activeTracks accumulates a copy of a subset of mActiveTracks
@@ -8252,7 +8345,9 @@
             bool doBroadcast = false;
             bool allStopped = true;
             for (size_t i = 0; i < size; ) {
-
+                if (activeTrack) {  // ensure track release is outside lock.
+                    oldActiveTracks.emplace_back(std::move(activeTrack));
+                }
                 activeTrack = mActiveTracks[i];
                 if (activeTrack->isTerminated()) {
                     if (activeTrack->isFastTrack()) {
@@ -8588,6 +8683,9 @@
 
         // loop over each active track
         for (size_t i = 0; i < size; i++) {
+            if (activeTrack) {  // ensure track release is outside lock.
+                oldActiveTracks.emplace_back(std::move(activeTrack));
+            }
             activeTrack = activeTracks[i];
 
             // skip fast tracks, as those are handled directly by FastCapture
@@ -8731,11 +8829,14 @@
             mIoJitterMs.add(jitterMs);
             mProcessTimeMs.add(processMs);
         }
+        mThreadloopExecutor.process();
         // update timing info.
         mLastIoBeginNs = lastIoBeginNs;
         mLastIoEndNs = lastIoEndNs;
         lastLoopCountRead = loopCount;
     }
+    mThreadloopExecutor.process(); // process any remaining deferred actions.
+    // deferred actions after this point are ignored.
 
     standbyIfNotAlreadyInStandby();
 
@@ -9128,7 +9229,7 @@
     // This is needed for proper patchRecord peer release.
     while (recordTrack->state() == IAfTrackBase::PAUSING && !recordTrack->isInvalid()) {
         mWaitWorkCV.notify_all(); // signal thread to stop
-        mStartStopCV.wait(_l);
+        mStartStopCV.wait(_l, getTid());
     }
 
     if (recordTrack->state() == IAfTrackBase::PAUSED) { // successful stop
@@ -9748,7 +9849,7 @@
 
     // make sure enabled pre processing effects state is communicated to the HAL as we
     // just moved them to a new input stream.
-    chain->syncHalEffectsState();
+    chain->syncHalEffectsState_l();
 
     mEffectChains.add(chain);
 
@@ -10209,8 +10310,23 @@
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
 
     audio_io_handle_t io = mId;
-    const AttributionSourceState adjAttributionSource = afutils::checkAttributionSourcePackage(
-            client.attributionSource);
+    AttributionSourceState adjAttributionSource;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                client.attributionSource);
+    } else {
+        // TODO(b/342475009) validate in oboeservice, and plumb downwards
+        auto validatedRes = ValidatedAttributionSourceState::createFromTrustedUidNoPackage(
+                    client.attributionSource,
+                    mAfThreadCallback->getPermissionProvider()
+                );
+        if (!validatedRes.has_value()) {
+            ALOGE("MMAP client package validation fail: %s",
+                    validatedRes.error().toString8().c_str());
+            return aidl_utils::statusTFromBinderStatus(validatedRes.error());
+        }
+        adjAttributionSource = std::move(validatedRes.value()).unwrapInto();
+    }
 
     const auto localSessionId = mSessionId;
     auto localAttr = mAttr;
@@ -10521,7 +10637,10 @@
         unlockEffectChains(effectChains);
         // Effect chains will be actually deleted here if they were removed from
         // mEffectChains list during mixing or effects processing
+        mThreadloopExecutor.process();
     }
+    mThreadloopExecutor.process(); // process any remaining deferred actions.
+    // deferred actions after this point are ignored.
 
     threadLoop_exit();
 
@@ -10639,6 +10758,16 @@
         }
     }
 
+    // For mmap streams, once the routing has changed, they will be disconnected. It should be
+    // okay to notify the client early, before the new patch is created.
+    if (mDeviceId != deviceId) {
+        if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
+            // The aaudioservice handles the routing changed event asynchronously, so
+            // it is safe to hold the lock here.
+            callback->onRoutingChanged(deviceId);
+        }
+    }
+
     if (mAudioHwDev->supportsAudioPatches()) {
         status = mHalDevice->createAudioPatch(patch->num_sources, patch->sources, patch->num_sinks,
                                               patch->sinks, handle);
@@ -10664,12 +10793,6 @@
             sendIoConfigEvent_l(AUDIO_INPUT_CONFIG_CHANGED);
             mInDeviceTypeAddr = sourceDeviceTypeAddr;
         }
-        sp<MmapStreamCallback> callback = mCallback.promote();
-        if (mDeviceId != deviceId && callback != 0) {
-            mutex().unlock();
-            callback->onRoutingChanged(deviceId);
-            mutex().lock();
-        }
         mPatch = *patch;
         mDeviceId = deviceId;
     }
@@ -10733,7 +10856,7 @@
     chain->setThread(this);
     chain->setInBuffer(nullptr);
     chain->setOutBuffer(nullptr);
-    chain->syncHalEffectsState();
+    chain->syncHalEffectsState_l();
 
     mEffectChains.add(chain);
     checkSuspendOnAddEffectChain_l(chain);
@@ -10821,22 +10944,19 @@
 
 void MmapThread::checkInvalidTracks_l()
 {
-    sp<MmapStreamCallback> callback;
     for (const sp<IAfMmapTrack>& track : mActiveTracks) {
         if (track->isInvalid()) {
-            callback = mCallback.promote();
-            if (callback == nullptr &&  mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+            if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
+                // The aaudioservice handles the routing changed event asynchronously, so
+                // it is safe to hold the lock here.
+                callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+            } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
                 ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
                 mNoCallbackWarningCount++;
             }
             break;
         }
     }
-    if (callback != 0) {
-        mutex().unlock();
-        callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
-        mutex().lock();
-    }
 }
 
 void MmapThread::dumpInternals_l(int fd, const Vector<String16>& /* args */)
@@ -11026,7 +11146,7 @@
         // only one effect chain can be present on DirectOutputThread, so if
         // there is one, the track is connected to it
         if (!mEffectChains.isEmpty()) {
-            mEffectChains[0]->setVolume_l(&vol, &vol);
+            mEffectChains[0]->setVolume(&vol, &vol);
             volume = (float)vol / (1 << 24);
         }
         // Try to use HW volume control and fall back to SW control if not implemented
@@ -11095,7 +11215,7 @@
             char *endptr;
             unsigned long ul = strtoul(value, &endptr, 0);
             if (*endptr == '\0' && ul != 0) {
-                ALOGD("Silence is golden");
+                ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
                 // The setprop command will not allow a property to be changed after
                 // the first time it is set, so we don't have to worry about un-muting.
                 setMasterMute_l(true);
@@ -11305,14 +11425,15 @@
     // If there is only one active track and it is bit-perfect, enable tee buffer.
     float volumeLeft = 1.0f;
     float volumeRight = 1.0f;
-    if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
-        const int trackId = mActiveTracks[0]->id();
+    if (sp<IAfTrack> bitPerfectTrack = getTrackToStreamBitPerfectly_l();
+        bitPerfectTrack != nullptr) {
+        const int trackId = bitPerfectTrack->id();
         mAudioMixer->setParameter(
                     trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, (void *)mSinkBuffer);
         mAudioMixer->setParameter(
                     trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER_FRAME_COUNT,
                     (void *)(uintptr_t)mNormalFrameCount);
-        mActiveTracks[0]->getFinalVolume(&volumeLeft, &volumeRight);
+        bitPerfectTrack->getFinalVolume(&volumeLeft, &volumeRight);
         mIsBitPerfect = true;
     } else {
         mIsBitPerfect = false;
@@ -11337,4 +11458,39 @@
     mHasDataCopiedToSinkBuffer = mIsBitPerfect;
 }
 
+void BitPerfectThread::setTracksInternalMute(
+        std::map<audio_port_handle_t, bool>* tracksInternalMute) {
+    for (auto& track : mTracks) {
+        if (auto it = tracksInternalMute->find(track->portId()); it != tracksInternalMute->end()) {
+            track->setInternalMute(it->second);
+            tracksInternalMute->erase(it);
+        }
+    }
+}
+
+sp<IAfTrack> BitPerfectThread::getTrackToStreamBitPerfectly_l() {
+    if (com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        sp<IAfTrack> bitPerfectTrack = nullptr;
+        bool allOtherTracksMuted = true;
+        // Return the bit perfect track if all other tracks are muted
+        for (const auto& track : mActiveTracks) {
+            if (track->isBitPerfect()) {
+                bitPerfectTrack = track;
+            } else if (track->getFinalVolume() != 0.f) {
+                allOtherTracksMuted = false;
+                if (bitPerfectTrack != nullptr) {
+                    break;
+                }
+            }
+        }
+        return allOtherTracksMuted ? bitPerfectTrack : nullptr;
+    } else {
+        if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
+            return mActiveTracks[0];
+        }
+    }
+    return nullptr;
+}
+
 } // namespace android
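
BitPerfectThread::setTracksInternalMute() above consumes a map keyed by port ID and erases each entry it applies, so a caller that walks several playback threads can see afterwards which port IDs matched no track. A hypothetical caller-side sketch (PlaybackThreadIface and applyInternalMute are illustrative placeholders, not audioflinger APIs):

    #include <cstdint>
    #include <map>
    #include <vector>

    using audio_port_handle_t = int32_t;  // simplified stand-in for the system/audio.h typedef

    struct PlaybackThreadIface {
        virtual ~PlaybackThreadIface() = default;
        virtual void setTracksInternalMute(std::map<audio_port_handle_t, bool>* mutes) = 0;
    };

    // Distribute the requested mute states across threads; entries are erased as they are
    // applied, so whatever remains in the map was not matched by any thread.
    void applyInternalMute(const std::vector<PlaybackThreadIface*>& threads,
                           std::map<audio_port_handle_t, bool> tracksInternalMute) {
        for (auto* thread : threads) {
            thread->setTracksInternalMute(&tracksInternalMute);
            if (tracksInternalMute.empty()) break;
        }
    }
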
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 8491e43..10a77ef 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -379,10 +379,10 @@
         return isOutput() ? outDeviceTypes_l() : DeviceTypeSet({inDeviceType_l()});
     }
 
-    const AudioDeviceTypeAddrVector& outDeviceTypeAddrs() const final {
+    const AudioDeviceTypeAddrVector& outDeviceTypeAddrs() const final REQUIRES(mutex()) {
         return mOutDeviceTypeAddrs;
     }
-    const AudioDeviceTypeAddr& inDeviceTypeAddr() const final {
+    const AudioDeviceTypeAddr& inDeviceTypeAddr() const final REQUIRES(mutex()) {
         return mInDeviceTypeAddr;
     }
 
@@ -436,9 +436,11 @@
                 // ThreadBase mutex before processing the mixer and effects. This guarantees the
                 // integrity of the chains during the process.
                 // Also sets the parameter 'effectChains' to current value of mEffectChains.
-    void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) final REQUIRES(mutex());
+    void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) ACQUIRE(audio_utils::EffectChain_Mutex);
                 // unlock effect chains after process
-    void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains) final;
+    void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains) final
+            RELEASE(audio_utils::EffectChain_Mutex);
                 // get a copy of mEffectChains vector
     Vector<sp<IAfEffectChain>> getEffectChains_l() const final REQUIRES(mutex()) {
         return mEffectChains;
@@ -569,6 +571,10 @@
     void stopMelComputation_l() override
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
+    audio_utils::DeferredExecutor& getThreadloopExecutor() override {
+        return mThreadloopExecutor;
+    }
+
 protected:
 
                 // entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -875,6 +881,14 @@
 
                 SimpleLog mLocalLog;  // locked internally
 
+    // mThreadloopExecutor contains deferred functors and objects (dtors) to
+    // be executed at the end of the processing period, without any
+    // mutexes held.
+    //
+    // mThreadloopExecutor is locked internally, so its methods are thread-safe
+    // for access.
+    audio_utils::DeferredExecutor mThreadloopExecutor;
+
     private:
     void dumpBase_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
     void dumpEffectChains_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
@@ -946,7 +960,7 @@
     // StreamOutHalInterfaceCallback implementation
     virtual     void        onWriteReady();
     virtual     void        onDrainReady();
-    virtual     void        onError();
+    virtual     void        onError(bool /*isHardError*/);
 
 public: // AsyncCallbackThread
                 void        resetWriteBlocked(uint32_t sequence);
@@ -958,11 +972,11 @@
     virtual bool shouldStandby_l() REQUIRES(mutex(), ThreadBase_ThreadLoop);
     virtual void onAddNewTrack_l() REQUIRES(mutex());
 public:  // AsyncCallbackThread
-                void        onAsyncError(); // error reported by AsyncCallbackThread
+                void        onAsyncError(bool isHardError); // error reported by AsyncCallbackThread
 protected:
     // StreamHalInterfaceCodecFormatCallback implementation
                 void        onCodecFormatChanged(
-            const std::basic_string<uint8_t>& metadataBs) final;
+            const std::vector<uint8_t>& metadataBs) final;
 
     // ThreadBase virtuals
     void preExit() final EXCLUDES_ThreadBase_Mutex;
@@ -1198,6 +1212,11 @@
                     }
                     return mHalStarted;
                 }
+
+    void setTracksInternalMute(std::map<audio_port_handle_t, bool>* /* tracksInternalMute */)
+            override EXCLUDES_ThreadBase_Mutex {
+        // Do nothing. This is only used by the bit-perfect thread.
+    }
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1371,6 +1390,8 @@
     bool destroyTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex());
 
     void removeTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex());
+    std::set<audio_port_handle_t> getTrackPortIds_l() REQUIRES(mutex());
+    std::set<audio_port_handle_t> getTrackPortIds();
 
     void readOutputParameters_l() REQUIRES(mutex());
     MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
@@ -1834,7 +1855,7 @@
             void        resetWriteBlocked();
             void        setDraining(uint32_t sequence);
             void        resetDraining();
-            void        setAsyncError();
+            void        setAsyncError(bool isHardError);
 
 private:
     const wp<PlaybackThread>   mPlaybackThread;
@@ -1848,7 +1869,8 @@
     uint32_t                   mDrainSequence;
     audio_utils::condition_variable mWaitWorkCV;
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAsyncCallbackThread_Mutex};
-    bool                       mAsyncError;
+    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };
+    AsyncError                 mAsyncError;
 
     audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::AsyncCallbackThread_Mutex) {
         return mMutex;
@@ -2447,12 +2469,17 @@
     BitPerfectThread(const sp<IAfThreadCallback>& afThreadCallback, AudioStreamOut *output,
                      audio_io_handle_t id, bool systemReady);
 
+    void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMuted)
+            final EXCLUDES_ThreadBase_Mutex;
+
 protected:
     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final
             REQUIRES(mutex(), ThreadBase_ThreadLoop);
     void threadLoop_mix() final REQUIRES(ThreadBase_ThreadLoop);
 
 private:
+    sp<IAfTrack> getTrackToStreamBitPerfectly_l() REQUIRES(mutex());
+
     // These variables are only accessed on the threadLoop; hence need no mutex.
     bool mIsBitPerfect GUARDED_BY(ThreadBase_ThreadLoop) = false;
     float mVolumeLeft GUARDED_BY(ThreadBase_ThreadLoop) = 0.f;
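
The new mThreadloopExecutor member relies on audio_utils::DeferredExecutor, whose implementation is not part of this change. For readers unfamiliar with it, here is a minimal sketch of the pattern the comments above describe, assuming a defer()/process() interface (the real class may differ in detail):

    #include <functional>
    #include <mutex>
    #include <vector>

    class DeferredExecutorSketch {
    public:
        // May be called from any thread, e.g. from TrackBase::deferRestartIfDisabled().
        void defer(std::function<void()> f) {
            std::lock_guard<std::mutex> lock(mMutex);
            mDeferred.push_back(std::move(f));
        }

        // Called at the end of each processing period with no thread mutexes held,
        // so the deferred work is free to take those mutexes itself.
        void process() {
            std::vector<std::function<void()>> pending;
            {
                std::lock_guard<std::mutex> lock(mMutex);
                pending.swap(mDeferred);
            }
            for (auto& f : pending) f();
        }

    private:
        std::mutex mMutex;
        std::vector<std::function<void()>> mDeferred;
    };
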
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 5708c61..a0b85f7 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -333,6 +333,9 @@
                                     // true for Track, false for RecordTrack,
                                     // this could be a track type if needed later
 
+    void deferRestartIfDisabled();
+    virtual void restartIfDisabled() {}
+
     const wp<IAfThreadBase> mThread;
     const alloc_type     mAllocType;
     /*const*/ sp<Client> mClient;   // see explanation at ~TrackBase() why not const
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 4e82173..f5f11cc 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -314,6 +314,17 @@
     return NO_ERROR;
 }
 
+void TrackBase::deferRestartIfDisabled()
+{
+    const auto thread = mThread.promote();
+    if (thread == nullptr) return;
+    thread->getThreadloopExecutor().defer(
+            [track = wp<TrackBase>::fromExisting(this)] {
+            const auto actual = track.promote();
+            if (actual) actual->restartIfDisabled();
+        });
+}
+
 PatchTrackBase::PatchTrackBase(const sp<ClientProxy>& proxy,
         IAfThreadBase* thread, const Timeout& timeout)
     : mProxy(proxy)
@@ -389,6 +400,7 @@
       mTrack(track)
 {
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    setInheritRt(true);
 }
 
 TrackHandle::~TrackHandle() {
@@ -912,7 +924,7 @@
                         "  Format Chn mask  SRate "
                         "ST Usg CT "
                         " G db  L dB  R dB  VS dB "
-                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect"
+                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect InternalMute"
                         "%s\n",
                         isServerLatencySupported() ? "   Latency" : "");
 }
@@ -998,7 +1010,7 @@
                         "%08X %08X %6u "
                         "%2u %3x %2x "
                         "%5.2g %5.2g %5.2g %5.2g%c "
-                        "%08X %6zu%c %6zu %c %9u%c %7u %10s",
+                        "%08X %6zu%c %6zu %c %9u%c %7u %10s %12s",
             active ? "yes" : "no",
             (mClient == 0) ? getpid() : mClient->pid(),
             mSessionId,
@@ -1028,7 +1040,8 @@
             mAudioTrackServerProxy->getUnderrunFrames(),
             nowInUnderrun,
             (unsigned)mAudioTrackServerProxy->framesFlushed() % 10000000,
-            isBitPerfect() ? "true" : "false"
+            isBitPerfect() ? "true" : "false",
+            getInternalMute() ? "true" : "false"
             );
 
     if (isServerLatencySupported()) {
@@ -1233,7 +1246,7 @@
                 && (state == IDLE || state == STOPPED || state == FLUSHED)) {
             mFrameMap.reset();
 
-            if (!isFastTrack() && (isDirect() || isOffloaded())) {
+            if (!isFastTrack()) {
                 // Start point of track -> sink frame map. If the HAL returns a
                 // frame position smaller than the first written frame in
                 // updateTrackFrameInfo, the timestamp can be interpolated
@@ -1674,7 +1687,7 @@
 
     if (result == OK) {
         ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
-              int(muteState), int(mMuteState));
+                static_cast<int>(mMuteState), static_cast<int>(muteState));
         mMuteState = muteState;
     } else {
         ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d", __func__, id(),
@@ -1880,6 +1893,12 @@
     signalClientFlag(CBLK_DISABLED);
 }
 
+bool Track::isDisabled() const {
+    audio_track_cblk_t* cblk = mCblk;
+    return (cblk != nullptr)
+            && ((android_atomic_release_load(&cblk->mFlags) & CBLK_DISABLED) != 0);
+}
+
 void Track::signalClientFlag(int32_t flag)
 {
     // FIXME should use proxy, and needs work
@@ -2302,7 +2321,7 @@
                 waitTimeLeftMs = 0;
             }
             if (status == NOT_ENOUGH_DATA) {
-                restartIfDisabled();
+                deferRestartIfDisabled();
                 continue;
             }
         }
@@ -2314,7 +2333,7 @@
         buf.mFrameCount = outFrames;
         buf.mRaw = NULL;
         mClientProxy->releaseBuffer(&buf);
-        restartIfDisabled();
+        deferRestartIfDisabled();
         pInBuffer->frameCount -= outFrames;
         pInBuffer->raw = (int8_t *)pInBuffer->raw + outFrames * mFrameSize;
         mOutBuffer.frameCount -= outFrames;
@@ -2441,10 +2460,11 @@
         size_t bufferSize,
         audio_output_flags_t flags,
         const Timeout& timeout,
-        size_t frameCountToBeReady /** Default behaviour is to start
+        size_t frameCountToBeReady, /** Default behaviour is to start
                                          *  as soon as possible to have
                                          *  the lowest possible latency
-                                         *  even if it might glitch. */)
+                                         *  even if it might glitch. */
+        float speed)
 {
     return sp<PatchTrack>::make(
             playbackThread,
@@ -2457,7 +2477,8 @@
             bufferSize,
             flags,
             timeout,
-            frameCountToBeReady);
+            frameCountToBeReady,
+            speed);
 }
 
 PatchTrack::PatchTrack(IAfPlaybackThread* playbackThread,
@@ -2470,17 +2491,26 @@
                                                      size_t bufferSize,
                                                      audio_output_flags_t flags,
                                                      const Timeout& timeout,
-                                                     size_t frameCountToBeReady)
+                                                     size_t frameCountToBeReady,
+                                                     float speed)
     :   Track(playbackThread, NULL, streamType,
               audio_attributes_t{} /* currently unused for patch track */,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
               AUDIO_SESSION_NONE, getpid(), audioServerAttributionSource(getpid()), flags,
-              TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady),
-        PatchTrackBase(mCblk ? new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, true, true)
-                        : nullptr,
+              TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady, speed),
+        PatchTrackBase(mCblk ? new AudioTrackClientProxy(mCblk, mBuffer, frameCount, mFrameSize,
+                        true /*clientInServer*/) : nullptr,
                        playbackThread, timeout)
 {
+    if (mProxy != nullptr) {
+        sp<AudioTrackClientProxy>::cast(mProxy)->setPlaybackRate({
+                /* .mSpeed = */ speed,
+                /* .mPitch = */ AUDIO_TIMESTRETCH_PITCH_NORMAL,
+                /* .mStretchMode = */ AUDIO_TIMESTRETCH_STRETCH_DEFAULT,
+                /* .mFallbackMode = */ AUDIO_TIMESTRETCH_FALLBACK_FAIL
+        });
+    }
     ALOGV("%s(%d): sampleRate %d mPeerTimeout %d.%03d sec",
                                       __func__, mId, sampleRate,
                                       (int)mPeerTimeout.tv_sec,
@@ -2558,7 +2588,7 @@
     const size_t originalFrameCount = buffer->mFrameCount;
     do {
         if (status == NOT_ENOUGH_DATA) {
-            restartIfDisabled();
+            deferRestartIfDisabled();
             buffer->mFrameCount = originalFrameCount; // cleared on error, must be restored.
         }
         status = mProxy->obtainBuffer(buffer, timeOut);
@@ -2569,7 +2599,7 @@
 void PatchTrack::releaseBuffer(Proxy::Buffer* buffer)
 {
     mProxy->releaseBuffer(buffer);
-    restartIfDisabled();
+    deferRestartIfDisabled();
 
     // Check if the PatchTrack has enough data to write once in releaseBuffer().
     // If not, prevent an underrun from occurring by moving the track into FS_FILLING;
@@ -2641,6 +2671,7 @@
     mRecordTrack(recordTrack)
 {
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    setInheritRt(true);
 }
 
 RecordHandle::~RecordHandle() {
@@ -3554,6 +3585,8 @@
     }
 
     if (result == OK) {
+        ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
+                static_cast<int>(mMuteState), static_cast<int>(muteState));
         mMuteState = muteState;
     } else {
         ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
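
TrackBase::deferRestartIfDisabled() above captures only a weak reference in the deferred functor, so a pending restart neither extends the track's lifetime nor runs against a destroyed track. The same idiom with standard smart pointers, as a simplified sketch (TrackSketch and the plain vector executor are placeholders for the wp<>/sp<> and DeferredExecutor machinery):

    #include <functional>
    #include <memory>
    #include <vector>

    struct TrackSketch : std::enable_shared_from_this<TrackSketch> {
        void restartIfDisabled() { /* re-enable the track if the client flagged it disabled */ }

        void deferRestartIfDisabled(std::vector<std::function<void()>>& executor) {
            executor.push_back([weak = weak_from_this()] {
                if (auto strong = weak.lock()) {  // no-op if the track is already gone
                    strong->restartIfDisabled();
                }
            });
        }
    };
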
diff --git a/services/audioflinger/afutils/Android.bp b/services/audioflinger/afutils/Android.bp
index 5e29ce9..e147266 100644
--- a/services/audioflinger/afutils/Android.bp
+++ b/services/audioflinger/afutils/Android.bp
@@ -23,7 +23,7 @@
     tidy_checks: audioflinger_utils_tidy_errors,
     tidy_checks_as_errors: audioflinger_utils_tidy_errors,
     tidy_flags: [
-      "-format-style=file",
+        "-format-style=file",
     ],
 }
 
@@ -64,10 +64,10 @@
     ],
 
     header_libs: [
-        "libaaudio_headers",  // PropertyUtils.cpp
+        "libaaudio_headers", // PropertyUtils.cpp
     ],
 
     include_dirs: [
-        "frameworks/av/services/audioflinger",  // for configuration
+        "frameworks/av/services/audioflinger", // for configuration
     ],
 }
diff --git a/services/audioflinger/afutils/NBAIO_Tee.cpp b/services/audioflinger/afutils/NBAIO_Tee.cpp
index 86fb128..cdc8e95 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.cpp
+++ b/services/audioflinger/afutils/NBAIO_Tee.cpp
@@ -514,6 +514,12 @@
     return NO_ERROR; // return full path
 }
 
+/* static */
+NBAIO_Tee::RunningTees& NBAIO_Tee::getRunningTees() {
+    [[clang::no_destroy]] static RunningTees runningTees;
+    return runningTees;
+}
+
 } // namespace android
 
 #endif // TEE_SINK
diff --git a/services/audioflinger/afutils/NBAIO_Tee.h b/services/audioflinger/afutils/NBAIO_Tee.h
index a5c544e..5ab1949 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.h
+++ b/services/audioflinger/afutils/NBAIO_Tee.h
@@ -310,10 +310,7 @@
     };
 
     // singleton
-    static RunningTees &getRunningTees() {
-        static RunningTees runningTees;
-        return runningTees;
-    }
+    static RunningTees& getRunningTees();
 
     // The NBAIO TeeImpl may have a lifetime longer than NBAIO_Tee if
     // RunningTees::dump() is called simultaneously with ~NBAIO_Tee().
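
Moving getRunningTees() out of line and marking the static with [[clang::no_destroy]] gives the registry a single definition and skips its exit-time destructor, so a dump that races with process teardown never touches a destroyed object. A generic sketch of the idiom (RegistrySketch is a placeholder type, not the NBAIO class):

    #include <mutex>
    #include <set>

    struct RegistrySketch {
        std::mutex mutex;
        std::set<const void*> entries;
    };

    // Defined once in a .cpp file; the attribute suppresses the exit-time destructor,
    // so late callers never observe a destroyed registry.
    RegistrySketch& getRegistry() {
        [[clang::no_destroy]] static RegistrySketch registry;
        return registry;
    }
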
diff --git a/services/audioflinger/afutils/Vibrator.cpp b/services/audioflinger/afutils/Vibrator.cpp
index ab15a09..7c99ca9 100644
--- a/services/audioflinger/afutils/Vibrator.cpp
+++ b/services/audioflinger/afutils/Vibrator.cpp
@@ -20,6 +20,7 @@
 
 #include "Vibrator.h"
 
+#include <android/os/ExternalVibrationScale.h>
 #include <android/os/IExternalVibratorService.h>
 #include <binder/IServiceManager.h>
 #include <utils/Log.h>
@@ -46,14 +47,15 @@
 os::HapticScale onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration) {
     if (externalVibration->getAudioAttributes().flags & AUDIO_FLAG_MUTE_HAPTIC) {
         ALOGD("%s, mute haptic according to audio attributes flag", __func__);
-        return os::HapticScale::MUTE;
+        return os::HapticScale::mute();
     }
     const sp<os::IExternalVibratorService> evs = getExternalVibratorService();
     if (evs != nullptr) {
-        int32_t ret;
+
+        os::ExternalVibrationScale ret;
         binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
         if (status.isOk()) {
-            ALOGD("%s, start external vibration with intensity as %d", __func__, ret);
+            ALOGD("%s, start external vibration with intensity as %d", __func__, ret.scaleLevel);
             return os::ExternalVibration::externalVibrationScaleToHapticScale(ret);
         }
     }
@@ -61,7 +63,7 @@
             __func__,
             evs == nullptr ? "external vibration service not found"
                            : "error when querying intensity");
-    return os::HapticScale::MUTE;
+    return os::HapticScale::mute();
 }
 
 void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration) {
diff --git a/services/audioflinger/datapath/Android.bp b/services/audioflinger/datapath/Android.bp
index 4235f14..6918881 100644
--- a/services/audioflinger/datapath/Android.bp
+++ b/services/audioflinger/datapath/Android.bp
@@ -29,7 +29,7 @@
     tidy_checks: audioflinger_datapath_tidy_errors,
     tidy_checks_as_errors: audioflinger_datapath_tidy_errors,
     tidy_flags: [
-      "-format-style=file",
+        "-format-style=file",
     ],
 }
 
@@ -70,6 +70,6 @@
     ],
 
     include_dirs: [
-        "frameworks/av/services/audioflinger",  // for configuration
+        "frameworks/av/services/audioflinger", // for configuration
     ],
 }
diff --git a/services/audioflinger/datapath/AudioHwDevice.cpp b/services/audioflinger/datapath/AudioHwDevice.cpp
index 95e9ecc..5314e9e 100644
--- a/services/audioflinger/datapath/AudioHwDevice.cpp
+++ b/services/audioflinger/datapath/AudioHwDevice.cpp
@@ -43,7 +43,8 @@
         audio_devices_t deviceType,
         audio_output_flags_t flags,
         struct audio_config *config,
-        const char *address)
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
 
     struct audio_config originalConfig = *config;
@@ -52,7 +53,7 @@
     // Try to open the HAL first using the current format.
     ALOGV("openOutputStream(), try sampleRate %d, format %#x, channelMask %#x", config->sample_rate,
             config->format, config->channel_mask);
-    status_t status = outputStream->open(handle, deviceType, config, address);
+    status_t status = outputStream->open(handle, deviceType, config, address, sourceMetadata);
 
     if (status != NO_ERROR) {
         delete outputStream;
@@ -72,7 +73,8 @@
         if (wrapperNeeded) {
             if (SPDIFEncoder::isFormatSupported(originalConfig.format)) {
                 outputStream = new SpdifStreamOut(this, flags, originalConfig.format);
-                status = outputStream->open(handle, deviceType, &originalConfig, address);
+                status = outputStream->open(handle, deviceType, &originalConfig, address,
+                                            sourceMetadata);
                 if (status != NO_ERROR) {
                     ALOGE("ERROR - openOutputStream(), SPDIF open returned %d",
                         status);
diff --git a/services/audioflinger/datapath/AudioHwDevice.h b/services/audioflinger/datapath/AudioHwDevice.h
index 80c1473..e1a9018 100644
--- a/services/audioflinger/datapath/AudioHwDevice.h
+++ b/services/audioflinger/datapath/AudioHwDevice.h
@@ -87,7 +87,8 @@
             audio_devices_t deviceType,
             audio_output_flags_t flags,
             struct audio_config *config,
-            const char *address);
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata);
 
     status_t openInputStream(
             AudioStreamIn **ppStreamIn,
diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp
index 24f3bb9..165ac25 100644
--- a/services/audioflinger/datapath/AudioStreamIn.cpp
+++ b/services/audioflinger/datapath/AudioStreamIn.cpp
@@ -56,15 +56,15 @@
         return status;
     }
 
-    // Adjust for standby using HAL rate frames.
-    // Only apply this correction if the HAL is getting PCM frames.
-    if (mHalFormatHasProportionalFrames) {
+    if (mHalFormatHasProportionalFrames &&
+            (flags & AUDIO_INPUT_FLAG_DIRECT) == AUDIO_INPUT_FLAG_DIRECT) {
+        // For DirectRecord reset position to 0 on standby.
         const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ?
                 0 : (halPosition - mFramesReadAtStandby);
         // Scale from HAL sample rate to application rate.
         *frames = adjustedPosition / mRateMultiplier;
     } else {
-        // For compressed formats.
+        // For compressed formats and linear PCM.
         *frames = halPosition;
     }
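
With the added flag check, the standby adjustment now applies only to direct PCM capture streams; all other streams report the raw HAL position. A hypothetical numeric walk-through of the adjusted branch (values invented for illustration):

    #include <cstdint>

    // Direct PCM capture: the HAL has read 96000 frames in total, 48000 of them before
    // the last standby, and the HAL runs at the application rate (rateMultiplier == 1).
    uint64_t adjustedCapturePosition() {
        const uint64_t halPosition = 96000;
        const uint64_t framesReadAtStandby = 48000;
        const uint64_t rateMultiplier = 1;
        const uint64_t adjusted =
                (halPosition <= framesReadAtStandby) ? 0 : halPosition - framesReadAtStandby;
        return adjusted / rateMultiplier;  // 48000 frames reported to the client
    }
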
 
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index 1830d15..c65373e 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -51,42 +51,17 @@
         return NO_INIT;
     }
 
-    uint32_t halPosition = 0;
+    uint64_t halPosition = 0;
     const status_t status = stream->getRenderPosition(&halPosition);
     if (status != NO_ERROR) {
         return status;
     }
-
-    // Maintain a 64-bit render position using the 32-bit result from the HAL.
-    // This delta calculation relies on the arithmetic overflow behavior
-    // of integers. For example (100 - 0xFFFFFFF0) = 116.
-    const auto truncatedPosition = (uint32_t)mRenderPosition;
-    int32_t deltaHalPosition; // initialization not needed, overwitten by __builtin_sub_overflow()
-    (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
-
-    if (deltaHalPosition > 0) {
-        mRenderPosition += deltaHalPosition;
-    } else if (mExpectRetrograde) {
-        mExpectRetrograde = false;
-        mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
-    }
     // Scale from HAL sample rate to application rate.
-    *frames = mRenderPosition / mRateMultiplier;
+    *frames = halPosition / mRateMultiplier;
 
     return status;
 }
 
-// return bottom 32-bits of the render position
-status_t AudioStreamOut::getRenderPosition(uint32_t *frames)
-{
-    uint64_t position64 = 0;
-    const status_t status = getRenderPosition(&position64);
-    if (status == NO_ERROR) {
-        *frames = (uint32_t)position64;
-    }
-    return status;
-}
-
 status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp)
 {
     if (stream == nullptr) {
@@ -99,15 +74,15 @@
         return status;
     }
 
-    // Adjust for standby using HAL rate frames.
-    // Only apply this correction if the HAL is getting PCM frames.
-    if (mHalFormatHasProportionalFrames) {
+    if (mHalFormatHasProportionalFrames &&
+            (flags & AUDIO_OUTPUT_FLAG_DIRECT) == AUDIO_OUTPUT_FLAG_DIRECT) {
+        // For DirectTrack reset position to 0 on standby.
         const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
                 0 : (halPosition - mFramesWrittenAtStandby);
         // Scale from HAL sample rate to application rate.
         *frames = adjustedPosition / mRateMultiplier;
     } else {
-        // For offloaded MP3 and other compressed formats.
+        // For offloaded MP3 and other compressed formats, and linear PCM.
         *frames = halPosition;
     }
 
@@ -118,7 +93,8 @@
         audio_io_handle_t handle,
         audio_devices_t deviceType,
         struct audio_config *config,
-        const char *address)
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
     sp<StreamOutHalInterface> outStream;
 
@@ -132,7 +108,8 @@
             customFlags,
             config,
             address,
-            &outStream);
+            &outStream,
+            sourceMetadata);
     ALOGV("AudioStreamOut::open(), HAL returned stream %p, sampleRate %d, format %#x,"
             " channelMask %#x, status %d", outStream.get(), config->sample_rate, config->format,
             config->channel_mask, status);
@@ -149,7 +126,8 @@
                 customFlags,
                 &customConfig,
                 address,
-                &outStream);
+                &outStream,
+                sourceMetadata);
         ALOGV("AudioStreamOut::open(), treat IEC61937 as PCM, status = %d", status);
     }
 
@@ -179,8 +157,6 @@
 
 int AudioStreamOut::flush()
 {
-    mRenderPosition = 0;
-    mExpectRetrograde = false;
     mFramesWritten = 0;
     mFramesWrittenAtStandby = 0;
     const status_t result = stream->flush();
@@ -189,12 +165,14 @@
 
 int AudioStreamOut::standby()
 {
-    mRenderPosition = 0;
-    mExpectRetrograde = false;
     mFramesWrittenAtStandby = mFramesWritten;
     return stream->standby();
 }
 
+void AudioStreamOut::presentationComplete() {
+    stream->presentationComplete();
+}
+
 ssize_t AudioStreamOut::write(const void *buffer, size_t numBytes)
 {
     size_t bytesWritten;
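
Note (not part of the patch): the deleted block above extended a 32-bit HAL counter to a 64-bit render position by relying on wraparound subtraction, which the removed comment illustrated with 100 - 0xFFFFFFF0 = 116. That bookkeeping is no longer needed now that getRenderPosition() receives a 64-bit position from the HAL stream. A standalone sketch of the retired trick, with hypothetical values:

    // Sketch only: the 32-bit -> 64-bit extension removed from AudioStreamOut.
    #include <cstdint>
    #include <cstdio>

    int main() {
        uint64_t renderPosition = 0xFFFFFFF0;             // running 64-bit position
        const uint32_t halPosition = 100;                 // 32-bit HAL counter after wrapping
        const auto truncated = (uint32_t)renderPosition;  // low 32 bits of the running position

        int32_t delta;  // overwritten by __builtin_sub_overflow()
        (void)__builtin_sub_overflow(halPosition, truncated, &delta);  // delta == 116

        if (delta > 0) {
            renderPosition += delta;
        }
        printf("extended position: 0x%llx\n", (unsigned long long)renderPosition);  // 0x100000064
        return 0;
    }
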
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index ea41bba..2bf94a1 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -47,13 +47,11 @@
             audio_io_handle_t handle,
             audio_devices_t deviceType,
             struct audio_config *config,
-            const char *address);
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata);
 
     virtual ~AudioStreamOut();
 
-    // Get the bottom 32-bits of the 64-bit render position.
-    status_t getRenderPosition(uint32_t *frames);
-
     virtual status_t getRenderPosition(uint64_t *frames);
 
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
@@ -91,21 +89,14 @@
     virtual status_t flush();
     virtual status_t standby();
 
-    // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
-    // transitioning between tracks.
-    // The HAL resets the frame position without flush/stop being called, but calls back prior to
-    // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
-    // mRenderPosition.
-    virtual void presentationComplete() { mExpectRetrograde = true; }
+    virtual void presentationComplete();
 
 protected:
     uint64_t mFramesWritten = 0; // reset by flush
     uint64_t mFramesWrittenAtStandby = 0;
-    uint64_t mRenderPosition = 0; // reset by flush, standby, or presentation complete
     int mRateMultiplier = 1;
     bool mHalFormatHasProportionalFrames = false;
     size_t mHalFrameSize = 0;
-    bool mExpectRetrograde = false; // see presentationComplete
 };
 
 } // namespace android
diff --git a/services/audioflinger/datapath/SpdifStreamOut.cpp b/services/audioflinger/datapath/SpdifStreamOut.cpp
index 65a4eec..d3983b0 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.cpp
+++ b/services/audioflinger/datapath/SpdifStreamOut.cpp
@@ -45,7 +45,8 @@
         audio_io_handle_t handle,
         audio_devices_t devices,
         struct audio_config *config,
-        const char *address)
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
     struct audio_config customConfig = *config;
 
@@ -75,7 +76,8 @@
             handle,
             devices,
             &customConfig,
-            address);
+            address,
+            sourceMetadata);
 
     ALOGI("SpdifStreamOut::open() status = %d", status);
 
diff --git a/services/audioflinger/datapath/SpdifStreamOut.h b/services/audioflinger/datapath/SpdifStreamOut.h
index c6d27ba..1cd8f65 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.h
+++ b/services/audioflinger/datapath/SpdifStreamOut.h
@@ -43,7 +43,8 @@
             audio_io_handle_t handle,
             audio_devices_t devices,
             struct audio_config *config,
-            const char *address) override;
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata) override;
 
     /**
     * Write audio buffer to driver. Returns number of bytes written, or a
diff --git a/services/audioflinger/fastpath/Android.bp b/services/audioflinger/fastpath/Android.bp
index 84a580f..5ebc583 100644
--- a/services/audioflinger/fastpath/Android.bp
+++ b/services/audioflinger/fastpath/Android.bp
@@ -24,7 +24,7 @@
     tidy_checks: fastpath_tidy_errors,
     tidy_checks_as_errors: fastpath_tidy_errors,
     tidy_flags: [
-      "-format-style=file",
+        "-format-style=file",
     ],
 }
 
diff --git a/services/audioflinger/fastpath/FastMixer.cpp b/services/audioflinger/fastpath/FastMixer.cpp
index e0a15c1..1d41b3f 100644
--- a/services/audioflinger/fastpath/FastMixer.cpp
+++ b/services/audioflinger/fastpath/FastMixer.cpp
@@ -178,8 +178,8 @@
                 (void *)(uintptr_t)mSinkChannelMask);
         mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_ENABLED,
                 (void *)(uintptr_t)fastTrack->mHapticPlaybackEnabled);
-        mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_INTENSITY,
-                (void *)(uintptr_t)fastTrack->mHapticIntensity);
+        mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_SCALE,
+                (void *)(&(fastTrack->mHapticScale)));
         mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_MAX_AMPLITUDE,
                 (void *)(&(fastTrack->mHapticMaxAmplitude)));
 
diff --git a/services/audioflinger/fastpath/FastMixerState.h b/services/audioflinger/fastpath/FastMixerState.h
index 8ab6d25..0a56f92 100644
--- a/services/audioflinger/fastpath/FastMixerState.h
+++ b/services/audioflinger/fastpath/FastMixerState.h
@@ -54,7 +54,7 @@
     audio_format_t          mFormat = AUDIO_FORMAT_INVALID;         // track format
     int                     mGeneration = 0;     // increment when any field is assigned
     bool                    mHapticPlaybackEnabled = false; // haptic playback is enabled or not
-    os::HapticScale         mHapticIntensity = os::HapticScale::MUTE; // intensity of haptic data
+    os::HapticScale mHapticScale = os::HapticScale::mute(); // scale of haptic data
     float                   mHapticMaxAmplitude = NAN; // max amplitude allowed for haptic data
 };
 
diff --git a/services/audioflinger/sounddose/Android.bp b/services/audioflinger/sounddose/Android.bp
index 2cab5d1..884622e 100644
--- a/services/audioflinger/sounddose/Android.bp
+++ b/services/audioflinger/sounddose/Android.bp
@@ -29,7 +29,7 @@
     tidy_checks: audioflinger_sounddose_tidy_errors,
     tidy_checks_as_errors: audioflinger_sounddose_tidy_errors,
     tidy_flags: [
-      "-format-style=file",
+        "-format-style=file",
     ],
 }
 
@@ -40,9 +40,9 @@
 
     defaults: [
         "audioflinger_sounddose_flags_defaults",
-        "latest_android_media_audio_common_types_ndk_shared",
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_sounddose_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
     ],
 
     srcs: [
@@ -66,9 +66,9 @@
     ],
 
     cflags: [
+        "-DBACKEND_NDK",
         "-Wall",
         "-Werror",
-        "-DBACKEND_NDK",
     ],
 }
 
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 6797e3d..cdc36dc 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -231,10 +231,12 @@
         ALOGI("%s: could not find port id for device %s", __func__, adt.toString().c_str());
         return AUDIO_PORT_HANDLE_NONE;
     }
-    const auto btDeviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(address, type));
-    if (btDeviceIt != mBluetoothDevicesWithCsd.end()) {
-        if (!btDeviceIt->second) {
-            ALOGI("%s: bt device %s does not support sound dose", __func__, adt.toString().c_str());
+
+    if (audio_is_ble_out_device(type) || audio_is_a2dp_device(type)) {
+        const auto btDeviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(address, type));
+        if (btDeviceIt == mBluetoothDevicesWithCsd.end() || !btDeviceIt->second) {
+            ALOGI("%s: bt device %s does not support sound dose", __func__,
+                  adt.toString().c_str());
             return AUDIO_PORT_HANDLE_NONE;
         }
     }
@@ -282,7 +284,7 @@
     auto id = soundDoseManager->getIdForAudioDevice(in_audioDevice);
     if (id == AUDIO_PORT_HANDLE_NONE) {
         ALOGI("%s: no mapped id for audio device with type %d and address %s",
-                __func__, in_audioDevice.type.type,
+                __func__, static_cast<int>(in_audioDevice.type.type),
                 in_audioDevice.address.toString().c_str());
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
     }
@@ -315,7 +317,7 @@
     auto id = soundDoseManager->getIdForAudioDevice(in_audioDevice);
     if (id == AUDIO_PORT_HANDLE_NONE) {
         ALOGI("%s: no mapped id for audio device with type %d and address %s",
-                __func__, in_audioDevice.type.type,
+                __func__, static_cast<int>(in_audioDevice.type.type),
                 in_audioDevice.address.toString().c_str());
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
     }
@@ -751,6 +753,10 @@
     }
 }
 
+void SoundDoseManager::resetReferencesForTest() {
+    mMelReporterCallback.clear();
+}
+
 sp<media::ISoundDose> SoundDoseManager::getSoundDoseInterface(
         const sp<media::ISoundDoseCallback>& callback) {
     ALOGV("%s: Register ISoundDoseCallback", __func__);
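
Note (not part of the patch): the reworked lookup above consults mBluetoothDevicesWithCsd only for A2DP and BLE output devices, and treats an unknown Bluetooth device the same as one reported as unsupported. A sketch of that gating in isolation; the map type and helper name below are stand-ins for the real members:

    // Sketch only: BT-only sound dose (CSD) gating, mirroring the logic added above.
    #include <map>
    #include <string>
    #include <utility>
    #include <system/audio.h>

    using BtCsdMap = std::map<std::pair<std::string, audio_devices_t>, bool>;

    bool deviceSupportsCsd(const BtCsdMap& btDevicesWithCsd,
                           const std::string& address, audio_devices_t type) {
        if (audio_is_ble_out_device(type) || audio_is_a2dp_device(type)) {
            const auto it = btDevicesWithCsd.find(std::make_pair(address, type));
            // Unknown Bluetooth devices and devices reported as "false" are both rejected.
            return it != btDevicesWithCsd.end() && it->second;
        }
        return true;  // non-Bluetooth devices are not gated on the Bluetooth CSD map
    }
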
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 52a3fd6..8363d9b 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -157,6 +157,8 @@
 
     void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const override;
 
+    void resetReferencesForTest();
+
 private:
     class SoundDose : public media::BnSoundDose,
                       public IBinder::DeathRecipient {
@@ -229,7 +231,7 @@
 
     mutable std::mutex mLock;
 
-    const sp<IMelReporterCallback> mMelReporterCallback;
+    sp<IMelReporterCallback> mMelReporterCallback;
 
     // no need for lock since MelAggregator is thread-safe
     const sp<audio_utils::MelAggregator> mMelAggregator;
diff --git a/services/audioflinger/sounddose/tests/Android.bp b/services/audioflinger/sounddose/tests/Android.bp
index 2a2addf..fcbebe1 100644
--- a/services/audioflinger/sounddose/tests/Android.bp
+++ b/services/audioflinger/sounddose/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_base_license"
@@ -11,13 +12,13 @@
     name: "sounddosemanager_tests",
 
     srcs: [
-        "sounddosemanager_tests.cpp"
+        "sounddosemanager_tests.cpp",
     ],
 
     defaults: [
-        "latest_android_media_audio_common_types_ndk_static",
         "latest_android_hardware_audio_core_sounddose_ndk_static",
         "latest_android_hardware_audio_sounddose_ndk_static",
+        "latest_android_media_audio_common_types_ndk_static",
     ],
 
     shared_libs: [
@@ -42,10 +43,10 @@
     ],
 
     cflags: [
+        "-DBACKEND_NDK",
         "-Wall",
         "-Werror",
         "-Wextra",
-        "-DBACKEND_NDK",
     ],
 
     test_suites: [
diff --git a/services/audioflinger/timing/Android.bp b/services/audioflinger/timing/Android.bp
index 30ebca0..2666ddb 100644
--- a/services/audioflinger/timing/Android.bp
+++ b/services/audioflinger/timing/Android.bp
@@ -29,7 +29,7 @@
     tidy_checks: audioflinger_timing_tidy_errors,
     tidy_checks_as_errors: audioflinger_timing_tidy_errors,
     tidy_flags: [
-      "-format-style=file",
+        "-format-style=file",
     ],
 }
 
diff --git a/services/audioflinger/timing/tests/Android.bp b/services/audioflinger/timing/tests/Android.bp
index d1e5563..040a914 100644
--- a/services/audioflinger/timing/tests/Android.bp
+++ b/services/audioflinger/timing/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_base_license"
@@ -13,7 +14,7 @@
     host_supported: true,
 
     srcs: [
-        "mediasyncevent_tests.cpp"
+        "mediasyncevent_tests.cpp",
     ],
 
     header_libs: [
@@ -38,7 +39,7 @@
     host_supported: true,
 
     srcs: [
-        "monotonicframecounter_tests.cpp"
+        "monotonicframecounter_tests.cpp",
     ],
 
     static_libs: [
@@ -54,26 +55,26 @@
 }
 
 cc_test {
-     name: "synchronizedrecordstate_tests",
+    name: "synchronizedrecordstate_tests",
 
-     host_supported: true,
+    host_supported: true,
 
-     srcs: [
-         "synchronizedrecordstate_tests.cpp"
-     ],
+    srcs: [
+        "synchronizedrecordstate_tests.cpp",
+    ],
 
-     header_libs: [
-         "libaudioclient_headers",
-     ],
+    header_libs: [
+        "libaudioclient_headers",
+    ],
 
-     static_libs: [
-         "liblog",
-         "libutils", // RefBase
-     ],
+    static_libs: [
+        "liblog",
+        "libutils", // RefBase
+    ],
 
-     cflags: [
-         "-Wall",
-         "-Werror",
-         "-Wextra",
-     ],
- }
\ No newline at end of file
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
new file mode 100644
index 0000000..1c1c1e1
--- /dev/null
+++ b/services/audioparameterparser/Android.bp
@@ -0,0 +1,69 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: [
+        "frameworks_av_services_audioparameterparser_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_services_audioparameterparser_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_defaults {
+    name: "android.hardware.audio.parameter_parser.example_defaults",
+    defaults: [
+        "latest_android_hardware_audio_core_ndk_shared",
+        "latest_av_audio_types_aidl_ndk_shared",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+        "-Wthread-safety",
+    ],
+}
+
+cc_binary {
+    name: "android.hardware.audio.parameter_parser.example_service",
+    system_ext_specific: true,
+    relative_install_path: "hw",
+
+    init_rc: ["android.hardware.audio.parameter_parser.example_service.rc"],
+
+    defaults: [
+        "android.hardware.audio.parameter_parser.example_defaults",
+    ],
+
+    srcs: [
+        "ParameterParser.cpp",
+        "main.cpp",
+    ],
+}
diff --git a/services/audioparameterparser/NOTICE b/services/audioparameterparser/NOTICE
new file mode 100644
index 0000000..44158cb
--- /dev/null
+++ b/services/audioparameterparser/NOTICE
@@ -0,0 +1,190 @@
+
+   Copyright (c) 2005-2024, The Android Open Source Project
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
diff --git a/services/audioparameterparser/ParameterParser.cpp b/services/audioparameterparser/ParameterParser.cpp
new file mode 100644
index 0000000..8d6a64f
--- /dev/null
+++ b/services/audioparameterparser/ParameterParser.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ParameterParser.h"
+
+#define LOG_TAG "Audio_ParameterParser"
+#include <android-base/logging.h>
+
+namespace vendor::audio::parserservice {
+
+using ::aidl::android::hardware::audio::core::VendorParameter;
+using ParameterScope = ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope;
+
+::ndk::ScopedAStatus ParameterParser::parseVendorParameterIds(ParameterScope in_scope,
+                                                              const std::string& in_rawKeys,
+                                                              std::vector<std::string>*) {
+    LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope) << ", keys: " << in_rawKeys;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::parseVendorParameters(ParameterScope in_scope,
+                                                            const std::string& in_rawKeysAndValues,
+                                                            std::vector<VendorParameter>*,
+                                                            std::vector<VendorParameter>*) {
+    LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope)
+               << ", keys/values: " << in_rawKeysAndValues;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::parseBluetoothA2dpReconfigureOffload(
+        const std::string& in_rawValue, std::vector<VendorParameter>*) {
+    LOG(DEBUG) << __func__ << ": value: " << in_rawValue;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::parseBluetoothLeReconfigureOffload(
+        const std::string& in_rawValue, std::vector<VendorParameter>*) {
+    LOG(DEBUG) << __func__ << ": value: " << in_rawValue;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::processVendorParameters(
+        ParameterScope in_scope, const std::vector<VendorParameter>& in_parameters, std::string*) {
+    LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope)
+               << ", parameters: " << ::android::internal::ToString(in_parameters);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+}  // namespace vendor::audio::parserservice
diff --git a/services/audioparameterparser/ParameterParser.h b/services/audioparameterparser/ParameterParser.h
new file mode 100644
index 0000000..1e0e333
--- /dev/null
+++ b/services/audioparameterparser/ParameterParser.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
+
+namespace vendor::audio::parserservice {
+
+class ParameterParser : public ::aidl::android::media::audio::BnHalAdapterVendorExtension {
+  public:
+    ParameterParser() = default;
+
+  private:
+    ::ndk::ScopedAStatus parseVendorParameterIds(
+            ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope in_scope,
+            const std::string& in_rawKeys, std::vector<std::string>* _aidl_return) override;
+
+    ::ndk::ScopedAStatus parseVendorParameters(
+            ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope in_scope,
+            const std::string& in_rawKeysAndValues,
+            std::vector<::aidl::android::hardware::audio::core::VendorParameter>*
+                    out_syncParameters,
+            std::vector<::aidl::android::hardware::audio::core::VendorParameter>*
+                    out_asyncParameters) override;
+
+    ::ndk::ScopedAStatus parseBluetoothA2dpReconfigureOffload(
+            const std::string& in_rawValue,
+            std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
+            override;
+
+    ::ndk::ScopedAStatus parseBluetoothLeReconfigureOffload(
+            const std::string& in_rawValue,
+            std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
+            override;
+
+    ::ndk::ScopedAStatus processVendorParameters(
+            ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope in_scope,
+            const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
+                    in_parameters,
+            std::string* _aidl_return) override;
+};
+
+}  // namespace vendor::audio::parserservice
diff --git a/services/audioparameterparser/android.hardware.audio.parameter_parser.example_service.rc b/services/audioparameterparser/android.hardware.audio.parameter_parser.example_service.rc
new file mode 100644
index 0000000..b6aca5c
--- /dev/null
+++ b/services/audioparameterparser/android.hardware.audio.parameter_parser.example_service.rc
@@ -0,0 +1,6 @@
+service audio_parameter_parser_service /system_ext/bin/hw/android.hardware.audio.parameter_parser.example_service
+    class core
+    user audioserver
+    group media
+    ioprio rt 4
+    task_profiles ProcessCapacityHigh HighPerformance
diff --git a/services/audioparameterparser/main.cpp b/services/audioparameterparser/main.cpp
new file mode 100644
index 0000000..d22eb55
--- /dev/null
+++ b/services/audioparameterparser/main.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Audio_ParameterParser"
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+#include "ParameterParser.h"
+
+using vendor::audio::parserservice::ParameterParser;
+
+int main() {
+    // This is a debug implementation, always enable debug logging.
+    android::base::SetMinimumLogSeverity(::android::base::DEBUG);
+
+    auto parser = ndk::SharedRefBase::make<ParameterParser>();
+    const std::string parserFqn =
+            std::string().append(ParameterParser::descriptor).append("/default");
+    binder_status_t status =
+            AServiceManager_addService(parser->asBinder().get(), parserFqn.c_str());
+    if (status != STATUS_OK) {
+        LOG(ERROR) << "failed to register service for \"" << parserFqn << "\"";
+    }
+
+    ABinderProcess_joinThreadPool();
+    return EXIT_FAILURE;  // should not reach
+}
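
Note (not part of the patch): the example service above only logs its inputs and returns empty results, which is enough to exercise the framework binding. As a purely hypothetical illustration of where real parsing would plug in, a vendor build of parseVendorParameterIds might start by splitting the raw key string into individual ids; the ';' delimiter below is an assumption, not an Android requirement:

    // Sketch only: hypothetical key splitting for a vendor parseVendorParameterIds().
    #include <sstream>
    #include <string>
    #include <vector>

    std::vector<std::string> splitVendorKeys(const std::string& rawKeys) {
        std::vector<std::string> keys;
        std::stringstream stream(rawKeys);
        std::string key;
        while (std::getline(stream, key, ';')) {  // assumed delimiter
            if (!key.empty()) keys.push_back(key);
        }
        return keys;
    }
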
diff --git a/services/audiopolicy/Android.bp b/services/audiopolicy/Android.bp
index e018dd3..66ba7e2 100644
--- a/services/audiopolicy/Android.bp
+++ b/services/audiopolicy/Android.bp
@@ -1,10 +1,11 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 cc_library_headers {
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 9ececea..1bac259 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -18,6 +18,7 @@
 #define ANDROID_AUDIOPOLICY_INTERFACE_H
 
 #include <android/media/DeviceConnectedState.h>
+#include <android/media/TrackInternalMuteInfo.h>
 #include <media/AudioCommonTypes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -166,7 +167,8 @@
                                      audio_input_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
                                      input_type_t *inputType,
-                                     audio_port_handle_t *portId) = 0;
+                                     audio_port_handle_t *portId,
+                                     uint32_t *virtualDeviceId) = 0;
     // indicates to the audio policy manager that the input starts being used.
     virtual status_t startInput(audio_port_handle_t portId) = 0;
     // indicates to the audio policy manager that the input stops being used.
@@ -178,10 +180,16 @@
     // volume control functions
     //
 
+    // notifies the audio policy manager that the absolute volume mode is enabled/disabled on
+    // the passed device. Also specifies the stream that is controlling the absolute volume.
+    virtual status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t device,
+                                                    const char *address,
+                                                    bool enabled,
+                                                    audio_stream_type_t streamToDriveAbs) = 0;
     // initialises stream volume conversion parameters by specifying volume index range.
     virtual void initStreamVolume(audio_stream_type_t stream,
-                                      int indexMin,
-                                      int indexMax) = 0;
+                                  int indexMin,
+                                  int indexMax) = 0;
 
     // sets the new stream volume at a level corresponding to the supplied index for the
     // supplied device. By convention, specifying AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME means
@@ -269,6 +277,7 @@
 
     virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes) = 0;
     virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
+    virtual status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) = 0;
 
     virtual status_t updatePolicyMix(
         const AudioMix& mix,
@@ -285,8 +294,7 @@
     virtual status_t startAudioSource(const struct audio_port_config *source,
                                       const audio_attributes_t *attributes,
                                       audio_port_handle_t *portId,
-                                      uid_t uid,
-                                      bool internal = false) = 0;
+                                      uid_t uid) = 0;
     virtual status_t stopAudioSource(audio_port_handle_t portId) = 0;
 
     virtual status_t setMasterMono(bool mono) = 0;
@@ -469,7 +477,8 @@
                                 audio_config_base_t *mixerConfig,
                                 const sp<DeviceDescriptorBase>& device,
                                 uint32_t *latencyMs,
-                                audio_output_flags_t flags) = 0;
+                                audio_output_flags_t flags,
+                                audio_attributes_t audioAttributes) = 0;
     // creates a special output that is duplicated to the two outputs passed as arguments.
     // The duplication is performed by a special mixer thread in the AudioFlinger.
     virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
@@ -583,6 +592,9 @@
     // Get the attributes of the mix port when connecting to the given device port.
     virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                      struct audio_port_v7 *mixPort) = 0;
+
+    virtual status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index a2ebb8d..cf1a771 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -46,6 +46,9 @@
           "include-filter": "com.google.android.gts.audio.AudioPolicyHostTest"
         }
       ]
+    },
+    {
+      "name": "spatializer_tests"
     }
   ]
 }
diff --git a/services/audiopolicy/common/Android.bp b/services/audiopolicy/common/Android.bp
index 91701ad..a699b8b 100644
--- a/services/audiopolicy/common/Android.bp
+++ b/services/audiopolicy/common/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 4643bd1..ee6af4c 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -17,6 +17,7 @@
 #pragma once
 
 #include <system/audio.h>
+#include <set>
 #include <vector>
 
 #include <media/AudioContainers.h>
@@ -27,16 +28,16 @@
 
 static const audio_attributes_t defaultAttr = AUDIO_ATTRIBUTES_INITIALIZER;
 
+static const std::set<audio_usage_t> gHighPriorityUseCases = {
+        AUDIO_USAGE_ALARM, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE
+};
+
 } // namespace android
 
 static const audio_format_t gDynamicFormat = AUDIO_FORMAT_DEFAULT;
 
 static const uint32_t SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY = 5000;
 
-// For mixed output and inputs, the policy will use max mixer sampling rates.
-// Do not limit sampling rate otherwise
-#define SAMPLE_RATE_HZ_MAX 192000
-
 // Used when a client opens a capture stream, without specifying a desired sample rate.
 #define SAMPLE_RATE_HZ_DEFAULT 48000
 
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 8b76842..051e975 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -35,11 +36,13 @@
         "src/TypeConverter.cpp",
     ],
     shared_libs: [
+        "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libaudiopolicy",
+        "libaudioutils",
         "libbase",
         "libcutils",
         "libhidlbase",
@@ -54,9 +57,6 @@
         "libmedia",
         "libmedia_helper",
     ],
-    static_libs: [
-        "libaudioutils",
-    ],
     header_libs: [
         "libaudiopolicycommon",
         "libaudiopolicymanager_interface_headers",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
index 6167f95..e519766 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
@@ -47,13 +47,17 @@
 
     if (active) {
         // On MMAP IOs, the preferred device is selected by the first client (virtual client
-        // created when the mmap stream is opened). This client is never active.
+        // created when the mmap stream is opened). This client is never active and we only
+        // consider the Filter criteria, not the active state.
         // On non MMAP IOs, the preferred device is honored only if all active clients have
         // a preferred device in which case the first client drives the selection.
         if (desc->isMmap()) {
-            // The client list is never empty on a MMAP IO
-            return devices.getDeviceFromId(
-                    desc->clientsList(false /*activeOnly*/)[0]->preferredDeviceId());
+            auto matchingClients = desc->clientsList(
+                    false /*activeOnly*/, filter, false /*preferredDevice*/);
+            if (matchingClients.empty()) {
+                return nullptr;
+            }
+            return devices.getDeviceFromId(matchingClients[0]->preferredDeviceId());
         } else {
             auto activeClientsWithRoute =
                 desc->clientsList(true /*activeOnly*/, filter, true /*preferredDevice*/);
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index c26ea10..0f2fe24 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -41,7 +41,8 @@
 {
 public:
     AudioInputDescriptor(const sp<IOProfile>& profile,
-                         AudioPolicyClientInterface *clientInterface);
+                         AudioPolicyClientInterface *clientInterface,
+                         bool isPreemptor);
 
     virtual ~AudioInputDescriptor() = default;
 
@@ -127,6 +128,8 @@
     // active use case
     void checkSuspendEffects();
 
+    bool isPreemptor() const { return mIsPreemptor; }
+
  private:
 
     void updateClientRecordingConfiguration(int event, const sp<RecordClientDescriptor>& client);
@@ -145,6 +148,7 @@
     int32_t mGlobalActiveCount = 0;  // non-client-specific activity ref count
     EffectDescriptorCollection mEnabledEffects;
     audio_input_flags_t& mFlags = AudioPortConfig::mFlags.input;
+    bool mIsPreemptor; // true if this input was opened after preempting another one
 };
 
 class AudioInputCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 7c70877..bfb28a5 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -16,7 +16,6 @@
 
 #pragma once
 
-#define __STDC_LIMIT_MACROS
 #include <inttypes.h>
 
 #include <sys/types.h>
@@ -30,6 +29,7 @@
 #include "ClientDescriptor.h"
 #include "DeviceDescriptor.h"
 #include "PolicyAudioPort.h"
+#include "PreferredMixerAttributesInfo.h"
 #include <vector>
 
 namespace android {
@@ -413,7 +413,8 @@
                       const DeviceVector &devices,
                       audio_stream_type_t stream,
                       audio_output_flags_t flags,
-                      audio_io_handle_t *output);
+                      audio_io_handle_t *output,
+                      audio_attributes_t attributes);
 
         // Called when a stream is about to be started
         // Note: called before setClientActive(true);
@@ -478,6 +479,16 @@
 
     PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
 
+    bool isBitPerfect() const {
+        return (getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE;
+    }
+
+    /**
+     * Return true if there is any client with the same usage active on the given device.
+     * When the given device is null, return true if there is any client active.
+     */
+    bool isUsageActiveOnDevice(audio_usage_t usage, sp<DeviceDescriptor> device) const;
+
     virtual std::string info() const override;
 
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
@@ -490,7 +501,7 @@
     audio_session_t mDirectClientSession; // session id of the direct output client
     bool mPendingReopenToQueryProfiles = false;
     audio_channel_mask_t mMixerChannelMask = AUDIO_CHANNEL_NONE;
-    bool mUsePreferredMixerAttributes = false;
+    sp<PreferredMixerAttributesInfo> mPreferredAttrInfo = nullptr;
 };
 
 // Audio output driven by an input device directly.
@@ -617,6 +628,8 @@
      */
     bool isAnyDeviceTypeActive(const DeviceTypeSet& deviceTypes) const;
 
+    bool isUsageActiveOnDevice(audio_usage_t usage, sp<DeviceDescriptor> device) const;
+
     void dump(String8 *dst) const;
 };
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h b/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
index f84bda7..5fb0ad4 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
@@ -38,11 +38,40 @@
 void appendAudioProfiles(AudioProfileVector &audioProfileVector,
                          const AudioProfileVector &audioProfileVectorToAppend);
 
+/**
+ * Check if the profile vector contains a profile that matches the given sampling rate, channel
+ * mask and format. Note that this method uses `audio_formats_match` from policy.h, which will
+ * consider PCM formats to match if their bytes per sample are greater than 2.
+ *
+ * @param audioProfileVector
+ * @param samplingRate
+ * @param channelMask
+ * @param format
+ * @return NO_ERROR if the given profile vector is empty or it contains a profile that matches the
+ *         given sampling rate, channel mask and format. Otherwise, returns BAD_VALUE.
+ */
 status_t checkExactProfile(const AudioProfileVector &audioProfileVector,
                            const uint32_t samplingRate,
                            audio_channel_mask_t channelMask,
                            audio_format_t format);
 
+/**
+ * Check if the profile vector contains a profile that has exactly the same sampling rate, channel
+ * mask and format as the given values.
+ *
+ * @param audioProfileVector
+ * @param samplingRate
+ * @param channelMask
+ * @param format
+ * @return NO_ERROR if the given profile vector is empty or it contains a profile that has
+ *         exactly the same sampling rate, channel mask and format as the given values. Otherwise,
+ *         returns BAD_VALUE.
+ */
+status_t checkIdenticalProfile(const AudioProfileVector &audioProfileVector,
+                               const uint32_t samplingRate,
+                               audio_channel_mask_t channelMask,
+                               audio_format_t format);
+
 status_t checkCompatibleProfile(const AudioProfileVector &audioProfileVector,
                                 uint32_t &samplingRate,
                                 audio_channel_mask_t &channelMask,
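
Note (not part of the patch): the comments above draw a distinction that is easy to miss. checkExactProfile() relies on audio_formats_match(), so it can accept a different PCM format when both formats use more than two bytes per sample, while checkIdenticalProfile() requires the very same format, sampling rate and channel mask. A stand-in sketch of that relaxed format comparison, written only from the description above:

    // Sketch only: a stand-in for the relaxed format comparison used by checkExactProfile().
    #include <system/audio.h>

    static bool formatsMatchRelaxed(audio_format_t a, audio_format_t b) {
        if (a == b) return true;
        // Per the comment above, PCM formats with more than two bytes per sample match.
        return audio_is_linear_pcm(a) && audio_is_linear_pcm(b) &&
               audio_bytes_per_sample(a) > 2 && audio_bytes_per_sample(b) > 2;
    }
    // checkIdenticalProfile() has no such relaxation: identical values are required.
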
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index fe90a1e..60da405 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -63,6 +63,8 @@
      * HW Audio Source.
      */
     virtual bool isInternal() const { return false; }
+    virtual bool isCallRx() const { return false; }
+    virtual bool isCallTx() const { return false; }
     audio_port_handle_t portId() const { return mPortId; }
     uid_t uid() const { return mUid; }
     audio_session_t session() const { return mSessionId; };
@@ -165,6 +167,18 @@
         mIsInvalid = true;
     }
 
+    bool getInternalMute() const { return mInternalMute; }
+
+    /**
+     * Set the internal mute for a client. Return true if the existing value is different from
+     * the given value.
+     */
+    bool setInternalMute(bool muted) {
+        const bool result = (mInternalMute != muted);
+        mInternalMute = muted;
+        return result;
+    }
+
 private:
     const audio_stream_type_t mStream;
     const product_strategy_t mStrategy;
@@ -178,6 +192,7 @@
      */
     uint32_t mActivityCount = 0;
     bool mIsInvalid = false;
+    bool mInternalMute = false;
 };
 
 class RecordClientDescriptor: public ClientDescriptor
@@ -223,7 +238,7 @@
                            const sp<DeviceDescriptor>& srcDevice,
                            audio_stream_type_t stream, product_strategy_t strategy,
                            VolumeSource volumeSource,
-                           bool isInternal);
+                           bool isInternal, bool isCallRx, bool isCallTx);
 
     ~SourceClientDescriptor() override = default;
 
@@ -250,6 +265,8 @@
     wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
     void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
     bool isInternal() const override { return mIsInternal; }
+    bool isCallRx() const override { return mIsCallRx; }
+    bool isCallTx() const override { return mIsCallTx; }
 
     using ClientDescriptor::dump;
     void dump(String8 *dst, int spaces) const override;
@@ -281,6 +298,8 @@
      * requester to prevent rerouting SwOutput involved in raw patches.
      */
     bool mIsInternal = false;
+    bool mIsCallRx = false;
+    bool mIsCallTx = false;
 };
 
 class SourceClientCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index c502fc2..7002e63 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -282,6 +282,11 @@
 
     const AudioProfileVector& getSupportedProfiles() { return mSupportedProfiles; }
 
+    /**
+     * @brief checks if all devices in the device vector are attached to the HwModule
+     * @return true if all the devices in the device vector are attached, otherwise false
+     */
+    bool areAllDevicesAttached() const;
     // Return a string to describe the DeviceVector. The sensitive information will only be
     // added to the string if `includeSensitiveInfo` is true.
     std::string toString(bool includeSensitiveInfo = false) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index c2e4b11..f7b9b33 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -32,11 +32,15 @@
 class EffectDescriptor : public RefBase
 {
 public:
-    EffectDescriptor(const effect_descriptor_t *desc, bool isMusicEffect,
-                     int id, audio_io_handle_t io, audio_session_t session) :
-        mId(id), mIo(io), mSession(session), mEnabled(false), mSuspended(false),
-        mIsMusicEffect(isMusicEffect)
-    {
+  EffectDescriptor(const effect_descriptor_t* desc, bool isMusicEffect, int id,
+                   audio_io_handle_t io, audio_session_t session)
+      : mId(id),
+        mIo(io),
+        mIsOrphan(io == AUDIO_IO_HANDLE_NONE),
+        mSession(session),
+        mEnabled(false),
+        mSuspended(false),
+        mIsMusicEffect(isMusicEffect) {
         memcpy (&mDesc, desc, sizeof(effect_descriptor_t));
     }
 
@@ -95,8 +99,18 @@
      * @return ioHandle if found, AUDIO_IO_HANDLE_NONE otherwise.
      */
     audio_io_handle_t getIoForSession(audio_session_t sessionId,
-                                      const effect_uuid_t *effectType = nullptr);
-    bool hasOrphansForSession(audio_session_t sessionId);
+                                      const effect_uuid_t* effectType = nullptr) const;
+
+    /**
+     * @brief Checks if there is at least one orphan effect with the given sessionId and an
+     * optional effect type UUID.
+     * @param sessionId Session ID.
+     * @param effectType Optional effect type UUID pointer to effect_uuid_t, nullptr by default.
+     * @return True if there is an orphan effect for given sessionId and type UUID, false otherwise.
+     */
+    bool hasOrphansForSession(audio_session_t sessionId,
+                              const effect_uuid_t* effectType = nullptr) const;
+
     EffectDescriptorCollection getOrphanEffectsForSession(audio_session_t sessionId) const;
     void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
 
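
For illustration only (not part of the patch): a minimal standalone sketch of the orphan-effect lookup that the new hasOrphansForSession() overload describes, applying the optional type filter with memcmp as in the patched implementation. The types below (EffectUuid, Effect) are simplified stand-ins, not the AOSP classes.

    // Standalone sketch with hypothetical stand-in types; not EffectDescriptorCollection.
    #include <cstring>
    #include <vector>

    struct EffectUuid { unsigned char bytes[16]; };

    struct Effect {
        int session;
        bool isOrphan;
        EffectUuid type;
    };

    // Returns true if any orphan effect matches the session and, optionally, the type UUID.
    bool hasOrphansForSession(const std::vector<Effect>& effects, int sessionId,
                              const EffectUuid* effectType = nullptr) {
        for (const Effect& e : effects) {
            if (e.session == sessionId && e.isOrphan &&
                (effectType == nullptr ||
                 std::memcmp(&e.type, effectType, sizeof(EffectUuid)) == 0)) {
                return true;
            }
        }
        return false;
    }
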
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index f3a9518..688772c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -70,10 +70,17 @@
         return mMixerBehaviors;
     }
 
+    enum CompatibilityScore {
+        NO_MATCH = 0,
+        PARTIAL_MATCH = 1,
+        EXACT_MATCH = 2
+    };
+
     /**
-     * @brief isCompatibleProfile: This method is used for input and direct output,
+     * @brief getCompatibilityScore: This method is used for input and direct output,
      * and is not used for other output.
-     * Checks if the IO profile is compatible with specified parameters.
+     * Returns a compatibility score measuring how compatible the IO profile is
+     * with the specified parameters.
      * For input, flags is interpreted as audio_input_flags_t.
      * TODO: merge audio_output_flags_t and audio_input_flags_t.
      *
@@ -86,18 +93,18 @@
      * @param updatedChannelMask if non-NULL, it is assigned the actual channel mask
      * @param flags to be checked for compatibility
      * @param exactMatchRequiredForInputFlags true if exact match is required on flags
-     * @return true if the profile is compatible, false otherwise.
+     * @return how compatible the IO profile is with the given parameters.
      */
-    bool isCompatibleProfile(const DeviceVector &devices,
-                             uint32_t samplingRate,
-                             uint32_t *updatedSamplingRate,
-                             audio_format_t format,
-                             audio_format_t *updatedFormat,
-                             audio_channel_mask_t channelMask,
-                             audio_channel_mask_t *updatedChannelMask,
-                             // FIXME parameter type
-                             uint32_t flags,
-                             bool exactMatchRequiredForInputFlags = false) const;
+    CompatibilityScore getCompatibilityScore(const DeviceVector &devices,
+                                             uint32_t samplingRate,
+                                             uint32_t *updatedSamplingRate,
+                                             audio_format_t format,
+                                             audio_format_t *updatedFormat,
+                                             audio_channel_mask_t channelMask,
+                                             audio_channel_mask_t *updatedChannelMask,
+                                             // FIXME parameter type
+                                             uint32_t flags,
+                                             bool exactMatchRequiredForInputFlags = false) const;
 
     /**
      * @brief areAllDevicesSupported: Checks if the given devices are supported by the IO profile.
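
For illustration only (not part of the patch): a minimal standalone sketch of how a caller might rank candidates by a three-level compatibility score, which is the idea behind replacing the boolean isCompatibleProfile() with getCompatibilityScore(). The Profile struct and pickBest() helper are hypothetical stand-ins, not the real IOProfile API.

    // Standalone sketch; simplified stand-in types, not the real IOProfile API.
    #include <cstdio>
    #include <vector>

    enum CompatibilityScore { NO_MATCH = 0, PARTIAL_MATCH = 1, EXACT_MATCH = 2 };

    struct Profile { const char* name; CompatibilityScore score; };

    // Keep the profile with the highest score; NO_MATCH candidates are ignored.
    const Profile* pickBest(const std::vector<Profile>& profiles) {
        const Profile* best = nullptr;
        for (const Profile& p : profiles) {
            if (p.score == NO_MATCH) continue;
            if (best == nullptr || p.score > best->score) best = &p;
        }
        return best;
    }

    int main() {
        std::vector<Profile> candidates = {
                {"mixer_out", PARTIAL_MATCH}, {"direct_out", EXACT_MATCH}, {"mmap_out", NO_MATCH}};
        if (const Profile* best = pickBest(candidates)) {
            std::printf("picked %s\n", best->name);  // prints "picked direct_out"
        }
        return 0;
    }
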
diff --git a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
index acf787b..6b21e9f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
@@ -56,9 +56,14 @@
     // Audio port IDs are in a different namespace than AudioFlinger unique IDs
     static audio_port_handle_t getNextUniqueId();
 
-    // searches for an exact match
+    // searches for an exact match; note that this method uses `audio_formats_match` from policy.h,
+    // which considers PCM formats to match if their bytes per sample are greater than 2.
     virtual status_t checkExactAudioProfile(const struct audio_port_config *config) const;
 
+    // searches for an identical match; unlike `checkExactAudioProfile` above, this also
+    // requires the formats to be exactly the same.
+    virtual status_t checkIdenticalAudioProfile(const struct audio_port_config *config) const;
+
     // searches for a compatible match, currently implemented for input
     // parameters are input|output, returned value is the best match.
     status_t checkCompatibleAudioProfile(uint32_t &samplingRate,
@@ -100,6 +105,12 @@
                          const ChannelMaskSet &channelMasks) const;
     void pickSamplingRate(uint32_t &rate, const SampleRateSet &samplingRates) const;
 
+    status_t checkAudioProfile(const struct audio_port_config *config,
+                               std::function<status_t(const AudioProfileVector&,
+                                                      const uint32_t samplingRate,
+                                                      audio_channel_mask_t,
+                                                      audio_format_t)> checkProfile) const;
+
     sp<HwModule> mModule;     // audio HW module exposing this I/O stream
     AudioRouteVector mRoutes; // Routes involving this port
 };
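
For illustration only (not part of the patch): a standalone sketch modelling the exact-versus-identical distinction described in the comments above, where an "exact" check (per `audio_formats_match`) also treats two PCM formats wider than 2 bytes per sample as matching, while an "identical" check requires equal formats. The Format struct and both helpers are hypothetical stand-ins, not policy.h functions.

    // Standalone sketch; hypothetical stand-in types, not policy.h's audio_formats_match().
    #include <cstdio>

    struct Format { bool isPcm; int bytesPerSample; int id; };

    // "Exact" in the sense of the comment above: identical, or both PCM wider than 2 bytes/sample.
    bool formatsMatch(const Format& a, const Format& b) {
        if (a.id == b.id) return true;
        return a.isPcm && b.isPcm && a.bytesPerSample > 2 && b.bytesPerSample > 2;
    }

    // "Identical": the format ids themselves must be equal.
    bool formatsIdentical(const Format& a, const Format& b) { return a.id == b.id; }

    int main() {
        Format pcm24{true, 3, 1}, pcm32{true, 4, 2};
        std::printf("match=%d identical=%d\n",
                    formatsMatch(pcm24, pcm32), formatsIdentical(pcm24, pcm32));  // match=1 identical=0
        return 0;
    }
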
diff --git a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
index 9472481..a493e3c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
@@ -44,6 +44,17 @@
 
     void increaseActiveClient() { mActiveClientsCount++; }
     void decreaseActiveClient() { mActiveClientsCount--; }
+    void resetActiveClient() { mActiveClientsCount = 0; }
+
+    bool isBitPerfect() const {
+        return (getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE;
+    }
+
+    bool configMatches(const audio_config_t& config) const {
+        return config.format == mMixerAttributes.config.format &&
+                config.channel_mask == mMixerAttributes.config.channel_mask &&
+                config.sample_rate == mMixerAttributes.config.sample_rate;
+    }
 
     void dump(String8 *dst);
 
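
For illustration only (not part of the patch): a tiny standalone sketch of the field-by-field comparison that configMatches() performs, using a simplified Config struct rather than audio_config_t.

    // Standalone sketch; Config stands in for audio_config_t.
    #include <cstdint>

    struct Config { uint32_t sampleRate; uint32_t channelMask; int format; };

    struct MixerAttributesInfo {
        Config config;
        // True when format, channel mask and sample rate all match the stored config.
        bool configMatches(const Config& other) const {
            return other.format == config.format &&
                   other.channelMask == config.channelMask &&
                   other.sampleRate == config.sampleRate;
        }
    };

    int main() {
        MixerAttributesInfo info{{48000, 3, 1}};
        Config requested{48000, 3, 1};
        return info.configMatches(requested) ? 0 : 1;  // matches, exits 0
    }
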
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 6f71ac5..5a0fd97 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -30,9 +30,10 @@
 namespace android {
 
 AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile,
-                                           AudioPolicyClientInterface *clientInterface)
+                                           AudioPolicyClientInterface *clientInterface,
+                                           bool isPreemptor)
     : mProfile(profile)
-    ,  mClientInterface(clientInterface)
+    ,  mClientInterface(clientInterface), mIsPreemptor(isPreemptor)
 {
     if (profile != NULL) {
         profile->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
@@ -235,7 +236,8 @@
                                                   &deviceType,
                                                   String8(mDevice->address().c_str()),
                                                   source,
-                                                  flags);
+                                                  static_cast<audio_input_flags_t>(
+                                                          flags & mProfile->getFlags()));
     LOG_ALWAYS_FATAL_IF(mDevice->type() != deviceType,
                         "%s openInput returned device %08x when given device %08x",
                         __FUNCTION__, mDevice->type(), deviceType);
@@ -274,6 +276,9 @@
                             "%s invalid profile active count %u",
                             __func__, mProfile->curActiveCount);
         mProfile->curActiveCount--;
+        // allow preemption again now that at least one client was able to
+        // capture on this input
+        mIsPreemptor = false;
     }
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 6537a00..6fef215 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -524,6 +524,14 @@
     StreamTypeVector streams = streamTypes;
     if (!AudioOutputDescriptor::setVolume(
             volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
+        if (hasStream(streamTypes, AUDIO_STREAM_BLUETOOTH_SCO)) {
+            VolumeSource callVolSrc = getVoiceSource();
+            if (callVolSrc != VOLUME_SOURCE_NONE && volumeDb != getCurVolume(callVolSrc)) {
+                setCurVolume(callVolSrc, volumeDb, true);
+                mClientInterface->setStreamVolume(
+                        AUDIO_STREAM_VOICE_CALL, Volume::DbToAmpl(volumeDb), mIoHandle, delayMs);
+            }
+        }
         return false;
     }
     if (streams.empty()) {
@@ -561,6 +569,7 @@
             audio_port_config config = {};
             devicePort->toAudioPortConfig(&config);
             config.config_mask = AUDIO_PORT_CONFIG_GAIN;
+            config.gain.mode = gains[0]->getMode();
             config.gain.values[0] = gainValueMb;
             return mClientInterface->setAudioPortConfig(&config, 0) == NO_ERROR;
         }
@@ -587,7 +596,8 @@
                                        const DeviceVector &devices,
                                        audio_stream_type_t stream,
                                        audio_output_flags_t flags,
-                                       audio_io_handle_t *output)
+                                       audio_io_handle_t *output,
+                                       audio_attributes_t attributes)
 {
     mDevices = devices;
     sp<DeviceDescriptor> device = devices.getDeviceForOpening();
@@ -651,7 +661,8 @@
                                                    &lMixerConfig,
                                                    device,
                                                    &mLatency,
-                                                   mFlags);
+                                                   mFlags,
+                                                   attributes);
 
     if (status == NO_ERROR) {
         LOG_ALWAYS_FATAL_IF(*output == AUDIO_IO_HANDLE_NONE,
@@ -791,6 +802,16 @@
     mDevices = devices;
 }
 
+bool SwAudioOutputDescriptor::isUsageActiveOnDevice(audio_usage_t usage,
+                                                    sp<android::DeviceDescriptor> device) const {
+    if (device != nullptr && !mDevices.contains(device)) {
+        return false;
+    }
+    return std::any_of(mActiveClients.begin(), mActiveClients.end(),
+                       [usage](sp<TrackClientDescriptor> client) {
+                           return client->attributes().usage == usage; });
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<SourceClientDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
@@ -1016,6 +1037,17 @@
     return clientsForStream;
 }
 
+bool SwAudioOutputCollection::isUsageActiveOnDevice(audio_usage_t usage,
+                                                    sp<android::DeviceDescriptor> device) const {
+    for (size_t i = 0; i < this->size(); i++) {
+        const sp<SwAudioOutputDescriptor> outputDesc = this->valueAt(i);
+        if (outputDesc->isUsageActiveOnDevice(usage, device)) {
+            return true;
+        }
+    }
+    return false;
+}
+
 std::string SwAudioOutputDescriptor::info() const {
     std::string result;
     result.append("[" );
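
For illustration only (not part of the patch): a standalone sketch of the std::any_of scan used by isUsageActiveOnDevice() over the active clients, with plain stand-in types instead of the descriptor classes and smart pointers.

    // Standalone sketch; Client stands in for TrackClientDescriptor.
    #include <algorithm>
    #include <vector>

    enum Usage { USAGE_MEDIA, USAGE_ALARM };

    struct Client { Usage usage; };

    // True if any active client carries the requested usage.
    bool isUsageActive(const std::vector<Client>& activeClients, Usage usage) {
        return std::any_of(activeClients.begin(), activeClients.end(),
                           [usage](const Client& c) { return c.usage == usage; });
    }

    int main() {
        std::vector<Client> clients = {{USAGE_MEDIA}};
        return isUsageActive(clients, USAGE_ALARM) ? 1 : 0;  // no alarm client, exits 0
    }
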
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index dc0f466..3430f4b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -15,7 +15,7 @@
  */
 
 #define LOG_TAG "APM_AudioPolicyMix"
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 
 #include <algorithm>
 #include <iterator>
@@ -28,6 +28,9 @@
 #include "PolicyAudioPort.h"
 #include "IOProfile.h"
 #include <AudioOutputDescriptor.h>
+#include <android_media_audiopolicy.h>
+
+namespace audiopolicy_flags = android::media::audiopolicy;
 
 namespace android {
 namespace {
@@ -190,6 +193,12 @@
                     mix.mDeviceType, mix.mDeviceAddress.c_str());
             return BAD_VALUE;
         }
+        if (audiopolicy_flags::audio_mix_ownership()) {
+            if (mix.mToken == registeredMix->mToken) {
+                ALOGE("registerMix(): same mix already registered - skipping");
+                return BAD_VALUE;
+            }
+        }
     }
     if (!areMixCriteriaConsistent(mix.mCriteria)) {
         ALOGE("registerMix(): Mix contains inconsistent criteria "
@@ -212,12 +221,21 @@
 {
     for (size_t i = 0; i < size(); i++) {
         const sp<AudioPolicyMix>& registeredMix = itemAt(i);
-        if (mix.mDeviceType == registeredMix->mDeviceType
+        if (audiopolicy_flags::audio_mix_ownership()) {
+            if (mix.mToken == registeredMix->mToken) {
+                ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+                      mix.mDeviceType, mix.mDeviceAddress.c_str());
+                removeAt(i);
+                return NO_ERROR;
+            }
+        } else {
+            if (mix.mDeviceType == registeredMix->mDeviceType
                 && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0) {
-            ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
-                    mix.mDeviceType, mix.mDeviceAddress.c_str());
-            removeAt(i);
-            return NO_ERROR;
+                ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+                      mix.mDeviceType, mix.mDeviceAddress.c_str());
+                removeAt(i);
+                return NO_ERROR;
+            }
         }
     }
 
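
For illustration only (not part of the patch): a standalone sketch of the flag-gated unregister logic above: when the audio_mix_ownership flag is enabled, a mix is matched by its registration token; otherwise the legacy device type and address comparison is used. Mix, the token field and unregisterMix() below are simplified stand-ins, not the AudioPolicyMix classes.

    // Standalone sketch; stand-in types, not AudioPolicyMixCollection.
    #include <string>
    #include <vector>

    struct Mix { int token; int deviceType; std::string deviceAddress; };

    // Removes the first registered mix that matches, returning true on success.
    bool unregisterMix(std::vector<Mix>& registered, const Mix& mix, bool ownershipFlag) {
        for (auto it = registered.begin(); it != registered.end(); ++it) {
            const bool match = ownershipFlag
                    ? (mix.token == it->token)                      // match the exact registration
                    : (mix.deviceType == it->deviceType &&          // legacy: match by device
                       mix.deviceAddress == it->deviceAddress);
            if (match) {
                registered.erase(it);
                return true;
            }
        }
        return false;
    }
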
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
index 82f51ad..164f70a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
@@ -190,6 +190,18 @@
     return BAD_VALUE;
 }
 
+status_t checkIdentical(const sp<AudioProfile> &audioProfile,
+                        uint32_t samplingRate,
+                        audio_channel_mask_t channelMask,
+                        audio_format_t format) {
+    if (audioProfile->getFormat() == format &&
+        audioProfile->supportsChannels(channelMask) &&
+        audioProfile->supportsRate(samplingRate)) {
+        return NO_ERROR;
+    }
+    return BAD_VALUE;
+}
+
 status_t checkCompatibleSamplingRate(const sp<AudioProfile> &audioProfile,
                                      uint32_t samplingRate,
                                      uint32_t &updatedSamplingRate)
@@ -320,23 +332,43 @@
     return bestMatch > 0 ? NO_ERROR : BAD_VALUE;
 }
 
-status_t checkExactProfile(const AudioProfileVector& audioProfileVector,
-                           const uint32_t samplingRate,
-                           audio_channel_mask_t channelMask,
-                           audio_format_t format)
-{
+namespace {
+
+status_t checkProfile(const AudioProfileVector& audioProfileVector,
+                      const uint32_t samplingRate,
+                      audio_channel_mask_t channelMask,
+                      audio_format_t format,
+                      std::function<status_t(const sp<AudioProfile> &, uint32_t,
+                                             audio_channel_mask_t, audio_format_t)> check) {
     if (audioProfileVector.empty()) {
         return NO_ERROR;
     }
 
     for (const auto& profile : audioProfileVector) {
-        if (checkExact(profile, samplingRate, channelMask, format) == NO_ERROR) {
+        if (check(profile, samplingRate, channelMask, format) == NO_ERROR) {
             return NO_ERROR;
         }
     }
     return BAD_VALUE;
 }
 
+} // namespace
+
+status_t checkExactProfile(const AudioProfileVector& audioProfileVector,
+                           const uint32_t samplingRate,
+                           audio_channel_mask_t channelMask,
+                           audio_format_t format)
+{
+    return checkProfile(audioProfileVector, samplingRate, channelMask, format, checkExact);
+}
+
+status_t checkIdenticalProfile(const AudioProfileVector &audioProfileVector,
+                               const uint32_t samplingRate,
+                               audio_channel_mask_t channelMask,
+                               audio_format_t format) {
+    return checkProfile(audioProfileVector, samplingRate, channelMask, format, checkIdentical);
+}
+
 status_t checkCompatibleProfile(const AudioProfileVector &audioProfileVector,
                                 uint32_t &samplingRate,
                                 audio_channel_mask_t &channelMask,
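
For illustration only (not part of the patch): a standalone sketch of the pattern used by the anonymous-namespace checkProfile() helper above, where two nearly identical loops (exact vs. identical checks) are factored behind one function that takes the per-profile predicate as a std::function. Profile, Check and checkAny() are stand-in names, not the AOSP helpers.

    // Standalone sketch of the shared-loop pattern; stand-in types.
    #include <functional>
    #include <vector>

    struct Profile { int format; int rate; };

    using Check = std::function<bool(const Profile&, int format, int rate)>;

    // Returns true if the vector is empty (nothing to constrain) or any profile passes `check`.
    bool checkAny(const std::vector<Profile>& profiles, int format, int rate, const Check& check) {
        if (profiles.empty()) return true;
        for (const Profile& p : profiles) {
            if (check(p, format, rate)) return true;
        }
        return false;
    }

    // Callers pass either a strict or a relaxed predicate:
    bool checkIdentical(const std::vector<Profile>& v, int f, int r) {
        return checkAny(v, f, r, [](const Profile& p, int fmt, int rt) {
            return p.format == fmt && p.rate == rt;
        });
    }
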
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 2aee501..ad6977b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -57,8 +57,8 @@
 void TrackClientDescriptor::dump(String8 *dst, int spaces) const
 {
     ClientDescriptor::dump(dst, spaces);
-    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d\n", spaces, "",
-            mStream, mFlags, mActivityCount);
+    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d; InternalMute: %s\n",
+            spaces, "", mStream, mFlags, mActivityCount, mInternalMute ? "Yes" : "No");
     dst->appendFormat("%*sDAP Primary Mix: %p\n", spaces, "", mPrimaryMix.promote().get());
     if (!mSecondaryOutputs.empty()) {
         dst->appendFormat("%*sDAP Secondary Outputs: ", spaces - 2, "");
@@ -96,12 +96,14 @@
 SourceClientDescriptor::SourceClientDescriptor(audio_port_handle_t portId, uid_t uid,
          audio_attributes_t attributes, const struct audio_port_config &config,
          const sp<DeviceDescriptor>& srcDevice, audio_stream_type_t stream,
-         product_strategy_t strategy, VolumeSource volumeSource, bool isInternal) :
+         product_strategy_t strategy, VolumeSource volumeSource,
+         bool isInternal, bool isCallRx, bool isCallTx) :
     TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
         {config.sample_rate, config.channel_mask, config.format}, AUDIO_PORT_HANDLE_NONE,
         stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
         {} /* Sources do not support secondary outputs*/, nullptr),
-    mSrcDevice(srcDevice), mIsInternal(isInternal)
+    mSrcDevice(srcDevice), mIsInternal(isInternal),
+    mIsCallRx(isCallRx), mIsCallTx(isCallTx)
 {
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 9f7b8fc..46a04de 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -541,4 +541,14 @@
     return filteredDevices;
 }
 
+bool DeviceVector::areAllDevicesAttached() const
+{
+    for (const auto &device : *this) {
+        if (!device->isAttached()) {
+            return false;
+        }
+    }
+    return true;
+}
+
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index c85df0f..090da6c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -210,11 +210,13 @@
     }
 }
 
-bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId)
-{
+bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId,
+                                                      const effect_uuid_t* effectType) const {
     for (size_t i = 0; i < size(); ++i) {
         sp<EffectDescriptor> effect = valueAt(i);
-        if (effect->mSession == sessionId && effect->mIsOrphan) {
+        if (effect->mSession == sessionId && effect->mIsOrphan &&
+            (effectType == nullptr ||
+             memcmp(&effect->mDesc.type, effectType, sizeof(effect_uuid_t)) == 0)) {
             return true;
         }
     }
@@ -235,7 +237,7 @@
 }
 
 audio_io_handle_t EffectDescriptorCollection::getIoForSession(audio_session_t sessionId,
-                                                              const effect_uuid_t *effectType)
+                                                              const effect_uuid_t *effectType) const
 {
     for (size_t i = 0; i < size(); ++i) {
         sp<EffectDescriptor> effect = valueAt(i);
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index dd222de..991b103 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -33,17 +33,17 @@
     }
 }
 
-bool IOProfile::isCompatibleProfile(const DeviceVector &devices,
-                                    uint32_t samplingRate,
-                                    uint32_t *updatedSamplingRate,
-                                    audio_format_t format,
-                                    audio_format_t *updatedFormat,
-                                    audio_channel_mask_t channelMask,
-                                    audio_channel_mask_t *updatedChannelMask,
-                                    // FIXME type punning here
-                                    uint32_t flags,
-                                    bool exactMatchRequiredForInputFlags) const
-{
+IOProfile::CompatibilityScore IOProfile::getCompatibilityScore(
+        const android::DeviceVector &devices,
+        uint32_t samplingRate,
+        uint32_t *updatedSamplingRate,
+        audio_format_t format,
+        audio_format_t *updatedFormat,
+        audio_channel_mask_t channelMask,
+        audio_channel_mask_t *updatedChannelMask,
+        // FIXME type punning here
+        uint32_t flags,
+        bool exactMatchRequiredForInputFlags) const {
     const bool isPlaybackThread =
             getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
     const bool isRecordThread =
@@ -51,13 +51,13 @@
     ALOG_ASSERT(isPlaybackThread != isRecordThread);
     if (!areAllDevicesSupported(devices) ||
             !isCompatibleProfileForFlags(flags, exactMatchRequiredForInputFlags)) {
-        return false;
+        return NO_MATCH;
     }
 
     if (!audio_is_valid_format(format) ||
             (isPlaybackThread && (samplingRate == 0 || !audio_is_output_channel(channelMask))) ||
             (isRecordThread && (!audio_is_input_channel(channelMask)))) {
-         return false;
+         return NO_MATCH;
     }
 
     audio_format_t myUpdatedFormat = format;
@@ -69,32 +69,46 @@
         .channel_mask = channelMask,
         .format = format,
     };
+    auto result = NO_MATCH;
     if (isRecordThread)
     {
         if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
-            if (checkExactAudioProfile(&config) != NO_ERROR) {
-                return false;
+            if (checkIdenticalAudioProfile(&config) != NO_ERROR) {
+                return result;
             }
-        } else if (checkExactAudioProfile(&config) != NO_ERROR && checkCompatibleAudioProfile(
-                myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) != NO_ERROR) {
-            return false;
+            result = EXACT_MATCH;
+        } else if (checkExactAudioProfile(&config) == NO_ERROR) {
+            result = EXACT_MATCH;
+        } else if (checkCompatibleAudioProfile(
+                myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) == NO_ERROR) {
+            result = PARTIAL_MATCH;
+        } else {
+            return result;
         }
     } else {
-        if (checkExactAudioProfile(&config) != NO_ERROR) {
-            return false;
+        if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0 ||
+            (flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != 0) {
+            if (checkIdenticalAudioProfile(&config) != NO_ERROR) {
+                return result;
+            }
+            result = EXACT_MATCH;
+        } else if (checkExactAudioProfile(&config) == NO_ERROR) {
+            result = EXACT_MATCH;
+        } else {
+            return result;
         }
     }
 
-    if (updatedSamplingRate != NULL) {
+    if (updatedSamplingRate != nullptr) {
         *updatedSamplingRate = myUpdatedSamplingRate;
     }
-    if (updatedFormat != NULL) {
+    if (updatedFormat != nullptr) {
         *updatedFormat = myUpdatedFormat;
     }
-    if (updatedChannelMask != NULL) {
+    if (updatedChannelMask != nullptr) {
         *updatedChannelMask = myUpdatedChannelMask;
     }
-    return true;
+    return result;
 }
 
 bool IOProfile::areAllDevicesSupported(const DeviceVector &devices) const {
@@ -156,9 +170,9 @@
         for (const auto sampleRate : profile->getSampleRates()) {
             for (const auto channelMask : profile->getChannels()) {
                 const audio_config_base_t config = {
-                        .format = profile->getFormat(),
                         .sample_rate = sampleRate,
-                        .channel_mask = channelMask
+                        .channel_mask = channelMask,
+                        .format = profile->getFormat(),
                 };
                 for (const auto mixerBehavior : mMixerBehaviors) {
                     mixerAttributes->push_back({
diff --git a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
index ce8178f..cd54626 100644
--- a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
@@ -20,6 +20,7 @@
 #include "PolicyAudioPort.h"
 #include "HwModule.h"
 #include <policy.h>
+#include <system/audio.h>
 
 #ifndef ARRAY_SIZE
 #define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
@@ -63,21 +64,11 @@
 
 status_t PolicyAudioPort::checkExactAudioProfile(const struct audio_port_config *config) const
 {
-    status_t status = NO_ERROR;
-    auto config_mask = config->config_mask;
-    if (config_mask & AUDIO_PORT_CONFIG_GAIN) {
-        config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
-        status = asAudioPort()->checkGain(&config->gain, config->gain.index);
-        if (status != NO_ERROR) {
-            return status;
-        }
-    }
-    if (config_mask != 0) {
-        // TODO should we check sample_rate / channel_mask / format separately?
-        status = checkExactProfile(asAudioPort()->getAudioProfiles(), config->sample_rate,
-                config->channel_mask, config->format);
-    }
-    return status;
+    return checkAudioProfile(config, checkExactProfile);
+}
+
+status_t PolicyAudioPort::checkIdenticalAudioProfile(const struct audio_port_config *config) const {
+    return checkAudioProfile(config, checkIdenticalProfile);
 }
 
 void PolicyAudioPort::pickSamplingRate(uint32_t &pickedRate,
@@ -266,4 +257,28 @@
             asAudioPort()->getName().c_str(), samplingRate, channelMask, format);
 }
 
+status_t PolicyAudioPort::checkAudioProfile(
+        const struct audio_port_config *config,
+        std::function<status_t(const AudioProfileVector &,
+                               const uint32_t,
+                               audio_channel_mask_t,
+                               audio_format_t)> checkProfile) const {
+    status_t status = NO_ERROR;
+    auto config_mask = config->config_mask;
+    if (config_mask & AUDIO_PORT_CONFIG_GAIN) {
+        config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
+        status = asAudioPort()->checkGain(&config->gain, config->gain.index);
+        if (status != NO_ERROR) {
+            return status;
+        }
+    }
+    if (config_mask != 0) {
+        // TODO should we check sample_rate / channel_mask / format separately?
+        status = checkProfile(asAudioPort()->getAudioProfiles(), config->sample_rate,
+                   config->channel_mask, config->format);
+    }
+    return status;
+
+}
+
 } // namespace android
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 86600f4..7d529df 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -18,6 +18,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -31,41 +32,49 @@
     vendor: true,
     src: ":a2dp_in_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "a2dp_audio_policy_configuration.xml",
     vendor: true,
     src: ":a2dp_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "audio_policy_configuration.xml",
     vendor: true,
     src: ":audio_policy_configuration_generic",
 }
+
 prebuilt_etc {
     name: "r_submix_audio_policy_configuration.xml",
     vendor: true,
     src: ":r_submix_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "audio_policy_volumes.xml",
     vendor: true,
     src: ":audio_policy_volumes",
 }
+
 prebuilt_etc {
     name: "default_volume_tables.xml",
     vendor: true,
     src: ":default_volume_tables",
 }
+
 prebuilt_etc {
     name: "surround_sound_configuration_5_0.xml",
     vendor: true,
     src: ":surround_sound_configuration_5_0",
 }
+
 prebuilt_etc {
     name: "usb_audio_policy_configuration.xml",
     vendor: true,
     src: ":usb_audio_policy_configuration",
 }
+
 prebuilt_etc {
     name: "primary_audio_policy_configuration.xml",
     src: ":primary_audio_policy_configuration",
@@ -76,50 +85,62 @@
     name: "a2dp_in_audio_policy_configuration",
     srcs: ["a2dp_in_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "a2dp_audio_policy_configuration",
     srcs: ["a2dp_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "primary_audio_policy_configuration",
     srcs: ["primary_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "surround_sound_configuration_5_0",
     srcs: ["surround_sound_configuration_5_0.xml"],
 }
+
 filegroup {
     name: "default_volume_tables",
     srcs: ["default_volume_tables.xml"],
 }
+
 filegroup {
     name: "audio_policy_volumes",
     srcs: ["audio_policy_volumes.xml"],
 }
+
 filegroup {
     name: "audio_policy_configuration_generic",
     srcs: ["audio_policy_configuration_generic.xml"],
 }
+
 filegroup {
     name: "audio_policy_configuration_generic_configurable",
     srcs: ["audio_policy_configuration_generic_configurable.xml"],
 }
+
 filegroup {
     name: "usb_audio_policy_configuration",
     srcs: ["usb_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "r_submix_audio_policy_configuration",
     srcs: ["r_submix_audio_policy_configuration.xml"],
 }
+
 filegroup {
     name: "bluetooth_audio_policy_configuration_7_0",
     srcs: ["bluetooth_audio_policy_configuration_7_0.xml"],
 }
+
 filegroup {
     name: "bluetooth_with_le_audio_policy_configuration_7_0",
     srcs: ["bluetooth_with_le_audio_policy_configuration_7_0.xml"],
 }
+
 filegroup {
     name: "hearing_aid_audio_policy_configuration_7_0",
     srcs: ["hearing_aid_audio_policy_configuration_7_0.xml"],
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index 0034a04..d8aac37 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -31,10 +32,10 @@
     name: "libaudiopolicyengine_common",
     srcs: [
         "src/EngineBase.cpp",
+        "src/LastRemovableMediaDevices.cpp",
         "src/ProductStrategy.cpp",
         "src/VolumeCurve.cpp",
         "src/VolumeGroup.cpp",
-        "src/LastRemovableMediaDevices.cpp",
     ],
     cflags: [
         "-Wall",
@@ -42,10 +43,10 @@
         "-Wextra",
     ],
     header_libs: [
-        "libbase_headers",
         "libaudiopolicycommon",
         "libaudiopolicyengine_common_headers",
         "libaudiopolicyengine_interface_headers",
+        "libbase_headers",
     ],
     export_header_lib_headers: [
         "libaudiopolicyengine_common_headers",
@@ -58,7 +59,10 @@
         "libaudiopolicycomponents",
     ],
     whole_static_libs: [
-        "server_configurable_flags",
         "com.android.media.audio-aconfig-cc",
+        "server_configurable_flags",
+    ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
     ],
 }
diff --git a/services/audiopolicy/engine/common/src/VolumeCurve.cpp b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
index fccbc60..9411155 100644
--- a/services/audiopolicy/engine/common/src/VolumeCurve.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
@@ -69,7 +69,7 @@
         return mCurvePoints[nbCurvePoints - 1].mAttenuationInMb / 100.0f;
     }
     if (indexInUiPosition == 0) {
-        if (indexInUiPosition != mCurvePoints[0].mIndex) {
+        if ((size_t)volIdx != mCurvePoints[0].mIndex) {
             return VOLUME_MIN_DB; // out of bounds
         }
         return mCurvePoints[0].mAttenuationInMb / 100.0f;
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 12597de..ab2c134 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -32,7 +33,7 @@
     ],
     header_libs: [
         "libaudio_system_headers",
-        "libmedia_headers",
         "libaudioclient_headers",
+        "libmedia_headers",
     ],
 }
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index ca78ce7..3f9ae19 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -22,6 +22,7 @@
 #include <string>
 #include <string>
 #include <vector>
+#include <unordered_map>
 
 #define LOG_TAG "APM::AudioPolicyEngine/Config"
 //#define LOG_NDEBUG 0
@@ -51,6 +52,27 @@
 
 namespace {
 
+ConversionResult<std::string> aidl2legacy_AudioHalProductStrategy_ProductStrategyType(int id) {
+    using AudioProductStrategyType = media::audio::common::AudioProductStrategyType;
+
+#define STRATEGY_ENTRY(name) {static_cast<int>(AudioProductStrategyType::name), "STRATEGY_" #name}
+    static const std::unordered_map<int, std::string> productStrategyMap = {STRATEGY_ENTRY(MEDIA),
+                            STRATEGY_ENTRY(PHONE),
+                            STRATEGY_ENTRY(SONIFICATION),
+                            STRATEGY_ENTRY(SONIFICATION_RESPECTFUL),
+                            STRATEGY_ENTRY(DTMF),
+                            STRATEGY_ENTRY(ENFORCED_AUDIBLE),
+                            STRATEGY_ENTRY(TRANSMITTED_THROUGH_SPEAKER),
+                            STRATEGY_ENTRY(ACCESSIBILITY)};
+#undef STRATEGY_ENTRY
+
+    auto it = productStrategyMap.find(id);
+    if (it == productStrategyMap.end()) {
+        return base::unexpected(BAD_VALUE);
+    }
+    return it->second;
+}
+
 ConversionResult<AttributesGroup> aidl2legacy_AudioHalAttributeGroup_AttributesGroup(
         const media::audio::common::AudioHalAttributesGroup& aidl) {
     AttributesGroup legacy;
@@ -65,7 +87,8 @@
 ConversionResult<ProductStrategy> aidl2legacy_AudioHalProductStrategy_ProductStrategy(
         const media::audio::common::AudioHalProductStrategy& aidl) {
     ProductStrategy legacy;
-    legacy.name = "strategy_" + std::to_string(aidl.id);
+    legacy.name = VALUE_OR_RETURN(
+                    aidl2legacy_AudioHalProductStrategy_ProductStrategyType(aidl.id));
     legacy.attributesGroups = VALUE_OR_RETURN(convertContainer<AttributesGroups>(
                     aidl.attributesGroups,
                     aidl2legacy_AudioHalAttributeGroup_AttributesGroup));
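
For illustration only (not part of the patch): a standalone sketch of the id-to-name mapping introduced above, where a macro-built unordered_map translates numeric product strategy ids into "STRATEGY_" names and unknown ids are reported as an error. StrategyType, its values and std::optional are stand-ins for the AIDL enum and ConversionResult.

    // Standalone sketch; std::optional stands in for ConversionResult, enum values are illustrative.
    #include <optional>
    #include <string>
    #include <unordered_map>

    enum class StrategyType { MEDIA = 0, PHONE = 1, SONIFICATION = 2 };

    std::optional<std::string> strategyName(int id) {
    #define STRATEGY_ENTRY(name) {static_cast<int>(StrategyType::name), "STRATEGY_" #name}
        static const std::unordered_map<int, std::string> kMap = {
                STRATEGY_ENTRY(MEDIA), STRATEGY_ENTRY(PHONE), STRATEGY_ENTRY(SONIFICATION)};
    #undef STRATEGY_ENTRY
        auto it = kMap.find(id);
        if (it == kMap.end()) return std::nullopt;  // unknown id -> error, like BAD_VALUE
        return it->second;
    }
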
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
index 5d1aa16..2df51d0 100644
--- a/services/audiopolicy/engine/config/tests/Android.bp
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -27,8 +28,8 @@
     data: [":audiopolicy_engineconfig_files"],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     test_suites: ["device-tests"],
diff --git a/services/audiopolicy/engine/config/tests/resources/Android.bp b/services/audiopolicy/engine/config/tests/resources/Android.bp
index 9cee978..99d62a3 100644
--- a/services/audiopolicy/engine/config/tests/resources/Android.bp
+++ b/services/audiopolicy/engine/config/tests/resources/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/interface/Android.bp b/services/audiopolicy/engine/interface/Android.bp
index 5dd5adb..b1f7666 100644
--- a/services/audiopolicy/engine/interface/Android.bp
+++ b/services/audiopolicy/engine/interface/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index eb2e2f4..a0a4bdf 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -19,8 +20,8 @@
     srcs: [
         "src/Engine.cpp",
         "src/EngineInstance.cpp",
-        "src/Stream.cpp",
         "src/InputSource.cpp",
+        "src/Stream.cpp",
     ],
     cflags: [
         "-Wall",
@@ -29,10 +30,10 @@
     ],
     local_include_dirs: ["include"],
     header_libs: [
-        "libbase_headers",
         "libaudiopolicycommon",
         "libaudiopolicyengine_interface_headers",
         "libaudiopolicyengineconfigurable_interface_headers",
+        "libbase_headers",
     ],
     static_libs: [
         "libaudiopolicyengine_common",
@@ -40,17 +41,20 @@
         "libaudiopolicyengineconfigurable_pfwwrapper",
 
     ],
-  shared_libs: [
+    shared_libs: [
         "libaudio_aidl_conversion_common_cpp",
         "libaudiofoundation",
+        "libaudiopolicy",
         "libaudiopolicycomponents",
         "libbase",
-        "liblog",
         "libcutils",
-        "libutils",
+        "liblog",
         "libmedia_helper",
-        "libaudiopolicy",
         "libparameter",
+        "libutils",
         "libxml2",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
diff --git a/services/audiopolicy/engineconfigurable/config/Android.bp b/services/audiopolicy/engineconfigurable/config/Android.bp
index b3d1f97..8dd13e8 100644
--- a/services/audiopolicy/engineconfigurable/config/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/Android.bp
@@ -17,6 +17,7 @@
 // Root soong_namespace for common components
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -30,10 +31,12 @@
     vendor: true,
     src: ":audio_policy_engine_criteria",
 }
+
 filegroup {
     name: "audio_policy_engine_criterion_types_template",
     srcs: ["example/common/audio_policy_engine_criterion_types.xml.in"],
 }
+
 filegroup {
     name: "audio_policy_engine_criteria",
     srcs: ["example/common/audio_policy_engine_criteria.xml"],
diff --git a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
index e46b60f..7e429ef 100644
--- a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
@@ -23,6 +23,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -36,22 +37,25 @@
     vendor: true,
     src: ":audio_policy_engine_configuration",
     required: [
-        ":audio_policy_engine_criterion_types.xml",
         ":audio_policy_engine_criteria.xml",
+        ":audio_policy_engine_criterion_types.xml",
         ":audio_policy_engine_product_strategies.xml",
         ":audio_policy_engine_volumes.xml",
     ],
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_product_strategies.xml",
     vendor: true,
     src: "audio_policy_engine_product_strategies.xml",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_volumes.xml",
     vendor: true,
     src: ":audio_policy_engine_volumes",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_criterion_types.xml",
     vendor: true,
@@ -65,39 +69,44 @@
     name: "audio_policy_engine_criterion_types",
     defaults: ["buildpolicycriteriontypesrule"],
     srcs: [
-        ":audio_policy_configuration_top_file",
         ":audio_policy_configuration_files",
+        ":audio_policy_configuration_top_file",
     ],
 }
+
 filegroup {
     name: "audio_policy_configuration_files",
     srcs: [
-        ":r_submix_audio_policy_configuration",
-        ":default_volume_tables",
         ":audio_policy_volumes",
-        ":surround_sound_configuration_5_0",
+        ":default_volume_tables",
         ":primary_audio_policy_configuration",
+        ":r_submix_audio_policy_configuration",
+        ":surround_sound_configuration_5_0",
     ],
 }
+
 filegroup {
-    name : "audio_policy_configuration_top_file",
+    name: "audio_policy_configuration_top_file",
     srcs: [":audio_policy_configuration_generic"],
 }
+
 filegroup {
     name: "audio_policy_engine_configuration",
     srcs: ["audio_policy_engine_configuration.xml"],
 }
+
 filegroup {
     name: "audio_policy_engine_volumes",
     srcs: ["audio_policy_engine_volumes.xml"],
 }
+
 filegroup {
     name: "audio_policy_engine_configuration_files",
     srcs: [
         ":audio_policy_engine_configuration",
-        "audio_policy_engine_product_strategies.xml",
-        ":audio_policy_engine_volumes",
-        ":audio_policy_engine_criterion_types",
         ":audio_policy_engine_criteria",
+        ":audio_policy_engine_criterion_types",
+        ":audio_policy_engine_volumes",
+        "audio_policy_engine_product_strategies.xml",
     ],
 }
diff --git a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
index ad6eeb1..12a554d 100644
--- a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
@@ -18,12 +18,13 @@
 
 soong_namespace {
     imports: [
-        "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
         "frameworks/av/services/audiopolicy/config",
+        "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
     ],
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -37,17 +38,19 @@
     vendor: true,
     src: ":audio_policy_engine_configuration",
     required: [
-        "audio_policy_engine_criterion_types.xml",
-        "audio_policy_engine_criteria.xml",
-        "audio_policy_engine_product_strategies.xml",
         ":audio_policy_engine_volumes.xml",
+        "audio_policy_engine_criteria.xml",
+        "audio_policy_engine_criterion_types.xml",
+        "audio_policy_engine_product_strategies.xml",
     ],
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_product_strategies.xml",
     vendor: true,
     src: "audio_policy_engine_product_strategies.xml",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_criterion_types.xml",
     vendor: true,
@@ -61,31 +64,34 @@
     name: "audio_policy_engine_criterion_types",
     defaults: ["buildpolicycriteriontypesrule"],
     srcs: [
-        ":audio_policy_configuration_top_file",
         ":audio_policy_configuration_files",
+        ":audio_policy_configuration_top_file",
     ],
 }
+
 filegroup {
     name: "audio_policy_configuration_files",
     srcs: [
-        ":r_submix_audio_policy_configuration",
-        ":default_volume_tables",
         ":audio_policy_volumes",
-        ":surround_sound_configuration_5_0",
+        ":default_volume_tables",
         ":primary_audio_policy_configuration",
+        ":r_submix_audio_policy_configuration",
+        ":surround_sound_configuration_5_0",
     ],
 }
+
 filegroup {
-    name : "audio_policy_configuration_top_file",
+    name: "audio_policy_configuration_top_file",
     srcs: [":audio_policy_configuration_generic"],
 }
+
 filegroup {
     name: "audio_policy_engine_configuration_files",
     srcs: [
         ":audio_policy_engine_configuration",
-        "audio_policy_engine_product_strategies.xml",
-        ":audio_policy_engine_volumes",
-        ":audio_policy_engine_criterion_types",
         ":audio_policy_engine_criteria",
+        ":audio_policy_engine_criterion_types",
+        ":audio_policy_engine_volumes",
+        "audio_policy_engine_product_strategies.xml",
     ],
 }
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
index 773a99a..b0a4dfd 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
@@ -23,6 +23,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -36,27 +37,31 @@
     vendor: true,
     src: ":audio_policy_engine_configuration",
     required: [
-        ":audio_policy_engine_criterion_types.xml",
         ":audio_policy_engine_criteria.xml",
+        ":audio_policy_engine_criterion_types.xml",
         ":audio_policy_engine_product_strategies.xml",
         ":audio_policy_engine_volumes.xml",
     ],
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_product_strategies.xml",
     vendor: true,
     src: "audio_policy_engine_product_strategies.xml",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_stream_volumes.xml",
     vendor: true,
     src: ":audio_policy_engine_stream_volumes",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_default_stream_volumes.xml",
     vendor: true,
     src: ":audio_policy_engine_default_stream_volumes",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_criterion_types.xml",
     vendor: true,
@@ -70,44 +75,50 @@
     name: "audio_policy_engine_criterion_types",
     defaults: ["buildpolicycriteriontypesrule"],
     srcs: [
-        ":audio_policy_configuration_top_file",
         ":audio_policy_configuration_files",
+        ":audio_policy_configuration_top_file",
     ],
 }
+
 filegroup {
     name: "audio_policy_configuration_files",
     srcs: [
-        ":r_submix_audio_policy_configuration",
-        ":default_volume_tables",
         ":audio_policy_volumes",
-        ":surround_sound_configuration_5_0",
+        ":default_volume_tables",
         ":primary_audio_policy_configuration",
+        ":r_submix_audio_policy_configuration",
+        ":surround_sound_configuration_5_0",
     ],
 }
+
 filegroup {
-    name : "audio_policy_configuration_top_file",
+    name: "audio_policy_configuration_top_file",
     srcs: [":audio_policy_configuration_generic"],
 }
+
 filegroup {
     name: "audio_policy_engine_configuration",
     srcs: ["audio_policy_engine_configuration.xml"],
 }
+
 filegroup {
     name: "audio_policy_engine_stream_volumes",
     srcs: ["audio_policy_engine_stream_volumes.xml"],
 }
+
 filegroup {
     name: "audio_policy_engine_default_stream_volumes",
     srcs: ["audio_policy_engine_default_stream_volumes.xml"],
 }
+
 filegroup {
     name: "audio_policy_engine_configuration_files",
     srcs: [
         ":audio_policy_engine_configuration",
-        "audio_policy_engine_product_strategies.xml",
-        ":audio_policy_engine_stream_volumes",
-        ":audio_policy_engine_default_stream_volumes",
-        ":audio_policy_engine_criterion_types",
         ":audio_policy_engine_criteria",
+        ":audio_policy_engine_criterion_types",
+        ":audio_policy_engine_default_stream_volumes",
+        ":audio_policy_engine_stream_volumes",
+        "audio_policy_engine_product_strategies.xml",
     ],
 }
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
index ee62d5e..7fe111f 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
@@ -17,6 +17,7 @@
 // Root soong_namespace for common components
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -31,18 +32,21 @@
     src: ":PolicyClass",
     sub_dir: "parameter-framework/Structure/Policy",
 }
+
 prebuilt_etc {
     name: "PolicySubsystem.xml",
     vendor: true,
     src: ":PolicySubsystem",
     sub_dir: "parameter-framework/Structure/Policy",
 }
+
 prebuilt_etc {
     name: "PolicySubsystem-CommonTypes.xml",
     vendor: true,
     src: ":buildcommontypesstructure_gen",
     sub_dir: "parameter-framework/Structure/Policy",
 }
+
 genrule {
     name: "buildcommontypesstructure_gen",
     defaults: ["buildcommontypesstructurerule"],
@@ -52,34 +56,42 @@
     name: "product_strategies_structure_template",
     srcs: ["examples/common/Structure/ProductStrategies.xml.in"],
 }
+
 filegroup {
     name: "PolicySubsystem",
     srcs: ["examples/common/Structure/PolicySubsystem.xml"],
 }
+
 filegroup {
     name: "PolicySubsystem-no-strategy",
     srcs: ["examples/common/Structure/PolicySubsystem-no-strategy.xml"],
 }
+
 filegroup {
     name: "common_types_structure_template",
     srcs: ["examples/common/Structure/PolicySubsystem-CommonTypes.xml.in"],
 }
+
 filegroup {
     name: "PolicyClass",
     srcs: ["examples/common/Structure/PolicyClass.xml"],
 }
+
 filegroup {
     name: "volumes.pfw",
     srcs: ["examples/Settings/volumes.pfw"],
 }
+
 filegroup {
     name: "device_for_input_source.pfw",
     srcs: ["examples/Settings/device_for_input_source.pfw"],
 }
+
 filegroup {
     name: "ParameterFrameworkConfigurationPolicy.userdebug.xml",
     srcs: ["examples/ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "ParameterFrameworkConfigurationPolicy.user.xml",
     srcs: ["examples/ParameterFrameworkConfigurationPolicy.user.xml"],
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
index 7d2d293..42585e9 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
@@ -18,8 +18,8 @@
 
 soong_namespace {
     imports: [
-        "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
         "frameworks/av/services/audiopolicy/config",
+        "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
     ],
 }
 
@@ -27,6 +27,7 @@
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -42,6 +43,7 @@
     sub_dir: "parameter-framework/Structure/Policy",
     required: ["libpolicy-subsystem"],
 }
+
 genrule {
     name: "buildstrategiesstructure_gen",
     defaults: ["buildstrategiesstructurerule"],
@@ -61,23 +63,25 @@
     src: ":domaingeneratorpolicyrule_gen",
     sub_dir: "parameter-framework/Settings/Policy",
     required: [
-        "ProductStrategies.xml",
         "PolicyClass.xml",
-        "PolicySubsystem.xml",
         "PolicySubsystem-CommonTypes.xml",
+        "PolicySubsystem.xml",
+        "ProductStrategies.xml",
     ],
 }
+
 genrule {
     name: "domaingeneratorpolicyrule_gen",
     enabled: false, // TODO: This module fails to build
     defaults: ["domaingeneratorpolicyrule"],
     srcs: [
-        ":audio_policy_pfw_toplevel",
-        ":audio_policy_pfw_structure_files",
         ":audio_policy_engine_criterion_types",
+        ":audio_policy_pfw_structure_files",
+        ":audio_policy_pfw_toplevel",
         ":edd_files",
     ],
 }
+
 filegroup {
     name: "edd_files",
     srcs: [
@@ -86,11 +90,13 @@
         "Settings/device_for_product_strategies.pfw",
     ],
 }
+
 // This is for Settings generation, must use socket port, so userdebug version is required
 filegroup {
     name: "audio_policy_pfw_toplevel",
     srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "audio_policy_pfw_structure_files",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
index f825e5f..efde298 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
@@ -18,9 +18,9 @@
 
 soong_namespace {
     imports: [
+        "frameworks/av/services/audiopolicy/config",
         "frameworks/av/services/audiopolicy/engineconfigurable/config/example/caremu",
         "frameworks/av/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car",
-        "frameworks/av/services/audiopolicy/config",
     ],
 }
 
@@ -28,6 +28,7 @@
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -43,6 +44,7 @@
     sub_dir: "parameter-framework/Structure/Policy",
     required: ["libpolicy-subsystem"],
 }
+
 genrule {
     name: "buildstrategiesstructure_gen",
     defaults: ["buildstrategiesstructurerule"],
@@ -62,23 +64,25 @@
     src: ":domaingeneratorpolicyrule_gen",
     sub_dir: "parameter-framework/Settings/Policy",
     required: [
-        "ProductStrategies.xml",
         "PolicyClass.xml",
-        "PolicySubsystem.xml",
         "PolicySubsystem-CommonTypes.xml",
+        "PolicySubsystem.xml",
+        "ProductStrategies.xml",
     ],
 }
+
 genrule {
     name: "domaingeneratorpolicyrule_gen",
     enabled: false, // TODO: This module fails to build
     defaults: ["domaingeneratorpolicyrule"],
     srcs: [
-        ":audio_policy_pfw_toplevel",
-        ":audio_policy_pfw_structure_files",
         ":audio_policy_engine_criterion_types",
+        ":audio_policy_pfw_structure_files",
+        ":audio_policy_pfw_toplevel",
         ":edd_files",
     ],
 }
+
 filegroup {
     name: "edd_files",
     srcs: [
@@ -87,11 +91,13 @@
         "Settings/device_for_product_strategies.pfw",
     ],
 }
+
 // This is for Settings generation, must use socket port, so userdebug version is required
 filegroup {
     name: "audio_policy_pfw_toplevel",
     srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "audio_policy_pfw_structure_files",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
index 4a83cbc..474094e 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
@@ -18,8 +18,8 @@
 
 soong_namespace {
     imports: [
-        "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
         "frameworks/av/services/audiopolicy/config",
+        "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
     ],
 }
 
@@ -27,6 +27,7 @@
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -42,6 +43,7 @@
     sub_dir: "parameter-framework/Structure/Policy",
     required: ["libpolicy-subsystem"],
 }
+
 genrule {
     name: "buildstrategiesstructure_gen",
     defaults: ["buildstrategiesstructurerule"],
@@ -61,45 +63,49 @@
     src: ":domaingeneratorpolicyrule_gen",
     sub_dir: "parameter-framework/Settings/Policy",
     required: [
-        "ProductStrategies.xml",
         "PolicyClass.xml",
-        "PolicySubsystem.xml",
         "PolicySubsystem-CommonTypes.xml",
+        "PolicySubsystem.xml",
+        "ProductStrategies.xml",
     ],
 }
+
 genrule {
     name: "domaingeneratorpolicyrule_gen",
     enabled: false, // TODO: This module fails to build
     defaults: ["domaingeneratorpolicyrule"],
     srcs: [
-        ":audio_policy_pfw_toplevel",
-        ":audio_policy_pfw_structure_files",
         ":audio_policy_engine_criterion_types",
+        ":audio_policy_pfw_structure_files",
+        ":audio_policy_pfw_toplevel",
         ":edd_files",
     ],
 }
+
 filegroup {
     name: "edd_files",
     srcs: [
         ":device_for_input_source.pfw",
         ":volumes.pfw",
-        "Settings/device_for_product_strategy_media.pfw",
         "Settings/device_for_product_strategy_accessibility.pfw",
         "Settings/device_for_product_strategy_dtmf.pfw",
         "Settings/device_for_product_strategy_enforced_audible.pfw",
+        "Settings/device_for_product_strategy_media.pfw",
+        "Settings/device_for_product_strategy_patch.pfw",
         "Settings/device_for_product_strategy_phone.pfw",
+        "Settings/device_for_product_strategy_rerouting.pfw",
         "Settings/device_for_product_strategy_sonification.pfw",
         "Settings/device_for_product_strategy_sonification_respectful.pfw",
         "Settings/device_for_product_strategy_transmitted_through_speaker.pfw",
-        "Settings/device_for_product_strategy_rerouting.pfw",
-        "Settings/device_for_product_strategy_patch.pfw",
     ],
 }
+
 // This is for Settings generation, must use socket port, so userdebug version is required
 filegroup {
     name: "audio_policy_pfw_toplevel",
     srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "audio_policy_pfw_structure_files",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
index 89ab892..aba9767 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
@@ -18,12 +18,13 @@
 
 soong_namespace {
     imports: [
-        "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
         "frameworks/av/services/audiopolicy/config",
+        "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
     ],
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -41,8 +42,8 @@
     sub_dir: "parameter-framework/Settings/Policy",
     required: [
         "PolicyClass.xml",
-        "PolicySubsystem.xml",
         "PolicySubsystem-CommonTypes.xml",
+        "PolicySubsystem.xml",
     ],
 }
 
@@ -51,16 +52,18 @@
     enabled: false, // TODO: This module fails to build
     defaults: ["domaingeneratorpolicyrule"],
     srcs: [
-        ":audio_policy_pfw_toplevel",
-        ":audio_policy_pfw_structure_files",
         ":audio_policy_engine_criterion_types",
+        ":audio_policy_pfw_structure_files",
+        ":audio_policy_pfw_toplevel",
         ":edd_files",
     ],
 }
+
 filegroup {
     name: "audio_policy_pfw_toplevel",
     srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "audio_policy_pfw_structure_files",
     srcs: [
@@ -69,13 +72,15 @@
         ":buildcommontypesstructure_gen",
     ],
 }
+
 filegroup {
     name: "edd_files",
     srcs: [
-        "device_for_input_source.pfw",
         ":volumes.pfw",
+        "device_for_input_source.pfw",
     ],
 }
+
 prebuilt_etc {
     name: "PolicySubsystem.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
index 4880547..77677a1 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
@@ -18,12 +18,13 @@
 
 soong_namespace {
     imports: [
-        "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
         "frameworks/av/services/audiopolicy/config",
+        "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
     ],
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -41,25 +42,28 @@
     sub_dir: "parameter-framework/Settings/Policy",
     required: [
         "PolicyClass.xml",
-        "PolicySubsystem.xml",
         "PolicySubsystem-CommonTypes.xml",
+        "PolicySubsystem.xml",
     ],
 }
+
 genrule {
     name: "domaingeneratorpolicyrule_gen",
     enabled: false, // TODO: This module fails to build
     defaults: ["domaingeneratorpolicyrule"],
     srcs: [
-        ":audio_policy_pfw_toplevel",
-        ":audio_policy_pfw_structure_files",
         ":audio_policy_engine_criterion_types",
+        ":audio_policy_pfw_structure_files",
+        ":audio_policy_pfw_toplevel",
         ":edd_files",
     ],
 }
+
 filegroup {
     name: "audio_policy_pfw_toplevel",
     srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
 }
+
 filegroup {
     name: "audio_policy_pfw_structure_files",
     srcs: [
@@ -68,14 +72,16 @@
         ":buildcommontypesstructure_gen",
     ],
 }
+
 filegroup {
     name: "edd_files",
     srcs: [
-        "device_for_strategies.pfw",
-        ":volumes.pfw",
         ":device_for_input_source.pfw",
+        ":volumes.pfw",
+        "device_for_strategies.pfw",
     ],
 }
+
 prebuilt_etc {
     name: "PolicySubsystem.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index f7159c5..3dc2229 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -10,11 +11,11 @@
 cc_library_shared {
     name: "libpolicy-subsystem",
     srcs: [
-        "PolicySubsystemBuilder.cpp",
-        "PolicySubsystem.cpp",
         "InputSource.cpp",
-        "Stream.cpp",
+        "PolicySubsystem.cpp",
+        "PolicySubsystemBuilder.cpp",
         "ProductStrategy.cpp",
+        "Stream.cpp",
     ],
     cflags: [
         "-Wall",
@@ -24,11 +25,11 @@
         "-fvisibility=hidden",
     ],
     header_libs: [
-        "libbase_headers",
-        "libaudiopolicycommon",
         "libaudioclient_headers",
+        "libaudiopolicycommon",
         "libaudiopolicyengine_interface_headers",
         "libaudiopolicyengineconfigurable_interface_headers",
+        "libbase_headers",
     ],
     static_libs: [
         "libaudiopolicyengine_common",
@@ -38,8 +39,8 @@
         "libaudiopolicycomponents",
         "libaudiopolicyengineconfigurable",
         "liblog",
-        "libutils",
         "libmedia_helper",
-        "libparameter"
+        "libparameter",
+        "libutils",
     ],
 }
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 3aec064..d1fb2fb 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -36,13 +37,13 @@
     name: "buildpolicycriteriontypesrule",
     tools: ["buildPolicyCriterionTypes"],
     cmd: "cp $(locations :audio_policy_configuration_files) $(genDir)/. && " +
-         "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
-         "$(location buildPolicyCriterionTypes) " +
-         " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
-         " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
-         "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
-         "--criteriontypes $(location :audio_policy_engine_criterion_types_template) " +
-         "--outputfile $(out)",
+        "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
+        "$(location buildPolicyCriterionTypes) " +
+        " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+        " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
+        "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
+        "--criteriontypes $(location :audio_policy_engine_criterion_types_template) " +
+        "--outputfile $(out)",
     srcs: [
         // The commented inputs must be provided to use this genrule_defaults
         // @todo uncomment if 1428659 is merged":android_audio_base_header_file",
@@ -66,8 +67,8 @@
     ],
     libs: [
         "EddParser.py",
-        "hostConfig.py",
         "PFWScriptGenerator.py",
+        "hostConfig.py",
     ],
     required: [
         "domainGeneratorConnector",
@@ -77,21 +78,21 @@
 genrule_defaults {
     name: "domaingeneratorpolicyrule",
     tools: [
-        "domainGeneratorPolicy",
         "domainGeneratorConnector",
+        "domainGeneratorPolicy",
     ],
     cmd: "mkdir -p $(genDir)/Structure/Policy && " +
-         "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
-         "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
-         "$(location domainGeneratorPolicy) " +
-         "--validate " +
-         "--domain-generator-tool $(location domainGeneratorConnector) " +
-         "--toplevel-config $(genDir)/top_level " +
-         "--criteria $(location :audio_policy_engine_criteria) " +
-         "--criteriontypes $(location :audio_policy_engine_criterion_types) " +
-         "--add-edds $(locations :edd_files) " +
-         "--schemas-dir external/parameter-framework/upstream/schemas " +
-         " > $(out)",
+        "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
+        "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
+        "$(location domainGeneratorPolicy) " +
+        "--validate " +
+        "--domain-generator-tool $(location domainGeneratorConnector) " +
+        "--toplevel-config $(genDir)/top_level " +
+        "--criteria $(location :audio_policy_engine_criteria) " +
+        "--criteriontypes $(location :audio_policy_engine_criterion_types) " +
+        "--add-edds $(locations :edd_files) " +
+        "--schemas-dir external/parameter-framework/upstream/schemas " +
+        " > $(out)",
     srcs: [
         // The commented inputs must be provided to use this genrule_defaults
         // ":audio_policy_pfw_toplevel",
@@ -118,11 +119,11 @@
 genrule_defaults {
     name: "buildstrategiesstructurerule",
     tools: ["buildStrategiesStructureFile"],
-    cmd: "cp $(locations :audio_policy_engine_configuration_files) $(genDir) && ls -l $(genDir) &&"+
-         "$(location buildStrategiesStructureFile) " +
-         "--audiopolicyengineconfigurationfile $(genDir)/audio_policy_engine_configuration.xml "+
-         "--productstrategiesstructurefile $(location :product_strategies_structure_template) " +
-         "--outputfile $(out)",
+    cmd: "cp $(locations :audio_policy_engine_configuration_files) $(genDir) && ls -l $(genDir) &&" +
+        "$(location buildStrategiesStructureFile) " +
+        "--audiopolicyengineconfigurationfile $(genDir)/audio_policy_engine_configuration.xml " +
+        "--productstrategiesstructurefile $(location :product_strategies_structure_template) " +
+        "--outputfile $(out)",
     srcs: [
         // The commented inputs must be provided to use this genrule_defaults
         // ":audio_policy_engine_configuration_files",
@@ -146,9 +147,9 @@
     name: "buildcommontypesstructurerule",
     tools: ["buildCommonTypesStructureFile"],
     cmd: "$(location buildCommonTypesStructureFile) " +
-         "--androidaudiobaseheader $(location :libaudio_system_audio_base) " +
-         "--commontypesstructure $(location :common_types_structure_template) " +
-         "--outputfile $(out)",
+        "--androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+        "--commontypesstructure $(location :common_types_structure_template) " +
+        "--outputfile $(out)",
     srcs: [
         ":common_types_structure_template",
         ":libaudio_system_audio_base",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 0ef0b82..78d5fa3 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -17,14 +18,14 @@
         "-Wextra",
     ],
     header_libs: [
-        "libbase_headers",
-        "libaudiopolicycommon",
         "libaudiofoundation_headers",
+        "libaudiopolicycommon",
+        "libbase_headers",
     ],
     shared_libs: [
         "liblog",
-        "libutils",
         "libmedia_helper",
         "libparameter",
+        "libutils",
     ],
 }
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 7d4ccab..aec8c16 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -14,30 +15,34 @@
         "src/EngineInstance.cpp",
     ],
     cflags: [
-        "-fvisibility=hidden",
         "-Wall",
         "-Werror",
         "-Wextra",
+        "-fvisibility=hidden",
     ],
     header_libs: [
-        "libbase_headers",
         "libaudiopolicycommon",
         "libaudiopolicyengine_interface_headers",
+        "libbase_headers",
     ],
     static_libs: [
         "libaudiopolicyengine_common",
         "libaudiopolicyengine_config",
     ],
     shared_libs: [
+        "com.android.media.audioserver-aconfig-cc",
         "libaudio_aidl_conversion_common_cpp",
         "libaudiofoundation",
+        "libaudiopolicy",
         "libaudiopolicycomponents",
         "libbase",
-        "liblog",
         "libcutils",
-        "libutils",
+        "liblog",
         "libmedia_helper",
-        "libaudiopolicy",
+        "libutils",
         "libxml2",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
diff --git a/services/audiopolicy/enginedefault/config/example/Android.bp b/services/audiopolicy/enginedefault/config/example/Android.bp
index 59a704b..31f9a46 100644
--- a/services/audiopolicy/enginedefault/config/example/Android.bp
+++ b/services/audiopolicy/enginedefault/config/example/Android.bp
@@ -20,6 +20,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -33,21 +34,24 @@
     vendor: true,
     src: "phone/audio_policy_engine_configuration.xml",
     required: [
-        ":audio_policy_engine_stream_volumes.xml",
         ":audio_policy_engine_default_stream_volumes.xml",
         ":audio_policy_engine_product_strategies.xml",
+        ":audio_policy_engine_stream_volumes.xml",
     ],
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_product_strategies.xml",
     vendor: true,
     src: "phone/audio_policy_engine_product_strategies.xml",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_stream_volumes.xml",
     vendor: true,
     src: "phone/audio_policy_engine_stream_volumes.xml",
 }
+
 prebuilt_etc {
     name: "audio_policy_engine_default_stream_volumes.xml",
     vendor: true,
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 13cc165..9fafe2e 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -30,6 +30,7 @@
 #include <PolicyAudioPort.h>
 #include <IOProfile.h>
 #include <AudioIODescriptorInterface.h>
+#include <com_android_media_audioserver.h>
 #include <policy.h>
 #include <media/AudioContainers.h>
 #include <utils/String8.h>
@@ -142,7 +143,8 @@
         }
         break;
     case AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING:
-        if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_NONE) {
+        if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_BT_BLE
+                && config != AUDIO_POLICY_FORCE_NONE) {
             ALOGW("setForceUse() invalid config %d for VIBRATE_RINGING", config);
             return BAD_VALUE;
         }
@@ -154,12 +156,58 @@
     return EngineBase::setForceUse(usage, config);
 }
 
+bool Engine::isBtScoActive(DeviceVector& availableOutputDevices,
+                           const SwAudioOutputCollection &outputs) const {
+    if (availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
+        return false;
+    }
+    // SCO is active if:
+    // 1) we are in a call and SCO is the preferred device for PHONE strategy
+    if (isInCall() && audio_is_bluetooth_out_sco_device(
+            getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE))) {
+        return true;
+    }
+
+    // 2) A strategy for which the preferred device is SCO is active
+    for (const auto &ps : getOrderedProductStrategies()) {
+        if (outputs.isStrategyActive(ps) &&
+            !getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, ps)
+                .getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
+            return true;
+        }
+    }
+    // 3) a ringtone is active and SCO is used for ringing
+    if (outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_RING))
+          && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
+                    == AUDIO_POLICY_FORCE_BT_SCO)) {
+        return true;
+    }
+    // 4) an active input is routed from SCO
+    DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
+    const auto &inputs = getApmObserver()->getInputs();
+    if (inputs.activeInputsCountOnDevices(availableInputDevices.getDevicesFromType(
+            AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)) > 0) {
+        return true;
+    }
+    return false;
+}
+
 void Engine::filterOutputDevicesForStrategy(legacy_strategy strategy,
                                             DeviceVector& availableOutputDevices,
                                             const SwAudioOutputCollection &outputs) const
 {
     DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
 
+    if (com::android::media::audioserver::use_bt_sco_for_media()) {
+        // remove A2DP and LE Audio devices whenever BT SCO is in use
+        if (isBtScoActive(availableOutputDevices, outputs)) {
+            availableOutputDevices.remove(
+                availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllA2dpSet()));
+            availableOutputDevices.remove(
+                availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllBleSet()));
+        }
+    }
+
     switch (strategy) {
     case STRATEGY_SONIFICATION_RESPECTFUL: {
         if (!(isInCall() || outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL)))) {
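Editorial note on the hunk above: the new isBtScoActive() helper and the use_bt_sco_for_media() gate exclude A2DP and LE Audio outputs from the candidate set whenever BT SCO is in use, so media falls back to SCO instead of splitting across links. A minimal standalone sketch of that exclusion rule, using plain standard-library types in place of the framework's DeviceVector and getAudioDeviceOut*Set() helpers (all names below are illustrative, not the real API):

#include <algorithm>
#include <string>
#include <vector>

// Stand-ins for the framework device types; purely illustrative.
enum class Transport { Speaker, A2dp, Sco, LeUnicast };

struct Device {
    std::string name;
    Transport transport;
};

// Drop A2DP and LE Audio candidates while SCO is active, mirroring the
// flag-gated branch added to filterOutputDevicesForStrategy().
std::vector<Device> filterForMedia(std::vector<Device> candidates, bool scoActive) {
    if (!scoActive) return candidates;
    candidates.erase(std::remove_if(candidates.begin(), candidates.end(),
                                    [](const Device& d) {
                                        return d.transport == Transport::A2dp ||
                                               d.transport == Transport::LeUnicast;
                                    }),
                     candidates.end());
    return candidates;
}

With scoActive set, only the speaker and SCO entries survive, which is what lets the SCO fallback added later in the strategy selection pick them up.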
@@ -355,6 +403,40 @@
                 }
             }
         }
+
+        // if LEA headset is connected and we are told to use it, play ringtone over
+        // speaker and BT LEA
+        if (!availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllBleSet()).isEmpty()) {
+            DeviceVector devices2;
+            devices2 = availableOutputDevices.getFirstDevicesFromTypes({
+                    AUDIO_DEVICE_OUT_BLE_HEADSET, AUDIO_DEVICE_OUT_BLE_SPEAKER});
+            // Use ONLY Bluetooth LEA output when ringing in vibration mode
+            if (!((getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)
+                    && (strategy == STRATEGY_ENFORCED_AUDIBLE))) {
+                if (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
+                        == AUDIO_POLICY_FORCE_BT_BLE) {
+                    if (!devices2.isEmpty()) {
+                        devices = devices2;
+                        break;
+                    }
+                }
+            }
+            // Use both Bluetooth LEA and phone default output when ringing in normal mode
+            if (audio_is_ble_out_device(getPreferredDeviceTypeForLegacyStrategy(
+                    availableOutputDevices, STRATEGY_PHONE))) {
+                if (strategy == STRATEGY_SONIFICATION) {
+                    devices.replaceDevicesByType(
+                            AUDIO_DEVICE_OUT_SPEAKER,
+                            availableOutputDevices.getDevicesFromType(
+                                    AUDIO_DEVICE_OUT_SPEAKER_SAFE));
+                }
+                if (!devices2.isEmpty()) {
+                    devices.add(devices2);
+                    break;
+                }
+            }
+        }
+
         // The second device used for sonification is the same as the device used by media strategy
         FALLTHROUGH_INTENDED;
 
@@ -381,10 +463,12 @@
 
         // LE audio broadcast device is only used if:
         // - No call is active
-        // - either MEDIA or SONIFICATION_RESPECTFUL is the highest priority active strategy
-        //   OR the LE audio unicast device is not active
+        // - either MEDIA, SONIFICATION_RESPECTFUL or SONIFICATION is the highest priority
+        // active strategy
+        // OR the LE audio unicast device is not active
         if (devices2.isEmpty() && !isInCall()
-                && (strategy == STRATEGY_MEDIA || strategy == STRATEGY_SONIFICATION_RESPECTFUL)) {
+                && (strategy == STRATEGY_MEDIA || strategy == STRATEGY_SONIFICATION_RESPECTFUL
+                      || strategy == STRATEGY_SONIFICATION)) {
             legacy_strategy topActiveStrategy = STRATEGY_NONE;
             for (const auto &ps : getOrderedProductStrategies()) {
                 if (outputs.isStrategyActive(ps)) {
@@ -396,6 +480,7 @@
 
             if (topActiveStrategy == STRATEGY_NONE || topActiveStrategy == STRATEGY_MEDIA
                     || topActiveStrategy == STRATEGY_SONIFICATION_RESPECTFUL
+                    || topActiveStrategy == STRATEGY_SONIFICATION
                     || !outputs.isAnyDeviceTypeActive(getAudioDeviceOutLeAudioUnicastSet())) {
                 devices2 =
                         availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_BLE_BROADCAST);
@@ -418,15 +503,27 @@
                         getLastRemovableMediaDevices(GROUP_WIRED, excludedDevices));
             }
         }
+
+        if (com::android::media::audioserver::use_bt_sco_for_media()) {
+            if (devices2.isEmpty() && isBtScoActive(availableOutputDevices, outputs)) {
+                devices2 = availableOutputDevices.getFirstDevicesFromTypes(
+                        { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
+                          AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+                          AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
+            }
+        }
+
         if ((devices2.isEmpty()) &&
                 (getForceUse(AUDIO_POLICY_FORCE_FOR_DOCK) == AUDIO_POLICY_FORCE_ANALOG_DOCK)) {
             devices2 = availableOutputDevices.getDevicesFromType(
                     AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET);
         }
+
         if (devices2.isEmpty()) {
             devices2 = availableOutputDevices.getFirstDevicesFromTypes({
                         AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_SPEAKER});
         }
+
         DeviceVector devices3;
         if (strategy == STRATEGY_MEDIA) {
             // ARC, SPDIF and AUX_LINE can co-exist with others.
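Several of the branches added in this file (the LE Audio ringtone path and the SCO-for-media fallback) pick a device by walking an ordered preference list via getFirstDevicesFromTypes(). A small self-contained sketch of that first-match-wins lookup; the string keys and availability map are stand-ins, not the framework API:

#include <optional>
#include <string>
#include <unordered_map>
#include <vector>

using DeviceType = std::string;

// Return the highest-priority type from preferenceOrder that is currently
// available, or nothing so the caller can fall through to the next rule.
std::optional<DeviceType> firstAvailable(
        const std::unordered_map<DeviceType, bool>& available,
        const std::vector<DeviceType>& preferenceOrder) {
    for (const auto& type : preferenceOrder) {
        auto it = available.find(type);
        if (it != available.end() && it->second) {
            return type;
        }
    }
    return std::nullopt;
}

For the SCO fallback in this change the preference order reads, conceptually, {SCO_CARKIT, SCO_HEADSET, SCO}.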
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 878bca9..a6090cf 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -109,6 +109,9 @@
     DeviceVector getDisabledDevicesForInputSource(
             const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
 
+    bool isBtScoActive(DeviceVector& availableOutputDevices,
+                       const SwAudioOutputCollection &outputs) const;
+
     std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
 };
 } // namespace audio_policy
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index fd240e3..30d4403 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -17,6 +17,7 @@
  ******************************************************************************/
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -27,39 +28,18 @@
 
 cc_fuzz {
     name: "audiopolicy_fuzzer",
+    defaults: [
+        "libaudiopolicyservice_dependencies",
+    ],
     srcs: [
         "audiopolicy_fuzzer.cpp",
     ],
-    include_dirs: [
-        "frameworks/av/services/audiopolicy",
-    ],
-    shared_libs: [
-        "android.hardware.audio.common-util",
-        "capture_state_listener-aidl-cpp",
-        "libaudioclient",
-        "libaudiofoundation",
-        "libaudiopolicycomponents",
-        "libbase",
-        "libcutils",
-        "libhidlbase",
-        "libdl",
-        "liblog",
-        "libmedia_helper",
-        "libmediametrics",
-        "libutils",
-        "libxml2",
-        "libbinder",
-        "libaudiopolicy",
-        "libaudiopolicymanagerdefault",
-        "framework-permission-aidl-cpp",
-    ],
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
     ],
-    header_libs: [
-        "libaudiopolicycommon",
-        "libaudiopolicyengine_interface_headers",
-        "libaudiopolicymanager_interface_headers",
+    include_dirs: [
+        "frameworks/av/services/audiopolicy", // include path outside of libaudiopolicyservice
+        "frameworks/av/services/audiopolicy/engine/interface", // for /tests/AudioPolicyTestManager.h:
     ],
     data: [":audiopolicyfuzzer_configuration_files"],
     fuzz_config: {
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
index 38a2cde..680f76d 100644
--- a/services/audiopolicy/fuzzer/aidl/Android.bp
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -16,38 +16,22 @@
  *
  ******************************************************************************/
 
+package {
+    default_team: "trendy_team_android_media_audio_framework",
+}
+
 cc_defaults {
     name: "audiopolicy_aidl_fuzzer_defaults",
     shared_libs: [
-        "audiopolicy-aidl-cpp",
-        "audiopolicy-types-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libaudiopolicy",
-        "libaudiopolicymanagerdefault",
-        "libactivitymanager_aidl",
-        "libaudiohal",
-        "libaudiopolicyservice",
         "libaudioflinger",
-        "libaudioclient",
-        "libaudioprocessing",
-        "libhidlbase",
-        "liblog",
-        "libmediautils",
-        "libnblog",
-        "libnbaio",
-        "libpowermanager",
-        "libvibrator",
-        "packagemanager_aidl-cpp",
-    ],
-    static_libs: [
-        "libfakeservicemanager",
+        "libaudiopolicyservice",
         "libmediaplayerservice",
     ],
+    static_libs: [
+        "libaudiomockhal",
+        "libfakeservicemanager",
+    ],
     header_libs: [
-        "libaudiohal_headers",
-        "libaudioflinger_headers",
-        "libaudiopolicymanager_interface_headers",
-        "libbinder_headers",
         "libmedia_headers",
     ],
     fuzz_config: {
@@ -69,6 +53,11 @@
     srcs: ["audiopolicy_aidl_fuzzer.cpp"],
     defaults: [
         "audiopolicy_aidl_fuzzer_defaults",
+        "latest_android_hardware_audio_core_ndk_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "libaudioflinger_dependencies",
+        "libaudiopolicyservice_dependencies",
         "service_fuzzer_defaults",
     ],
 }
diff --git a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
index ca79c49..f5e72f5 100644
--- a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
@@ -18,8 +18,12 @@
 #include <AudioFlinger.h>
 #include <android-base/logging.h>
 #include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
 #include <android/binder_process.h>
 #include <android/media/IAudioPolicyService.h>
+#include <core-mock/ConfigMock.h>
+#include <core-mock/ModuleMock.h>
+#include <effect-mock/FactoryMock.h>
 #include <fakeservicemanager/FakeServiceManager.h>
 #include <fuzzbinder/libbinder_driver.h>
 #include <fuzzbinder/random_binder.h>
@@ -34,6 +38,7 @@
 
 [[clang::no_destroy]] static std::once_flag gSmOnce;
 sp<FakeServiceManager> gFakeServiceManager;
+sp<AudioPolicyService> gAudioPolicyService;
 
 bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
                 FuzzedDataProvider& fdp) {
@@ -45,42 +50,58 @@
     return true;
 }
 
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+    /* Create a FakeServiceManager instance and add required services */
+    gFakeServiceManager = sp<FakeServiceManager>::make();
+    setDefaultServiceManager(gFakeServiceManager);
+
+    auto configService = ndk::SharedRefBase::make<ConfigMock>();
+    CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
+                                                  "android.hardware.audio.core.IConfig/default"));
+
+    auto factoryService = ndk::SharedRefBase::make<FactoryMock>();
+    CHECK_EQ(NO_ERROR,
+             AServiceManager_addService(factoryService.get()->asBinder().get(),
+                                        "android.hardware.audio.effect.IFactory/default"));
+
+    auto moduleService = ndk::SharedRefBase::make<ModuleMock>();
+    CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
+                                                  "android.hardware.audio.core.IModule/default"));
+
+    // Disable creating thread pool for fuzzer instance of audio flinger and audio policy services
+    AudioSystem::disableThreadPool();
+
+    return 0;
+}
+
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
 
-    std::call_once(gSmOnce, [&] {
-        /* Create a FakeServiceManager instance and add required services */
-        gFakeServiceManager = sp<FakeServiceManager>::make();
-        setDefaultServiceManager(gFakeServiceManager);
-    });
-    gFakeServiceManager->clear();
-
-    for (const char* service :
-         {"activity", "sensor_privacy", "permission", "scheduling_policy",
-          "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+    for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+                                "batterystats", "media.metrics"}) {
         if (!addService(String16(service), gFakeServiceManager, fdp)) {
             return 0;
         }
     }
 
-    const auto audioFlinger = sp<AudioFlinger>::make();
-    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+    // TODO(330882064): Initialise Audio Flinger and Audio Policy services every time

+    std::call_once(gSmOnce, [&] {
+        const auto audioFlinger = sp<AudioFlinger>::make();
+        const auto audioFlingerServerAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+        CHECK_EQ(NO_ERROR,
+                 gFakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                                 IInterface::asBinder(audioFlingerServerAdapter),
+                                                 false /* allowIsolated */,
+                                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
 
-    CHECK_EQ(NO_ERROR,
-             gFakeServiceManager->addService(
-                     String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
-                     false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+        gAudioPolicyService = sp<AudioPolicyService>::make();
+        CHECK_EQ(NO_ERROR,
+                 gFakeServiceManager->addService(String16("media.audio_policy"),
+                                                 gAudioPolicyService, false /* allowIsolated */,
+                                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+    });
 
-    AudioSystem::get_audio_flinger_for_fuzzer();
-    const auto audioPolicyService = sp<AudioPolicyService>::make();
-
-    CHECK_EQ(NO_ERROR,
-             gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
-                                             false /* allowIsolated */,
-                                             IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
-
-    fuzzService(media::IAudioPolicyService::asBinder(audioPolicyService),
-                FuzzedDataProvider(data, size));
+    fuzzService(media::IAudioPolicyService::asBinder(gAudioPolicyService), std::move(fdp));
 
     return 0;
 }
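The rewritten harness above follows the standard libFuzzer shape: LLVMFuzzerInitialize() runs once per process to install the fake service manager and the mock HAL services, while LLVMFuzzerTestOneInput() runs per input and keeps the expensive AudioFlinger/AudioPolicyService construction behind std::call_once until the TODO is resolved. A bare-bones sketch of that structure, with the actual service setup reduced to comments:

#include <fuzzer/FuzzedDataProvider.h>

#include <cstddef>
#include <cstdint>
#include <mutex>

static std::once_flag gOnce;

extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
    // One-time process setup: register fake/mock dependencies here.
    return 0;
}

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);
    std::call_once(gOnce, [] {
        // Construct expensive long-lived state (e.g. the service under test).
    });
    // Drive the target with fuzzer-derived values.
    (void)fdp.ConsumeIntegral<uint32_t>();
    return 0;
}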
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 58fcb5c..6416a47 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -207,7 +207,8 @@
                          audio_port_handle_t *selectedDeviceId, audio_format_t format,
                          audio_channel_mask_t channelMask, int sampleRate,
                          audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
-                         audio_port_handle_t *portId = nullptr);
+                         audio_port_handle_t *portId = nullptr,
+                         uint32_t *virtualDeviceId = nullptr);
     bool findDevicePort(audio_port_role_t role, audio_devices_t deviceType,
                         const std::string &address, audio_port_v7 *foundPort);
     static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch *patch);
@@ -283,7 +284,7 @@
 bool AudioPolicyManagerFuzzer::getInputForAttr(
     const audio_attributes_t &attr, audio_unique_id_t riid, audio_port_handle_t *selectedDeviceId,
     audio_format_t format, audio_channel_mask_t channelMask, int sampleRate,
-    audio_input_flags_t flags, audio_port_handle_t *portId) {
+    audio_input_flags_t flags, audio_port_handle_t *portId, uint32_t *virtualDeviceId) {
     audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     config.sample_rate = sampleRate;
@@ -298,7 +299,7 @@
     attributionSource.uid = 0;
     attributionSource.token = sp<BBinder>::make();
     if (mManager->getInputForAttr(&attr, &input, riid, AUDIO_SESSION_NONE, attributionSource,
-            &config, flags, selectedDeviceId, &inputType, portId) != OK) {
+            &config, flags, selectedDeviceId, &inputType, portId, virtualDeviceId) != OK) {
         return false;
     }
     if (*portId == AUDIO_PORT_HANDLE_NONE || input == AUDIO_IO_HANDLE_NONE) {
diff --git a/services/audiopolicy/fuzzer/resources/Android.bp b/services/audiopolicy/fuzzer/resources/Android.bp
index 22ee256..2a2b83b 100644
--- a/services/audiopolicy/fuzzer/resources/Android.bp
+++ b/services/audiopolicy/fuzzer/resources/Android.bp
@@ -17,6 +17,7 @@
  ******************************************************************************/
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index 6a37b4e..e6f6374 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -24,27 +25,28 @@
     shared_libs: [
         "com.android.media.audio-aconfig-cc",
         "libaudiofoundation",
+        "libaudiopolicy",
         "libaudiopolicycomponents",
+        "libbinder",
         "libcutils",
         "libdl",
-        "libutils",
+        "libhidlbase",
         "liblog",
-        "libaudiopolicy",
         "libmedia_helper",
         "libmediametrics",
-        "libbinder",
-        "libhidlbase",
+        "libutils",
         "libxml2",
         // The default audio policy engine is always present in the system image.
         // libaudiopolicyengineconfigurable can be built in addition by specifying
         // a dependency on it in the device makefile. There will be no build time
         // conflict with libaudiopolicyenginedefault.
-        "libaudiopolicyenginedefault",
-        "framework-permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
         "audioclient-types-aidl-cpp",
         // Flag support
-        "com.android.media.audioserver-aconfig-cc"
+        "android.media.audiopolicy-aconfig-cc",
+        "com.android.media.audioserver-aconfig-cc",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libaudiopolicyenginedefault",
     ],
 
     header_libs: [
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 46e4d60..a2363af 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -42,6 +42,7 @@
 #include <Serializer.h>
 #include <android/media/audio/common/AudioPort.h>
 #include <com_android_media_audio.h>
+#include <android_media_audiopolicy.h>
 #include <com_android_media_audioserver.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
@@ -58,10 +59,14 @@
 
 namespace android {
 
+
+namespace audio_flags = android::media::audiopolicy;
+
 using android::media::audio::common::AudioDevice;
 using android::media::audio::common::AudioDeviceAddress;
 using android::media::audio::common::AudioPortDeviceExt;
 using android::media::audio::common::AudioPortExt;
+using com::android::media::audioserver::fix_call_audio_patch;
 using content::AttributionSourceState;
 
 //FIXME: workaround for truncated touch sounds
@@ -125,7 +130,8 @@
     device->toAudioPort(&devicePort);
     if (status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
             status != OK) {
-        ALOGE("Error %d while setting connected state for device %s", state,
+        ALOGE("Error %d while setting connected state %d for device %s",
+                status, static_cast<int>(state),
                 device->getDeviceTypeAddr().toString(false).c_str());
     }
 }
@@ -213,9 +219,9 @@
             if (checkOutputsForDevice(device, state, outputs) != NO_ERROR) {
                 mAvailableOutputDevices.remove(device);
 
-                mHwModules.cleanUpForDevice(device);
-
                 broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
+
+                mHwModules.cleanUpForDevice(device);
                 return INVALID_OPERATION;
             }
 
@@ -333,7 +339,7 @@
                         && (!device_distinguishes_on_address(device->type())
                                 // always force when disconnecting (a non-duplicated device)
                                 || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-                if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+                if (desc->mPreferredAttrInfo != nullptr && newDevices != desc->devices()) {
                     // If the device is using preferred mixer attributes, the output need to reopen
                     // with default configuration when the new selected devices are different from
                     // current routing devices
@@ -369,6 +375,7 @@
         checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, 0);
 
         mpClientInterface->onAudioPortListUpdate();
+        ALOGV("%s() completed for device: %s", __func__, device->toString().c_str());
         return NO_ERROR;
     }  // end if is output device
 
@@ -384,6 +391,8 @@
                 return INVALID_OPERATION;
             }
 
+            ALOGV("%s() connecting device %s", __func__, device->toString().c_str());
+
             if (mAvailableInputDevices.add(device) < 0) {
                 return NO_MEMORY;
             }
@@ -391,8 +400,12 @@
             // Before checking intputs, broadcast connect event to allow HAL to retrieve dynamic
             // parameters on newly connected devices (instead of opening the inputs...)
             broadcastDeviceConnectionState(device, media::DeviceConnectedState::CONNECTED);
+            // Propagate device availability to Engine
+            setEngineDeviceConnectionState(device, state);
 
             if (checkInputsForDevice(device, state) != NO_ERROR) {
+                setEngineDeviceConnectionState(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+
                 mAvailableInputDevices.remove(device);
 
                 broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
@@ -426,6 +439,9 @@
 
             // remove device from mReportedFormatsMap cache
             mReportedFormatsMap.erase(device);
+
+            // Propagate device availability to Engine
+            setEngineDeviceConnectionState(device, state);
         } break;
 
         default:
@@ -433,9 +449,6 @@
             return BAD_VALUE;
         }
 
-        // Propagate device availability to Engine
-        setEngineDeviceConnectionState(device, state);
-
         checkCloseInputs();
         // As the input device list can impact the output device selection, update
         // getDeviceForStrategy() cache
@@ -452,6 +465,7 @@
         }
 
         mpClientInterface->onAudioPortListUpdate();
+        ALOGV("%s() completed for device: %s", __func__, device->toString().c_str());
         return NO_ERROR;
     } // end if is input device
 
@@ -558,15 +572,31 @@
         }
     }
     auto musicStrategy = streamToStrategy(AUDIO_STREAM_MUSIC);
+    uint32_t muteWaitMs = 0;
     for (size_t i = 0; i < mOutputs.size(); i++) {
        sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-       // mute media strategies and delay device switch by the largest
-       // This avoid sending the music tail into the earpiece or headset.
+       // mute media strategies to avoid sending the music tail into
+       // the earpiece or headset.
+       if (desc->isStrategyActive(musicStrategy)) {
+           uint32_t tempRecommendedMuteDuration = desc->getRecommendedMuteDurationMs();
+           uint32_t tempMuteDurationMs = tempRecommendedMuteDuration > 0 ?
+                        tempRecommendedMuteDuration : desc->latency() * 4;
+           if (muteWaitMs < tempMuteDurationMs) {
+               muteWaitMs = tempMuteDurationMs;
+           }
+       }
        setStrategyMute(musicStrategy, true, desc);
        setStrategyMute(musicStrategy, false, desc, MUTE_TIME_MS,
           mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
                                               nullptr, true /*fromCache*/).types());
     }
+    // Wait for the muted audio to propagate down the audio path; see checkDeviceMuteStrategies().
+    // We assume that MUTE_TIME_MS is way larger than muteWaitMs so that unmuting still
+    // happens after the actual device switch.
+    if (muteWaitMs > 0) {
+        ALOGW_IF(MUTE_TIME_MS < muteWaitMs * 2, "%s excessive mute wait %d", __func__, muteWaitMs);
+        usleep(muteWaitMs * 1000);
+    }
     // Toggle the device state: UNAVAILABLE -> AVAILABLE
     // This will force reading again the device configuration
     status_t status = setDeviceConnectionState(device,
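The added muteWaitMs logic waits for the largest per-output mute time before toggling the device: the HAL's recommended mute duration when one is reported, otherwise four times the output latency, taken only over outputs actively playing the music strategy. A sketch of that computation with a stand-in Output struct in place of SwAudioOutputDescriptor:

#include <algorithm>
#include <cstdint>
#include <vector>

struct Output {
    bool musicActive;            // output currently playing the music strategy
    uint32_t recommendedMuteMs;  // 0 when the HAL reports no recommendation
    uint32_t latencyMs;
};

uint32_t computeMuteWaitMs(const std::vector<Output>& outputs) {
    uint32_t muteWaitMs = 0;
    for (const auto& out : outputs) {
        if (!out.musicActive) continue;
        const uint32_t ms = out.recommendedMuteMs > 0 ? out.recommendedMuteMs
                                                      : out.latencyMs * 4;
        muteWaitMs = std::max(muteWaitMs, ms);
    }
    return muteWaitMs;
}

For example, two active outputs with 10 ms and 40 ms latency and no recommended duration give muteWaitMs = 160 ms, which is then slept for before the UNAVAILABLE/AVAILABLE toggle.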
@@ -679,8 +709,10 @@
     audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
     auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
 
-    disconnectTelephonyAudioSource(mCallRxSourceClient);
-    disconnectTelephonyAudioSource(mCallTxSourceClient);
+    if (!fix_call_audio_patch()) {
+        disconnectTelephonyAudioSource(mCallRxSourceClient);
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
+    }
 
     if (rxDevices.isEmpty()) {
         ALOGW("%s() no selected output device", __func__);
@@ -733,13 +765,16 @@
     // Use legacy routing method for voice calls via setOutputDevice() on primary output.
     // Otherwise, create two audio patches for TX and RX path.
     if (!createRxPatch) {
+        if (fix_call_audio_patch()) {
+            disconnectTelephonyAudioSource(mCallRxSourceClient);
+        }
         if (!hasPrimaryOutput()) {
             ALOGW("%s() no primary output available", __func__);
             return INVALID_OPERATION;
         }
         muteWaitMs = setOutputDevices(__func__, mPrimaryOutput, rxDevices, true, delayMs);
     } else { // create RX path audio patch
-        connectTelephonyRxAudioSource();
+        connectTelephonyRxAudioSource(delayMs);
         // If the TX device is on the primary HW module but RX device is
         // on other HW module, SinkMetaData of telephony input should handle it
         // assuming the device uses audio HAL V5.0 and above
@@ -755,6 +790,8 @@
             }
         }
         connectTelephonyTxAudioSource(txSourceDevice, txSinkDevice, delayMs);
+    } else if (fix_call_audio_patch()) {
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
     }
     if (waitMs != nullptr) {
         *waitMs = muteWaitMs;
@@ -774,19 +811,40 @@
     return false;
 }
 
-void AudioPolicyManager::connectTelephonyRxAudioSource()
+void AudioPolicyManager::connectTelephonyRxAudioSource(uint32_t delayMs)
 {
-    disconnectTelephonyAudioSource(mCallRxSourceClient);
+    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+
+    if (fix_call_audio_patch()) {
+        if (mCallRxSourceClient != nullptr) {
+            DeviceVector rxDevices =
+                  mEngine->getOutputDevicesForAttributes(aa, nullptr, false /*fromCache*/);
+            ALOG_ASSERT(!rxDevices.isEmpty() || !mCallRxSourceClient->isConnected(),
+                        "connectTelephonyRxAudioSource(): no device found for call RX source");
+            sp<DeviceDescriptor> rxDevice = rxDevices.itemAt(0);
+            if (mCallRxSourceClient->isConnected()
+                    && mCallRxSourceClient->sinkDevice()->equals(rxDevice)) {
+                return;
+            }
+            disconnectTelephonyAudioSource(mCallRxSourceClient);
+        }
+    } else {
+        disconnectTelephonyAudioSource(mCallRxSourceClient);
+    }
+
     const struct audio_port_config source = {
         .role = AUDIO_PORT_ROLE_SOURCE, .type = AUDIO_PORT_TYPE_DEVICE,
         .ext.device.type = AUDIO_DEVICE_IN_TELEPHONY_RX, .ext.device.address = ""
     };
-    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
-
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    status_t status = startAudioSource(&source, &aa, &portId, 0 /*uid*/, true /*internal*/);
+
+    status_t status = startAudioSourceInternal(&source, &aa, &portId, 0 /*uid*/,
+                                       true /*internal*/, true /*isCallRx*/, delayMs);
     ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
     mCallRxSourceClient = mAudioSources.valueFor(portId);
+    ALOGV("%s portdID %d between source %s and sink %s", __func__, portId,
+        mCallRxSourceClient->srcDevice()->toString().c_str(),
+        mCallRxSourceClient->sinkDevice()->toString().c_str());
     ALOGE_IF(mCallRxSourceClient == nullptr,
              "%s failed to start Telephony Rx AudioSource", __func__);
 }
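With fix_call_audio_patch() enabled, the RX source above is torn down only when the selected sink actually changes; an already-connected client pointing at the same device is left alone. A sketch of that reuse check with placeholder Source/Device types standing in for SourceClientDescriptor and DeviceDescriptor:

#include <memory>
#include <string>

struct Device {
    std::string address;
    bool operator==(const Device& other) const { return address == other.address; }
};

struct Source {
    bool connected = false;
    Device sink;
};

// True when the existing source can be kept as-is, so the caller skips the
// disconnect/reconnect cycle entirely.
bool canReusePatch(const std::shared_ptr<Source>& current, const Device& wantedSink) {
    return current != nullptr && current->connected && current->sink == wantedSink;
}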
@@ -805,15 +863,26 @@
         const sp<DeviceDescriptor> &srcDevice, const sp<DeviceDescriptor> &sinkDevice,
         uint32_t delayMs)
 {
-    disconnectTelephonyAudioSource(mCallTxSourceClient);
     if (srcDevice == nullptr || sinkDevice == nullptr) {
         ALOGW("%s could not create patch, invalid sink and/or source device(s)", __func__);
         return;
     }
+
+    if (fix_call_audio_patch()) {
+        if (mCallTxSourceClient != nullptr) {
+            if (mCallTxSourceClient->isConnected()
+                    && mCallTxSourceClient->srcDevice()->equals(srcDevice)) {
+                return;
+            }
+            disconnectTelephonyAudioSource(mCallTxSourceClient);
+        }
+    } else {
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
+    }
+
     PatchBuilder patchBuilder;
     patchBuilder.addSource(srcDevice).addSink(sinkDevice);
-    ALOGV("%s between source %s and sink %s", __func__,
-            srcDevice->toString().c_str(), sinkDevice->toString().c_str());
+
     auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
     const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
 
@@ -821,7 +890,8 @@
     srcDevice->toAudioPortConfig(&source);
     mCallTxSourceClient = new SourceClientDescriptor(
                 callTxSourceClientPortId, mUidCached, aa, source, srcDevice, AUDIO_STREAM_PATCH,
-                mCommunnicationStrategy, toVolumeSource(aa), true);
+                mCommunnicationStrategy, toVolumeSource(aa), true,
+                false /*isCallRx*/, true /*isCallTx*/);
     mCallTxSourceClient->setPreferredDeviceId(sinkDevice->getId());
 
     audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
@@ -829,6 +899,8 @@
                 mCallTxSourceClient, sinkDevice, patchBuilder.patch(), patchHandle, mUidCached,
                 delayMs);
     ALOGE_IF(status != NO_ERROR, "%s() error %d creating TX audio patch", __func__, status);
+    ALOGV("%s portdID %d between source %s and sink %s", __func__, callTxSourceClientPortId,
+        srcDevice->toString().c_str(), sinkDevice->toString().c_str());
     if (status == NO_ERROR) {
         mAudioSources.add(callTxSourceClientPortId, mCallTxSourceClient);
     }
@@ -912,15 +984,15 @@
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
+        if (state != AUDIO_MODE_NORMAL && oldState == AUDIO_MODE_NORMAL
+                && desc->mPreferredAttrInfo != nullptr) {
+            // If the output is using preferred mixer attributes and the audio mode is not normal,
+            // the output needs to reopen with the default configuration.
+            outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+            continue;
+        }
         if (state != AUDIO_MODE_IN_CALL || (desc != mPrimaryOutput && !isTelephonyRxOrTx(desc))) {
             bool forceRouting = !newDevices.isEmpty();
-            if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
-                // If the device is using preferred mixer attributes, the output need to reopen
-                // with default configuration when the new selected devices are different from
-                // current routing devices.
-                outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
-                continue;
-            }
             setOutputDevices(__func__, desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
                              true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
@@ -1050,11 +1122,11 @@
     sp<IOProfile> profile;
     for (const auto& hwModule : hwModules) {
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
-             if (!curProfile->isCompatibleProfile(devices,
+             if (curProfile->getCompatibilityScore(devices,
                      samplingRate, NULL /*updatedSamplingRate*/,
                      format, NULL /*updatedFormat*/,
                      channelMask, NULL /*updatedChannelMask*/,
-                     flags)) {
+                     flags) == IOProfile::NO_MATCH) {
                  continue;
              }
              // reject profiles not corresponding to a device currently available
@@ -1245,7 +1317,7 @@
             status = openDirectOutput(
                     *stream, session, config,
                     (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_DIRECT),
-                    DeviceVector(policyMixDevice), &newOutput);
+                    DeviceVector(policyMixDevice), &newOutput, *resultAttr);
             if (status == NO_ERROR) {
                 policyDesc = mOutputs.valueFor(newOutput);
                 primaryMix->setOutput(policyDesc);
@@ -1332,23 +1404,40 @@
             // Only use preferred mixer if the uid matches or the preferred mixer is bit-perfect
             // and it is currently active.
             if (info != nullptr && info->getUid() != uid &&
-                ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_NONE ||
-                        info->getActiveClientCount() == 0)) {
+                (!info->isBitPerfect() || info->getActiveClientCount() == 0)) {
                 info = nullptr;
             }
+            if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+                if (info != nullptr && info->getUid() == uid &&
+                    info->configMatches(*config) &&
+                    (mEngine->getPhoneState() != AUDIO_MODE_NORMAL ||
+                            std::any_of(gHighPriorityUseCases.begin(), gHighPriorityUseCases.end(),
+                                        [this, &outputDevices](audio_usage_t usage) {
+                                            return mOutputs.isUsageActiveOnDevice(
+                                                    usage, outputDevices[0]); }))) {
+                    // A bit-perfect request is not allowed when the phone mode is not normal
+                    // or any higher priority use case is active.
+                    return INVALID_OPERATION;
+                }
+            }
         }
         *output = getOutputForDevices(outputDevices, session, resultAttr, config,
                 flags, isSpatialized, info, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
         // The client will be active if the client is currently preferred mixer owner and the
         // requested configuration matches the preferred mixer configuration.
         *isBitPerfect = (info != nullptr
-                && (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+                && info->isBitPerfect()
                 && info->getUid() == uid
                 && *output != AUDIO_IO_HANDLE_NONE
                 // When bit-perfect output is selected for the preferred mixer attributes owner,
                 // only need to consider the config matches.
                 && mOutputs.valueFor(*output)->isConfigurationMatched(
                         clientConfig, AUDIO_OUTPUT_FLAG_NONE));
+
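+        // Reflect the bit-perfect selection in the output flags returned to the client.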
+        if (*isBitPerfect) {
+            *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+        }
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
         AudioProfileVector profiles;
@@ -1466,7 +1555,8 @@
                                               const audio_config_t *config,
                                               audio_output_flags_t flags,
                                               const DeviceVector &devices,
-                                              audio_io_handle_t *output) {
+                                              audio_io_handle_t *output,
+                                              audio_attributes_t attributes) {
 
     *output = AUDIO_IO_HANDLE_NONE;
 
@@ -1507,7 +1597,7 @@
                 (config->channel_mask == desc->getChannelMask()) &&
                 (session == desc->mDirectClientSession)) {
                 desc->mDirectOpenCount++;
-                ALOGV("%s reusing direct output %d for session %d", __func__,
+                ALOGI("%s reusing direct output %d for session %d", __func__,
                     mOutputs.keyAt(i), session);
                 *output = mOutputs.keyAt(i);
                 return NO_ERROR;
@@ -1517,17 +1607,23 @@
 
     if (!profile->canOpenNewIo()) {
         if (!com::android::media::audioserver::direct_track_reprioritization()) {
+            ALOGW("%s profile %s can't open new output maxOpenCount reached", __func__,
+                  profile->getName().c_str());
             return NAME_NOT_FOUND;
         } else if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0) {
             // MMAP gracefully handles lack of an exclusive track resource by mixing
             // above the audio framework. For AAudio to know that the limit is reached,
             // return an error.
+            ALOGW("%s profile %s can't open new mmap output maxOpenCount reached", __func__,
+                  profile->getName().c_str());
             return NAME_NOT_FOUND;
         } else {
             // Close outputs on this profile, if available, to free resources for this request
             for (int i = 0; i < mOutputs.size() && !profile->canOpenNewIo(); i++) {
                 const auto desc = mOutputs.valueAt(i);
                 if (desc->mProfile == profile) {
+                    ALOGV("%s closeOutput %d to prioritize session %d on profile %s", __func__,
+                          desc->mIoHandle, session, profile->getName().c_str());
                     closeOutput(desc->mIoHandle);
                 }
             }
@@ -1536,6 +1632,8 @@
 
     // Unable to close streams to find free resources for this request
     if (!profile->canOpenNewIo()) {
+        ALOGW("%s profile %s can't open new output maxOpenCount reached", __func__,
+              profile->getName().c_str());
         return NAME_NOT_FOUND;
     }
 
@@ -1546,7 +1644,8 @@
     releaseMsdOutputPatches(devices);
 
     status_t status =
-            outputDesc->open(config, nullptr /* mixerConfig */, devices, stream, flags, output);
+            outputDesc->open(config, nullptr /* mixerConfig */, devices, stream, flags, output,
+                             attributes);
 
     // only accept an output with the requested parameters
     if (status != NO_ERROR ||
@@ -1572,6 +1671,11 @@
     outputDesc->mDirectClientSession = session;
 
     addOutput(*output, outputDesc);
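+    // Apply initial routing of the new direct output to the requested devices.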
+    setOutputDevices(__func__, outputDesc, devices, true /*force*/, 0 /*delayMs*/, nullptr);
     mPreviousOutputs = mOutputs;
     ALOGV("%s returns new direct output %d", __func__, *output);
     mpClientInterface->onAudioPortListUpdate();
@@ -1632,17 +1736,24 @@
         *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_ULTRASOUND);
     }
 
+    // Use the spatializer output if the content can be spatialized, no preferred mixer was
+    // specified, offload or direct playback is not explicitly requested, and no haptic channel
+    // is included in the playback.
     *isSpatialized = false;
-    if (mSpatializerOutput != nullptr
-            && canBeSpatializedInt(attr, config, devices.toTypeAddrVector())
-            && prefMixerConfigInfo == nullptr) {
+    if (mSpatializerOutput != nullptr &&
+        canBeSpatializedInt(attr, config, devices.toTypeAddrVector()) &&
+        prefMixerConfigInfo == nullptr &&
+        ((*flags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) &&
+        checkHapticCompatibilityOnSpatializerOutput(config, session)) {
         *isSpatialized = true;
         return mSpatializerOutput->mIoHandle;
     }
 
     audio_config_t directConfig = *config;
     directConfig.channel_mask = channelMask;
-    status_t status = openDirectOutput(stream, session, &directConfig, *flags, devices, &output);
+
+    status_t status = openDirectOutput(stream, session, &directConfig, *flags, devices, &output,
+                                       *attr);
     if (status != NAME_NOT_FOUND) {
         return output;
     }
@@ -1693,6 +1804,24 @@
             // at this stage we should ignore the DIRECT flag as no direct output could be
             // found earlier
             *flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
+            if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+                // If the preferred mixer attributes info is null, do not select the
+                // bit-perfect output unless it is the only output.
+                // The bit-perfect output can exist while the passed-in preferred mixer
+                // attributes info is null when the client is a high priority client. High
+                // priority clients are ringtone or alarm, which are not bit-perfect use cases.
+                size_t i = 0;
+                while (i < outputs.size() && outputs.size() > 1) {
+                    auto desc = mOutputs.valueFor(outputs[i]);
+                    // The output descriptor must not be null here.
+                    if (desc->isBitPerfect()) {
+                        outputs.removeItemsAt(i);
+                    } else {
+                        i += 1;
+                    }
+                }
+            }
             output = selectOutput(
                     outputs, *flags, config->format, channelMask, config->sample_rate, session);
         }
@@ -2061,6 +2190,7 @@
     // matching criteria values in priority order for best matching output so far
     std::vector<uint32_t> bestMatchCriteria(8, 0);
 
+    const bool hasOrphanHaptic = mEffects.hasOrphansForSession(sessionId, FX_IID_HAPTICGENERATOR);
     const uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
     const uint32_t hapticChannelCount = audio_channel_count_from_out_mask(
         channelMask & AUDIO_CHANNEL_HAPTIC_ALL);
@@ -2081,13 +2211,20 @@
         // When using haptic output, same audio format and sample rate are required.
         const uint32_t outputHapticChannelCount = audio_channel_count_from_out_mask(
             outputDesc->getChannelMask() & AUDIO_CHANNEL_HAPTIC_ALL);
-        if ((hapticChannelCount == 0) != (outputHapticChannelCount == 0)) {
+        // Skip if a haptic channel is requested but the output does not support it, or the
+        // output supports haptic but no haptic channel is requested and no orphan haptic
+        // effect exists.
+        if ((hapticChannelCount != 0 && outputHapticChannelCount == 0) ||
+            (hapticChannelCount == 0 && outputHapticChannelCount != 0 && !hasOrphanHaptic)) {
             continue;
         }
-        if (outputHapticChannelCount >= hapticChannelCount
-            && format == outputDesc->getFormat()
-            && samplingRate == outputDesc->getSamplingRate()) {
-                currentMatchCriteria[0] = outputHapticChannelCount;
+        // For audio-coupled-haptic playback, the framework performs no format conversion or
+        // resampling, so the client and the output thread must use the same
+        // format/channel/sampleRate. For the HapticGenerator effect case, format matching is
+        // not required.
+        if ((outputHapticChannelCount >= hapticChannelCount && format == outputDesc->getFormat() &&
+             samplingRate == outputDesc->getSamplingRate()) ||
+            (outputHapticChannelCount != 0 && hasOrphanHaptic)) {
+            currentMatchCriteria[0] = outputHapticChannelCount;
         }
 
         // functional flags match
@@ -2113,7 +2250,14 @@
 
         // sampling rate match
         if (samplingRate > SAMPLE_RATE_HZ_DEFAULT) {
-            currentMatchCriteria[4] = outputDesc->getSamplingRate();
+            int diff;  // avoid unsigned integer overflow.
+            __builtin_sub_overflow(outputDesc->getSamplingRate(), samplingRate, &diff);
+
+            // Prefer the closest output sampling rate greater than or equal to the target;
+            // if none exists, prefer the closest output sampling rate less than the target.
+            //
+            // The criterion is offset to make it non-negative.
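+            // For example, with a 48000 Hz request: a 48000 Hz output scores 200'000'000,
+            // a 96000 Hz output 199'952'000 and a 44100 Hz output 99'996'100.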
+            currentMatchCriteria[4] = diff >= 0 ? -diff + 200'000'000 : diff + 100'000'000;
         }
 
         // performance flags match
@@ -2160,6 +2304,20 @@
     ALOGV("startOutput() output %d, stream %d, session %d",
           outputDesc->mIoHandle, client->stream(), client->session());
 
+    if (com::android::media::audioserver::fix_concurrent_playback_behavior_with_bit_perfect_client()
+            && gHighPriorityUseCases.count(client->attributes().usage) != 0
+            && outputDesc->isBitPerfect()) {
+        // Usually, APM selects bit-perfect output for high priority use cases only when
+        // bit-perfect output is the only output that can be routed to the selected device.
+        // However, there is no need to play high priority use cases such as ringtone and alarm
+        // on the bit-perfect path. Reopen the output and return DEAD_OBJECT so that the client
+        // can attach to a new output.
+        ALOGD("%s: reopen bit-perfect output as high priority use case(%d) is starting",
+              __func__, client->stream());
+        reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        return DEAD_OBJECT;
+    }
+
     status_t status = outputDesc->start();
     if (status != NO_ERROR) {
         return status;
@@ -2178,7 +2336,6 @@
                 ALOGE("%s unable to open output with default config", __func__);
                 return status;
             }
-            desc->mUsePreferredMixerAttributes = true;
         }
         return status;
     }
@@ -2202,14 +2359,13 @@
                 if (desc == nullptr) {
                     return BAD_VALUE;
                 }
-                desc->mUsePreferredMixerAttributes = true;
+                desc->mPreferredAttrInfo = info;
                 // Intentionally return error to let the client side resending request for
                 // creating and starting.
                 return DEAD_OBJECT;
             }
             info->increaseActiveClient();
-            if (info->getActiveClientCount() == 1 &&
-                (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+            if (info->getActiveClientCount() == 1 && info->isBitPerfect()) {
                 // If it is first bit-perfect client, reroute all clients that will be routed to
                 // the bit-perfect sink so that it is guaranteed only bit-perfect stream is active.
                 PortHandleVector clientsToInvalidate;
@@ -2239,6 +2395,15 @@
         usleep(delayMs * 1000);
     }
 
+    if (status == NO_ERROR &&
+        outputDesc->mPreferredAttrInfo != nullptr &&
+        outputDesc->isBitPerfect() &&
+        com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        // A new client is started on bit-perfect output, update all clients internal mute.
+        updateClientsInternalMute(outputDesc);
+    }
+
     return status;
 }
 
@@ -2343,6 +2508,11 @@
              followsSameRouting(clientAttr, attributes_initializer(AUDIO_USAGE_NOTIFICATION)) ||
              (beaconMuteLatency > 0));
         uint32_t waitMs = beaconMuteLatency;
+        const bool needToCloseBitPerfectOutput =
+                (com::android::media::audioserver::
+                        fix_concurrent_playback_behavior_with_bit_perfect_client() &&
+                gHighPriorityUseCases.count(clientAttr.usage) != 0);
+        std::vector<sp<SwAudioOutputDescriptor>> outputsToReopen;
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (desc != outputDesc) {
@@ -2379,15 +2549,22 @@
                 // Note restoring AudioTracks onto this output needs to invoke
                 // a volume ramp if there is no mute.
                 requiresMuteCheck |= sharedDevice && isActive;
+
+                if (needToCloseBitPerfectOutput && desc->isBitPerfect()) {
+                    outputsToReopen.push_back(desc);
+                }
             }
         }
 
-        if (outputDesc->mUsePreferredMixerAttributes && devices != outputDesc->devices()) {
+        if (outputDesc->mPreferredAttrInfo != nullptr && devices != outputDesc->devices()) {
             // If the output is open with preferred mixer attributes, but the routed device is
             // changed when calling this function, returning DEAD_OBJECT to indicate routing
             // changed.
             return DEAD_OBJECT;
         }
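+        // Reopen the bit-perfect outputs collected above with their default configuration.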
+        for (auto& outputToReopen : outputsToReopen) {
+            reopenOutput(outputToReopen, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        }
         const uint32_t muteWaitMs =
                 setOutputDevices(__func__, outputDesc, devices, force, 0, nullptr,
                                  requiresMuteCheck);
@@ -2466,7 +2643,7 @@
                                     getAudioDeviceOutLeAudioUnicastSet()).isEmpty()))) {
                 DeviceVector newDevices = getNewOutputDevices(desc, false /*fromCache*/);
                 bool force = desc->devices() != newDevices;
-                if (desc->mUsePreferredMixerAttributes && force) {
+                if (desc->mPreferredAttrInfo != nullptr && force) {
                     // If the device is using preferred mixer attributes, the output need to reopen
                     // with default configuration when the new selected devices are different from
                     // current routing devices.
@@ -2514,12 +2691,21 @@
     if (outputDesc->devices().size() == 1) {
         sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
                 outputDesc->devices()[0]->getId(), client->strategy());
+        bool outputReopened = false;
         if (info != nullptr && info->getUid() == client->uid()) {
             info->decreaseActiveClient();
             if (info->getActiveClientCount() == 0) {
                 reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+                outputReopened = true;
             }
         }
+        if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client() &&
+            !outputReopened && outputDesc->isBitPerfect()) {
+            // Only need to update the clients' internal mute when the output is bit-perfect and it
+            // is not reopened.
+            updateClientsInternalMute(outputDesc);
+        }
     }
     return status;
 }
@@ -2591,7 +2777,7 @@
                     DeviceVector newDevices2 = getNewOutputDevices(desc, false /*fromCache*/);
                     bool force = desc->devices() != newDevices2;
 
-                    if (desc->mUsePreferredMixerAttributes && force) {
+                    if (desc->mPreferredAttrInfo != nullptr && force) {
                         // If the device is using preferred mixer attributes, the output need to
                         // reopen with default configuration when the new selected devices are
                         // different from current routing devices.
@@ -2689,7 +2875,8 @@
                                              audio_input_flags_t flags,
                                              audio_port_handle_t *selectedDeviceId,
                                              input_type_t *inputType,
-                                             audio_port_handle_t *portId)
+                                             audio_port_handle_t *portId,
+                                             uint32_t *virtualDeviceId)
 {
     ALOGV("%s() source %d, sampling rate %d, format %#x, channel mask %#x, session %d, "
           "flags %#x attributes=%s requested device ID %d",
@@ -2791,6 +2978,9 @@
         } else {
             *inputType = API_INPUT_MIX_EXT_POLICY_REROUTE;
         }
+        if (virtualDeviceId) {
+            *virtualDeviceId = policyMix->mVirtualDeviceId;
+        }
     } else {
         if (explicitRoutingDevice != nullptr) {
             device = explicitRoutingDevice;
@@ -2814,6 +3004,10 @@
             // meaning it receives audio injected into the framework, so the recorder doesn't
             // know about it and is therefore considered "legacy"
             *inputType = API_INPUT_LEGACY;
+
+            if (virtualDeviceId) {
+                *virtualDeviceId = policyMix->mVirtualDeviceId;
+            }
         } else if (audio_is_remote_submix_device(device->type())) {
             *inputType = API_INPUT_MIX_CAPTURE;
         } else if (device->type() == AUDIO_DEVICE_IN_TELEPHONY_RX) {
@@ -2845,6 +3039,11 @@
         goto error;
     }
 
+
+    if (policyMix != nullptr && virtualDeviceId != nullptr) {
+        *virtualDeviceId = policyMix->mVirtualDeviceId;
+    }
+
 exit:
 
     *selectedDeviceId = mAvailableInputDevices.contains(device) ?
@@ -2941,43 +3140,115 @@
         }
     }
 
+    bool isPreemptor = false;
     if (!profile->canOpenNewIo()) {
-        for (size_t i = 0; i < mInputs.size(); ) {
-            sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
-            if (desc->mProfile != profile) {
-                i++;
-                continue;
-            }
-            // if sound trigger, reuse input if used by other sound trigger on same session
-            // else
-            //    reuse input if active client app is not in IDLE state
-            //
-            RecordClientVector clients = desc->clientsList();
-            bool doClose = false;
-            for (const auto& client : clients) {
-                if (isSoundTrigger != client->isSoundTrigger()) {
+        if (com::android::media::audioserver::fix_input_sharing_logic()) {
+            //  First pick the best candidate for preemption (there may not be any):
+            //  - Preempt an input if:
+            //     - It has only strictly lower priority use cases than the new client
+            //     - It has equal priority use cases to the new client and either was not
+            //       opened as the result of a preemption or has been active since it was opened.
+            //  - Order the preemption candidates by inactive first, then by priority
+            sp<AudioInputDescriptor> closeCandidate;
+            int leastCloseRank = INT_MAX;
+            static const int sCloseActive = 0x100;
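+            // A lower close rank is preferred; active inputs are penalized by sCloseActive so
+            // that inactive inputs are selected first for preemption.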
+
+            for (size_t i = 0; i < mInputs.size(); i++) {
+                sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                if (desc->mProfile != profile) {
                     continue;
                 }
-                if (client->isSoundTrigger()) {
-                    if (session == client->session()) {
+                sp<RecordClientDescriptor> topPrioClient = desc->getHighestPriorityClient();
+                if (topPrioClient == nullptr) {
+                    continue;
+                }
+                int topPrio = source_priority(topPrioClient->source());
+                if (topPrio < source_priority(attributes.source)
+                      || (topPrio == source_priority(attributes.source)
+                          && !desc->isPreemptor())) {
+                    int closeRank = (desc->isActive() ? sCloseActive : 0) + topPrio;
+                    if (closeRank < leastCloseRank) {
+                        leastCloseRank = closeRank;
+                        closeCandidate = desc;
+                    }
+                }
+            }
+
+            if (closeCandidate != nullptr) {
+                closeInput(closeCandidate->mIoHandle);
+                // Mark the new input as the result of a preemption
+                // so that it will not be preempted later
+                isPreemptor = true;
+            } else {
+                // Then pick the best reusable input (there is always one).
+                // The order of preference is:
+                // 1) active inputs with same use case as the new client
+                // 2) inactive inputs with same use case
+                // 3) active inputs with different use cases
+                // 4) inactive inputs with different use cases
+                sp<AudioInputDescriptor> reuseCandidate;
+                int leastReuseRank = INT_MAX;
+                static const int sReuseDifferentUseCase = 0x100;
+
+                for (size_t i = 0; i < mInputs.size(); i++) {
+                    sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                    if (desc->mProfile != profile) {
+                        continue;
+                    }
+                    int reuseRank = sReuseDifferentUseCase;
+                    for (const auto& client: desc->getClientIterable()) {
+                        if (client->source() == attributes.source) {
+                            reuseRank = 0;
+                            break;
+                        }
+                    }
+                    reuseRank += desc->isActive() ? 0 : 1;
+                    if (reuseRank < leastReuseRank) {
+                        leastReuseRank = reuseRank;
+                        reuseCandidate = desc;
+                    }
+                }
+                return reuseCandidate->mIoHandle;
+            }
+        } else { // fix_input_sharing_logic()
+            for (size_t i = 0; i < mInputs.size(); ) {
+                sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                if (desc->mProfile != profile) {
+                    i++;
+                    continue;
+                }
+                // if sound trigger, reuse input if used by other sound trigger on same session
+                // else
+                //    reuse input if active client app is not in IDLE state
+                //
+                RecordClientVector clients = desc->clientsList();
+                bool doClose = false;
+                for (const auto& client : clients) {
+                    if (isSoundTrigger != client->isSoundTrigger()) {
+                        continue;
+                    }
+                    if (client->isSoundTrigger()) {
+                        if (session == client->session()) {
+                            return desc->mIoHandle;
+                        }
+                        continue;
+                    }
+                    if (client->active() && client->appState() != APP_STATE_IDLE) {
                         return desc->mIoHandle;
                     }
-                    continue;
+                    doClose = true;
                 }
-                if (client->active() && client->appState() != APP_STATE_IDLE) {
-                    return desc->mIoHandle;
+                if (doClose) {
+                    closeInput(desc->mIoHandle);
+                } else {
+                    i++;
                 }
-                doClose = true;
-            }
-            if (doClose) {
-                closeInput(desc->mIoHandle);
-            } else {
-                i++;
             }
         }
     }
 
-    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(profile, mpClientInterface);
+    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(
+            profile, mpClientInterface, isPreemptor);
 
     audio_config_t lConfig = AUDIO_CONFIG_INITIALIZER;
     lConfig.sample_rate = profileSamplingRate;
@@ -3169,7 +3440,12 @@
     ALOGV("%s %d", __FUNCTION__, input);
 
     inputDesc->removeClient(portId);
-    mEffects.putOrphanEffects(client->session(), input, &mInputs, mpClientInterface);
+
+    // If no more clients are present in this session, park its effects on an orphan chain
+    RecordClientVector clientsOnSession = inputDesc->getClientsForSession(client->session());
+    if (clientsOnSession.size() == 0) {
+        mEffects.putOrphanEffects(client->session(), input, &mInputs, mpClientInterface);
+    }
     if (inputDesc->getClientCount() > 0) {
         ALOGV("%s(%d) %zu clients remaining", __func__, portId, inputDesc->getClientCount());
         return;
@@ -3195,6 +3471,23 @@
     releaseInput(portId);
 }
 
+bool AudioPolicyManager::checkCloseInput(const sp<AudioInputDescriptor>& input) {
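+    // Return true (caller should close the input) if it has no clients, none of its supported
+    // devices is available, or a client's selected device is no longer supported; otherwise
+    // refresh the input's device routing and return false.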
+    if (input->clientsList().size() == 0
+            || !mAvailableInputDevices.containsAtLeastOne(input->supportedDevices())) {
+        return true;
+    }
+    for (const auto& client : input->clientsList()) {
+        sp<DeviceDescriptor> device =
+            mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
+                                                 client->session());
+        if (!input->supportedDevices().contains(device)) {
+            return true;
+        }
+    }
+    setInputDevice(input->mIoHandle, getNewInputDevice(input));
+    return false;
+}
+
 void AudioPolicyManager::checkCloseInputs() {
     // After connecting or disconnecting an input device, close input if:
     // - it has no client (was just opened to check profile)  OR
@@ -3203,35 +3496,37 @@
     // devices anymore. Otherwise update device selection
     std::vector<audio_io_handle_t> inputsToClose;
     for (size_t i = 0; i < mInputs.size(); i++) {
-        const sp<AudioInputDescriptor> input = mInputs.valueAt(i);
-        if (input->clientsList().size() == 0
-                || !mAvailableInputDevices.containsAtLeastOne(input->supportedDevices())) {
+        if (checkCloseInput(mInputs.valueAt(i))) {
             inputsToClose.push_back(mInputs.keyAt(i));
-        } else {
-            bool close = false;
-            for (const auto& client : input->clientsList()) {
-                sp<DeviceDescriptor> device =
-                    mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
-                                                         client->session());
-                if (!input->supportedDevices().contains(device)) {
-                    close = true;
-                    break;
-                }
-            }
-            if (close) {
-                inputsToClose.push_back(mInputs.keyAt(i));
-            } else {
-                setInputDevice(input->mIoHandle, getNewInputDevice(input));
-            }
         }
     }
-
     for (const audio_io_handle_t handle : inputsToClose) {
         ALOGV("%s closing input %d", __func__, handle);
         closeInput(handle);
     }
 }
 
+status_t AudioPolicyManager::setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                            const char *address __unused,
+                                                            bool enabled,
+                                                            audio_stream_type_t streamToDriveAbs)
+{
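+    // Record which stream's attributes drive absolute volume for this device type, or clear
+    // the entry when absolute volume is disabled for the device.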
+    if (!enabled) {
+        mAbsoluteVolumeDrivingStreams.erase(deviceType);
+        return NO_ERROR;
+    }
+
+    audio_attributes_t attributesToDriveAbs = mEngine->getAttributesForStreamType(streamToDriveAbs);
+    if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
+        ALOGW("%s: no attributes for stream %s, bailing out", __func__,
+              toString(streamToDriveAbs).c_str());
+        return BAD_VALUE;
+    }
+
+    mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
+    return NO_ERROR;
+}
+
 void AudioPolicyManager::initStreamVolume(audio_stream_type_t stream, int indexMin, int indexMax)
 {
     ALOGV("initStreamVolume() stream %d, min %d, max %d", stream , indexMin, indexMax);
@@ -3259,8 +3554,8 @@
         ALOGW("%s: no group for stream %s, bailing out", __func__, toString(stream).c_str());
         return NO_ERROR;
     }
-    ALOGV("%s: stream %s attributes=%s", __func__,
-          toString(stream).c_str(), toString(attributes).c_str());
+    ALOGV("%s: stream %s attributes=%s, index %d , device 0x%X", __func__,
+          toString(stream).c_str(), toString(attributes).c_str(), index, device);
     return setVolumeIndexForAttributes(attributes, index, device);
 }
 
@@ -3328,6 +3623,7 @@
     // requested device or one of the devices selected by the engine for this stream
     // - For default requested device (AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME), apply volume only if
     // no specific device volume value exists for currently selected device.
+    // - Only apply the volume if the requested device is the desired device for volume control.
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceTypeSet curDevices = desc->devices().types();
@@ -3347,7 +3643,8 @@
         if (device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) {
             curSrcDevices.insert(device);
             applyVolume = (curSrcDevices.find(
-                    Volume::getDeviceForVolume(curDevices)) != curSrcDevices.end());
+                    Volume::getDeviceForVolume(curDevices)) != curSrcDevices.end())
+                    && Volume::getDeviceForVolume(curSrcDevices) == device;
         } else {
             applyVolume = !curves.hasVolumeIndexForDevice(curSrcDevice);
         }
@@ -3417,7 +3714,9 @@
         if (isVolumeConsistentForCalls(vs, {mCallRxSourceClient->sinkDevice()->type()},
                 isVoiceVolSrc, isBtScoVolSrc, __func__)
                 && (isVoiceVolSrc || isBtScoVolSrc)) {
-            setVoiceVolume(index, curves, isVoiceVolSrc, 0);
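+            // Voice volume is not treated as host-managed when the call RX sink is a BLE
+            // output device.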
+            bool voiceVolumeManagedByHost = isVoiceVolSrc &&
+                    !audio_is_ble_out_device(mCallRxSourceClient->sinkDevice()->type());
+            setVoiceVolume(index, curves, voiceVolumeManagedByHost, 0);
         }
     }
 
@@ -3434,8 +3733,8 @@
     bool hasVoice = hasVoiceStream(volumeCurves.getStreamTypes());
     if (((index < volumeCurves.getVolumeIndexMin()) && !(hasVoice && index == 0)) ||
             (index > volumeCurves.getVolumeIndexMax())) {
-        ALOGD("%s: wrong index %d min=%d max=%d", __FUNCTION__, index,
-              volumeCurves.getVolumeIndexMin(), volumeCurves.getVolumeIndexMax());
+        ALOGE("%s: wrong index %d min=%d max=%d, device 0x%X", __FUNCTION__, index,
+              volumeCurves.getVolumeIndexMin(), volumeCurves.getVolumeIndexMax(), device);
         return BAD_VALUE;
     }
     if (!audio_is_output_device(device)) {
@@ -3567,7 +3866,7 @@
                                 int session,
                                 int id)
 {
-    if (session != AUDIO_SESSION_DEVICE) {
+    if (session != AUDIO_SESSION_DEVICE && io != AUDIO_IO_HANDLE_NONE) {
         ssize_t index = mOutputs.indexOfKey(io);
         if (index < 0) {
             index = mInputs.indexOfKey(io);
@@ -3668,6 +3967,7 @@
     status_t res = NO_ERROR;
     bool checkOutputs = false;
     sp<HwModule> rSubmixModule;
+    Vector<AudioMix> registeredMixes;
     // examine each mix's route type
     for (size_t i = 0; i < mixes.size(); i++) {
         AudioMix mix = mixes[i];
@@ -3791,11 +4091,19 @@
                 break;
             } else {
                 checkOutputs = true;
+                registeredMixes.add(mix);
             }
         }
     }
     if (res != NO_ERROR) {
-        unregisterPolicyMixes(mixes);
+        if (audio_flags::audio_mix_ownership()) {
+            // Only unregister the mixes that were actually registered, to avoid accidentally
+            // unregistering mixes that already existed.
+            unregisterPolicyMixes(registeredMixes);
+            registeredMixes.clear();
+        } else {
+            unregisterPolicyMixes(mixes);
+        }
     } else if (checkOutputs) {
         checkForDeviceAndOutputChanges();
         updateCallAndOutputRouting();
@@ -3829,15 +4137,22 @@
                 continue;
             }
 
-            for (auto device : {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
+            for (auto device: {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
                 if (getDeviceConnectionState(device, address.c_str()) ==
-                        AUDIO_POLICY_DEVICE_STATE_AVAILABLE)  {
-                    res = setDeviceConnectionStateInt(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                                                      address.c_str(), "remote-submix",
-                                                      AUDIO_FORMAT_DEFAULT);
-                    if (res != OK) {
+                    AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+                    status_t currentRes =
+                            setDeviceConnectionStateInt(device,
+                                                        AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                        address.c_str(),
+                                                        "remote-submix",
+                                                        AUDIO_FORMAT_DEFAULT);
+                    if (!audio_flags::audio_mix_ownership()) {
+                        res = currentRes;
+                    }
+                    if (currentRes != OK) {
                         ALOGE("Error making RemoteSubmix device unavailable for mix "
                               "with type %d, address %s", device, address.c_str());
+                        res = INVALID_OPERATION;
                     }
                 }
             }
@@ -3853,6 +4168,7 @@
             }
         }
     }
+
     if (res == NO_ERROR && checkOutputs) {
         checkForDeviceAndOutputChanges();
         updateCallAndOutputRouting();
@@ -3860,6 +4176,26 @@
     return res;
 }
 
+status_t AudioPolicyManager::getRegisteredPolicyMixes(std::vector<AudioMix>& _aidl_return) {
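+    // Test-only accessor gated by the audio_mix_test_api flag: returns a copy of all currently
+    // registered policy mixes.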
+    if (!audio_flags::audio_mix_test_api()) {
+        return INVALID_OPERATION;
+    }
+
+    _aidl_return.clear();
+    _aidl_return.reserve(mPolicyMixes.size());
+    for (const auto &policyMix: mPolicyMixes) {
+        _aidl_return.emplace_back(policyMix->mCriteria, policyMix->mMixType,
+                             policyMix->mFormat, policyMix->mRouteFlags, policyMix->mDeviceAddress,
+                             policyMix->mCbFlags);
+        _aidl_return.back().mDeviceType = policyMix->mDeviceType;
+        _aidl_return.back().mToken = policyMix->mToken;
+        _aidl_return.back().mVirtualDeviceId = policyMix->mVirtualDeviceId;
+    }
+
+    ALOGVV("%s() returning %zu registered mixes", __func__, _aidl_return.size());
+    return OK;
+}
+
 status_t AudioPolicyManager::updatePolicyMix(
             const AudioMix& mix,
             const std::vector<AudioMixMatchCriterion>& updatedCriteria) {
@@ -4026,7 +4362,7 @@
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
             bool forceRouting = !newDevices.isEmpty();
-            if (outputDesc->mUsePreferredMixerAttributes && newDevices != outputDesc->devices()) {
+            if (outputDesc->mPreferredAttrInfo != nullptr && newDevices != outputDesc->devices()) {
                 // If the device is using preferred mixer attributes, the output need to reopen
                 // with default configuration when the new selected devices are different from
                 // current routing devices.
@@ -4306,6 +4642,13 @@
 
     dst->appendFormat("\nPolicy Engine dump:\n");
     mEngine->dump(dst);
+
+    dst->appendFormat("\nAbsolute volume devices with driving streams:\n");
+    for (const auto &it : mAbsoluteVolumeDrivingStreams) {
+        dst->appendFormat("   - device type: %s, driving stream %d\n",
+                          dumpDeviceTypes({it.first}).c_str(),
+                          mEngine->getVolumeGroupForAttributes(it.second));
+    }
 }
 
 status_t AudioPolicyManager::dump(int fd)
@@ -4481,11 +4824,11 @@
             outputDevices = getMsdAudioOutDevices();
         }
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
-            if (!curProfile->isCompatibleProfile(outputDevices,
+            if (curProfile->getCompatibilityScore(outputDevices,
                     config->sample_rate, nullptr /*updatedSamplingRate*/,
                     config->format, nullptr /*updatedFormat*/,
                     config->channel_mask, nullptr /*updatedChannelMask*/,
-                    flags)) {
+                    flags) == IOProfile::NO_MATCH) {
                 continue;
             }
             // reject profiles not corresponding to a device currently available
@@ -4591,15 +4934,17 @@
     for (const auto& hwModule : mHwModules) {
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
             if (curProfile->hasDynamicAudioProfile()
-                    && curProfile->isCompatibleProfile(devices,
-                                                       mixerAttributes->config.sample_rate,
-                                                       nullptr /*updatedSamplingRate*/,
-                                                       mixerAttributes->config.format,
-                                                       nullptr /*updatedFormat*/,
-                                                       mixerAttributes->config.channel_mask,
-                                                       nullptr /*updatedChannelMask*/,
-                                                       flags,
-                                                       false /*exactMatchRequiredForInputFlags*/)) {
+                    && curProfile->getCompatibilityScore(
+                            devices,
+                            mixerAttributes->config.sample_rate,
+                            nullptr /*updatedSamplingRate*/,
+                            mixerAttributes->config.format,
+                            nullptr /*updatedFormat*/,
+                            mixerAttributes->config.channel_mask,
+                            nullptr /*updatedChannelMask*/,
+                            flags,
+                            false /*exactMatchRequiredForInputFlags*/)
+                            != IOProfile::NO_MATCH) {
                 profile = curProfile;
                 break;
             }
@@ -4625,7 +4970,7 @@
         const auto output = mOutputs.valueAt(i);
         if (output->mProfile == profile && output->devices().onlyContainsDevice(deviceDescriptor)) {
             if (output->isConfigurationMatched(mixerAttributes->config, flags)) {
-                output->mUsePreferredMixerAttributes = true;
+                output->mPreferredAttrInfo = mixerAttrInfo;
             } else {
                 for (const auto &client: output->getActiveClients()) {
                     if (client->uid() == uid && client->strategy() == strategy) {
@@ -4647,7 +4992,7 @@
             ALOGE("%s, failed to reopen output with preferred mixer attributes", __func__);
             continue;
         }
-        desc->mUsePreferredMixerAttributes = true;
+        desc->mPreferredAttrInfo = mixerAttrInfo;
     }
 
     return NO_ERROR;
@@ -4663,8 +5008,7 @@
     }
     if (activeBitPerfectPreferred) {
         for (auto [strategy, info] : it->second) {
-            if ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
-                && info->getActiveClientCount() != 0) {
+            if (info->isBitPerfect() && info->getActiveClientCount() != 0) {
                 return info;
             }
         }
@@ -4883,7 +5227,7 @@
             new SourceClientDescriptor(
                 portId, uid, attributes, *source, srcDevice, AUDIO_STREAM_PATCH,
                 mEngine->getProductStrategyForAttributes(attributes), toVolumeSource(attributes),
-                true);
+                true, false /*isCallRx*/, false /*isCallTx*/);
     sourceDesc->setPreferredDeviceId(sinkDevice->getId());
 
     status_t status =
@@ -5004,14 +5348,15 @@
                 return BAD_VALUE;
             }
 
-            if (!outputDesc->mProfile->isCompatibleProfile(DeviceVector(devDesc),
-                                                           patch->sources[0].sample_rate,
-                                                           NULL,  // updatedSamplingRate
-                                                           patch->sources[0].format,
-                                                           NULL,  // updatedFormat
-                                                           patch->sources[0].channel_mask,
-                                                           NULL,  // updatedChannelMask
-                                                           AUDIO_OUTPUT_FLAG_NONE /*FIXME*/)) {
+            if (outputDesc->mProfile->getCompatibilityScore(
+                    DeviceVector(devDesc),
+                    patch->sources[0].sample_rate,
+                    nullptr,  // updatedSamplingRate
+                    patch->sources[0].format,
+                    nullptr,  // updatedFormat
+                    patch->sources[0].channel_mask,
+                    nullptr,  // updatedChannelMask
+                    AUDIO_OUTPUT_FLAG_NONE /*FIXME*/) == IOProfile::NO_MATCH) {
                 ALOGV("%s profile not supported for device %08x", __func__, devDesc->type());
                 return INVALID_OPERATION;
             }
@@ -5059,17 +5404,18 @@
                 return BAD_VALUE;
             }
 
-            if (!inputDesc->mProfile->isCompatibleProfile(DeviceVector(device),
-                                                          patch->sinks[0].sample_rate,
-                                                          NULL, /*updatedSampleRate*/
-                                                          patch->sinks[0].format,
-                                                          NULL, /*updatedFormat*/
-                                                          patch->sinks[0].channel_mask,
-                                                          NULL, /*updatedChannelMask*/
-                                                          // FIXME for the parameter type,
-                                                          // and the NONE
-                                                          (audio_output_flags_t)
-                                                            AUDIO_INPUT_FLAG_NONE)) {
+            if (inputDesc->mProfile->getCompatibilityScore(
+                    DeviceVector(device),
+                    patch->sinks[0].sample_rate,
+                    nullptr, /*updatedSampleRate*/
+                    patch->sinks[0].format,
+                    nullptr, /*updatedFormat*/
+                    patch->sinks[0].channel_mask,
+                    nullptr, /*updatedChannelMask*/
+                    // FIXME for the parameter type,
+                    // and the NONE
+                    (audio_output_flags_t)
+                    AUDIO_INPUT_FLAG_NONE) == IOProfile::NO_MATCH) {
                 return INVALID_OPERATION;
             }
             // TODO: reconfigure output format and channels here
@@ -5213,7 +5559,7 @@
                         outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
                         // for volume control, we may need a valid stream
                         srcMixPortConfig.ext.mix.usecase.stream =
-                            (!sourceDesc->isInternal() || isCallTxAudioSource(sourceDesc)) ?
+                            (!sourceDesc->isInternal() || sourceDesc->isCallTx()) ?
                                     mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
                                     AUDIO_STREAM_PATCH;
                         patchBuilder.addSource(srcMixPortConfig);
@@ -5462,7 +5808,7 @@
             invalidateStreams(mEngine->getStreamTypesForProductStrategy(ps));
         } else {
             DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
-            if (outputDesc->mUsePreferredMixerAttributes && outputDesc->devices() != newDevices) {
+            if (outputDesc->mPreferredAttrInfo != nullptr && outputDesc->devices() != newDevices) {
                 // If the device is using preferred mixer attributes, the output need to reopen
                 // with default configuration when the new selected devices are different from
                 // current routing devices.
@@ -5551,7 +5897,16 @@
 status_t AudioPolicyManager::startAudioSource(const struct audio_port_config *source,
                                               const audio_attributes_t *attributes,
                                               audio_port_handle_t *portId,
-                                              uid_t uid, bool internal)
+                                              uid_t uid) {
+    return startAudioSourceInternal(source, attributes, portId, uid,
+                                    false /*internal*/, false /*isCallRx*/, 0 /*delayMs*/);
+}
+
+status_t AudioPolicyManager::startAudioSourceInternal(const struct audio_port_config *source,
+                                              const audio_attributes_t *attributes,
+                                              audio_port_handle_t *portId,
+                                              uid_t uid, bool internal, bool isCallRx,
+                                              uint32_t delayMs)
 {
     ALOGV("%s", __FUNCTION__);
     *portId = AUDIO_PORT_HANDLE_NONE;
@@ -5584,16 +5939,17 @@
         new SourceClientDescriptor(*portId, uid, *attributes, *source, srcDevice,
                                    mEngine->getStreamTypeForAttributes(*attributes),
                                    mEngine->getProductStrategyForAttributes(*attributes),
-                                   toVolumeSource(*attributes), internal);
+                                   toVolumeSource(*attributes), internal, isCallRx, false);
 
-    status_t status = connectAudioSource(sourceDesc);
+    status_t status = connectAudioSource(sourceDesc, delayMs);
     if (status == NO_ERROR) {
         mAudioSources.add(*portId, sourceDesc);
     }
     return status;
 }
 
-status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc)
+status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc,
+                                                uint32_t delayMs)
 {
     ALOGV("%s handle %d", __FUNCTION__, sourceDesc->portId());
 
@@ -5619,7 +5975,7 @@
     audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
 
     return connectAudioSourceToSink(
-                sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, 0 /*delayMs*/);
+                sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, delayMs);
 }
 
 status_t AudioPolicyManager::stopAudioSource(audio_port_handle_t portId)
@@ -5677,7 +6033,8 @@
 float AudioPolicyManager::getStreamVolumeDB(
         audio_stream_type_t stream, int index, audio_devices_t device)
 {
-    return computeVolume(getVolumeCurves(stream), toVolumeSource(stream), index, {device});
+    return computeVolume(getVolumeCurves(stream), toVolumeSource(stream), index,
+                         {device}, /* adjustAttenuation= */false);
 }
 
 status_t AudioPolicyManager::getSurroundFormats(unsigned int *numSurroundFormats,
@@ -6001,11 +6358,15 @@
     // The caller can have the audio config criteria ignored by either passing a null ptr or
     // the AUDIO_CONFIG_INITIALIZER value.
     // If an audio config is specified, current policy is to only allow spatialization for
-    // some positional channel masks and PCM format
+    // some positional channel masks and PCM format, and for stereo only if low latency
+    // performance mode is not requested.
 
     if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
+        static const bool stereo_spatialization_prop_enabled =
+                property_get_bool("ro.audio.stereo_spatialization_enabled", false);
         const bool channel_mask_spatialized =
-                com_android_media_audio_stereo_spatialization()
+                (stereo_spatialization_prop_enabled
+                        && com_android_media_audio_stereo_spatialization())
                 ? audio_channel_mask_contains_stereo(config->channel_mask)
                 : audio_is_channel_mask_spatialized(config->channel_mask);
         if (!channel_mask_spatialized) {
@@ -6014,6 +6375,10 @@
         if (!audio_is_linear_pcm(config->format)) {
             return false;
         }
+        if (config->channel_mask == AUDIO_CHANNEL_OUT_STEREO
+                && ((attr->flags & AUDIO_FLAG_LOW_LATENCY) != 0)) {
+            return false;
+        }
     }
 
     sp<IOProfile> profile =
@@ -6025,6 +6390,34 @@
     return true;
 }
 
+// The Spatializer output is compatible with haptic use cases if:
+// 1. the Spatializer output thread supports haptics, and its format/sampleRate match the
+//    client's when the client requested haptic channel bits, or
+// 2. the Spatializer output thread does not support haptics, and the client did not request
+//    haptics, either by including haptic channel bits or by creating a HapticGenerator effect
+//    for the same session.
+bool AudioPolicyManager::checkHapticCompatibilityOnSpatializerOutput(
+        const audio_config_t* config, audio_session_t sessionId) const {
+    const auto clientHapticChannel =
+            audio_channel_count_from_out_mask(config->channel_mask & AUDIO_CHANNEL_HAPTIC_ALL);
+    const auto threadOutputHapticChannel = audio_channel_count_from_out_mask(
+            mSpatializerOutput->getChannelMask() & AUDIO_CHANNEL_HAPTIC_ALL);
+
+    if (threadOutputHapticChannel) {
+        // Check that format and sampleRate match if the client's haptic channel mask is set.
+        if (clientHapticChannel) {
+            return mSpatializerOutput->getFormat() == config->format &&
+                   mSpatializerOutput->getSamplingRate() == config->sample_rate;
+        }
+        return true;
+    } else {
+        // If the Spatializer output channel mask does not have haptic channel bits, haptic use
+        // cases (the client channel mask includes haptic bits, or a HapticGenerator effect was
+        // created for this session) are not supported.
+        return clientHapticChannel == 0 &&
+               !mEffects.hasOrphansForSession(sessionId, FX_IID_HAPTICGENERATOR);
+    }
+}
+
 void AudioPolicyManager::checkVirtualizerClientRoutes() {
     std::set<audio_stream_type_t> streamsToInvalidate;
     for (size_t i = 0; i < mOutputs.size(); i++) {
@@ -6313,13 +6706,22 @@
             if (!mConfig->getOutputDevices().contains(supportedDevice)) {
                 continue;
             }
+
+            if (outProfile->isMmap() && !outProfile->hasDynamicAudioProfile()
+                && availProfileDevices.areAllDevicesAttached()) {
+                ALOGV("%s skip opening output for mmap profile %s", __func__,
+                        outProfile->getTagName().c_str());
+                continue;
+            }
+
             sp<SwAudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(outProfile,
                                                                                  mpClientInterface);
             audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+            audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
             status_t status = outputDesc->open(nullptr /* halConfig */, nullptr /* mixerConfig */,
                                                DeviceVector(supportedDevice),
                                                AUDIO_STREAM_DEFAULT,
-                                               AUDIO_OUTPUT_FLAG_NONE, &output);
+                                               AUDIO_OUTPUT_FLAG_NONE, &output, attributes);
             if (status != NO_ERROR) {
                 ALOGW("Cannot open output stream for devices %s on hw module %s",
                       supportedDevice->toString().c_str(), hwModule->getName());
@@ -6372,19 +6774,27 @@
                     __func__, inProfile->getTagName().c_str());
                 continue;
             }
-            sp<AudioInputDescriptor> inputDesc =
-                    new AudioInputDescriptor(inProfile, mpClientInterface);
+
+            if (inProfile->isMmap() && !inProfile->hasDynamicAudioProfile()
+                && availProfileDevices.areAllDevicesAttached()) {
+                ALOGV("%s skip opening input for mmap profile %s", __func__,
+                        inProfile->getTagName().c_str());
+                continue;
+            }
+
+            sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(
+                    inProfile, mpClientInterface, false /*isPreemptor*/);
 
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
             status_t status = inputDesc->open(nullptr,
                                               availProfileDevices.itemAt(0),
                                               AUDIO_SOURCE_MIC,
-                                              AUDIO_INPUT_FLAG_NONE,
+                                              (audio_input_flags_t) inProfile->getFlags(),
                                               &input);
             if (status != NO_ERROR) {
-                ALOGW("Cannot open input stream for device %s on hw module %s",
-                      availProfileDevices.toString().c_str(),
-                      hwModule->getName());
+                ALOGW("%s: Cannot open input stream for device %s for profile %s on hw module %s",
+                        __func__, availProfileDevices.toString().c_str(),
+                        inProfile->getTagName().c_str(), hwModule->getName());
                 continue;
             }
             for (const auto &device : availProfileDevices) {
@@ -6409,6 +6819,15 @@
         if ((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0
                 && !isOutputOnlyAvailableRouteToSomeDevice(desc)) {
             outputsClosed.push_back(desc->mIoHandle);
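+            // Release any audio patch still attached to this spatializer output before closing
+            // it, and notify listeners that the patch list changed.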
+            nextAudioPortGeneration();
+            ssize_t index = mAudioPatches.indexOfKey(desc->getPatchHandle());
+            if (index >= 0) {
+                sp<AudioPatch> patchDesc = mAudioPatches.valueAt(index);
+                (void) /*status_t status*/ mpClientInterface->releaseAudioPatch(
+                            patchDesc->getAfHandle(), 0);
+                mAudioPatches.removeItemsAt(index);
+                mpClientInterface->onAudioPatchListUpdate();
+            }
             desc->close();
         }
     }
@@ -6483,8 +6902,8 @@
                 sp<IOProfile> profile = hwModule->getOutputProfiles()[j];
                 if (profile->supportsDevice(device)) {
                     profiles.add(profile);
-                    ALOGV("checkOutputsForDevice(): adding profile %zu from module %s",
-                          j, hwModule->getName());
+                    ALOGV("%s(): adding profile %s from module %s",
+                            __func__, profile->getTagName().c_str(), hwModule->getName());
                 }
             }
         }
@@ -6517,7 +6936,11 @@
             if (j != outputs.size()) {
                 continue;
             }
-
+            if (profile->isMmap() && !profile->hasDynamicAudioProfile()) {
+                ALOGV("%s skip opening output for mmap profile %s",
+                      __func__, profile->getTagName().c_str());
+                continue;
+            }
             if (!profile->canOpenNewIo()) {
                 ALOGW("Max Output number %u already opened for this profile %s",
                       profile->maxOpenCount, profile->getTagName().c_str());
@@ -6578,9 +7001,8 @@
                 if (!profile->supportsDevice(device)) {
                     continue;
                 }
-                ALOGV("checkOutputsForDevice(): "
-                        "clearing direct output profile %zu on module %s",
-                        j, hwModule->getName());
+                ALOGV("%s(): clearing direct output profile %s on module %s",
+                        __func__, profile->getTagName().c_str(), hwModule->getName());
                 profile->clearAudioProfiles();
                 if (!profile->hasDynamicAudioProfile()) {
                     continue;
@@ -6609,14 +7031,14 @@
 status_t AudioPolicyManager::checkInputsForDevice(const sp<DeviceDescriptor>& device,
                                                   audio_policy_dev_state_t state)
 {
-    sp<AudioInputDescriptor> desc;
-
     if (audio_device_is_digital(device->type())) {
         // erase all current sample rates, formats and channel masks
         device->clearAudioProfiles();
     }
 
     if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+        sp<AudioInputDescriptor> desc;
+
         // first call getAudioPort to get the supported attributes from the HAL
         struct audio_port_v7 port = {};
         device->toAudioPort(&port);
@@ -6635,8 +7057,8 @@
 
                 if (profile->supportsDevice(device)) {
                     profiles.add(profile);
-                    ALOGV("checkInputsForDevice(): adding profile %zu from module %s",
-                          profile_index, hwModule->getName());
+                    ALOGV("%s : adding profile %s from module %s", __func__,
+                          profile->getTagName().c_str(), hwModule->getName());
                 }
             }
         }
@@ -6668,15 +7090,22 @@
                 continue;
             }
 
+            if (profile->isMmap() && !profile->hasDynamicAudioProfile()) {
+                ALOGV("%s skip opening input for mmap profile %s",
+                      __func__, profile->getTagName().c_str());
+                continue;
+            }
             if (!profile->canOpenNewIo()) {
-                ALOGW("Max Input number %u already opened for this profile %s",
-                      profile->maxOpenCount, profile->getTagName().c_str());
+                ALOGW("%s Max Input number %u already opened for this profile %s",
+                      __func__, profile->maxOpenCount, profile->getTagName().c_str());
                 continue;
             }
 
-            desc = new AudioInputDescriptor(profile, mpClientInterface);
+            desc = new AudioInputDescriptor(profile, mpClientInterface, false /*isPreemptor*/);
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
-            status = desc->open(nullptr, device, AUDIO_SOURCE_MIC, AUDIO_INPUT_FLAG_NONE, &input);
+            ALOGV("%s opening input for profile %s", __func__, profile->getTagName().c_str());
+            status = desc->open(nullptr, device, AUDIO_SOURCE_MIC,
+                                (audio_input_flags_t) profile->getFlags(), &input);
 
             if (status == NO_ERROR) {
                 const String8& address = String8(device->address().c_str());
@@ -6687,7 +7116,8 @@
                 }
                 updateAudioProfiles(device, input, profile);
                 if (!profile->hasValidAudioProfile()) {
-                    ALOGW("checkInputsForDevice() direct input missing param");
+                    ALOGW("%s direct input missing param for profile %s", __func__,
+                            profile->getTagName().c_str());
                     desc->close();
                     input = AUDIO_IO_HANDLE_NONE;
                 }
@@ -6698,15 +7128,22 @@
             } // endif input != 0
 
             if (input == AUDIO_IO_HANDLE_NONE) {
-                ALOGW("%s could not open input for device %s", __func__,
-                       device->toString().c_str());
+                ALOGW("%s could not open input for device %s on profile %s", __func__,
+                       device->toString().c_str(), profile->getTagName().c_str());
                 profiles.removeAt(profile_index);
                 profile_index--;
             } else {
                 if (audio_device_is_digital(device->type())) {
                     device->importAudioPortAndPickAudioProfile(profile);
                 }
-                ALOGV("checkInputsForDevice(): adding input %d", input);
+                ALOGV("%s: adding input %d for profile %s", __func__,
+                        input, profile->getTagName().c_str());
+
+                if (checkCloseInput(desc)) {
+                    ALOGV("%s: closing input %d for profile %s", __func__,
+                            input, profile->getTagName().c_str());
+                    closeInput(input);
+                }
             }
         } // end scan profiles
 
@@ -6723,8 +7160,8 @@
                  profile_index++) {
                 sp<IOProfile> profile = hwModule->getInputProfiles()[profile_index];
                 if (profile->supportsDevice(device)) {
-                    ALOGV("checkInputsForDevice(): clearing direct input profile %zu on module %s",
-                            profile_index, hwModule->getName());
+                    ALOGV("%s: clearing direct input profile %s on module %s", __func__,
+                            profile->getTagName().c_str(), hwModule->getName());
                     profile->clearAudioProfiles();
                 }
             }
@@ -6789,8 +7226,7 @@
         closingOutput->stop();
     }
     closingOutput->close();
-    if ((closingOutput->getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT)
-            == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+    if (closingOutput->isBitPerfect()) {
         for (const auto device : closingOutput->devices()) {
             device->setPreferredConfig(nullptr);
         }
@@ -6822,6 +7258,10 @@
             setMsdOutputPatches();
         }
     }
+
+    if (closingOutput->mPreferredAttrInfo != nullptr) {
+        closingOutput->mPreferredAttrInfo->resetActiveClient();
+    }
 }
 
 void AudioPolicyManager::closeInput(audio_io_handle_t input)
@@ -6912,8 +7352,8 @@
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
         if (sourceDesc != nullptr && followsSameRouting(attr, sourceDesc->attributes())
                 && sourceDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE
-                && !isCallRxAudioSource(sourceDesc) && !sourceDesc->isInternal()) {
-            connectAudioSource(sourceDesc);
+                && !sourceDesc->isCallRx() && !sourceDesc->isInternal()) {
+            connectAudioSource(sourceDesc, 0 /*delayMs*/);
         }
     }
 }
@@ -7019,8 +7459,8 @@
                 }
             }
             sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
-            if (source != nullptr && !isCallRxAudioSource(source) && !source->isInternal()) {
-                connectAudioSource(source);
+            if (source != nullptr && !source->isCallRx() && !source->isInternal()) {
+                connectAudioSource(source, 0 /*delayMs*/);
             }
         }
 
@@ -7218,7 +7658,6 @@
     DeviceVector devices;
     for (const auto &productStrategy : mEngine->getOrderedProductStrategies()) {
         StreamTypeVector streams = mEngine->getStreamTypesForProductStrategy(productStrategy);
-        auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
         auto hasStreamActive = [&](auto stream) {
             return hasStream(streams, stream) && isStreamActive(stream, 0);
         };
@@ -7243,6 +7682,7 @@
                 mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc))) {
             // Retrieval of devices for voice DL is done on primary output profile, cannot
             // check the route (would force modifying configuration file for this profile)
+            auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
             devices = mEngine->getOutputDevicesForAttributes(attr, nullptr, fromCache);
             break;
         }
@@ -7684,9 +8124,6 @@
     // Choose an input profile based on the requested capture parameters: select the first available
     // profile supporting all requested parameters.
     // The flags can be ignored if they don't contain a must-match flag.
-    //
-    // TODO: perhaps isCompatibleProfile should return a "matching" score so we can return
-    // the best matching profile, not the first one.
 
     using underlying_input_flag_t = std::underlying_type_t<audio_input_flags_t>;
     const underlying_input_flag_t mustMatchFlag = AUDIO_INPUT_FLAG_MMAP_NOIRQ |
@@ -7703,27 +8140,35 @@
             for (const auto& profile : hwModule->getInputProfiles()) {
                 // profile->log();
                 //updatedFormat = format;
-                if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
-                                                 &samplingRate  /*updatedSamplingRate*/,
-                                                 format,
-                                                 &format,       /*updatedFormat*/
-                                                 channelMask,
-                                                 &channelMask   /*updatedChannelMask*/,
-                                                 // FIXME ugly cast
-                                                 (audio_output_flags_t) flags,
-                                                 true /*exactMatchRequiredForInputFlags*/)) {
+                if (profile->getCompatibilityScore(
+                        DeviceVector(device),
+                        samplingRate,
+                        &updatedSamplingRate,
+                        format,
+                        &updatedFormat,
+                        channelMask,
+                        &updatedChannelMask,
+                        // FIXME ugly cast
+                        (audio_output_flags_t) flags,
+                        true /*exactMatchRequiredForInputFlags*/) == IOProfile::EXACT_MATCH) {
+                    samplingRate = updatedSamplingRate;
+                    format = updatedFormat;
+                    channelMask = updatedChannelMask;
                     return profile;
                 }
-                if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
-                                                 samplingRate,
-                                                 &updatedSamplingRate,
-                                                 format,
-                                                 &updatedFormat,
-                                                 channelMask,
-                                                 &updatedChannelMask,
-                                                 // FIXME ugly cast
-                                                 (audio_output_flags_t) flags,
-                                                 false /*exactMatchRequiredForInputFlags*/)) {
+                if (firstInexact == nullptr
+                        && profile->getCompatibilityScore(
+                                DeviceVector(device),
+                                samplingRate,
+                                &updatedSamplingRate,
+                                format,
+                                &updatedFormat,
+                                channelMask,
+                                &updatedChannelMask,
+                                // FIXME ugly cast
+                                (audio_output_flags_t) flags,
+                                false /*exactMatchRequiredForInputFlags*/)
+                                != IOProfile::NO_MATCH) {
                     firstInexact = profile;
                 }
             }
@@ -7750,12 +8195,69 @@
     return nullptr;
 }
 
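+// When a device in mAbsoluteVolumeDrivingStreams applies absolute volume itself (e.g. an AVRCP
+// sink), the attenuation of the stream driving the absolute volume is left to the device, and
+// other volume sources are shifted by the remaining headroom of that driving stream so the
+// attenuation is not applied twice.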
+float AudioPolicyManager::adjustDeviceAttenuationForAbsVolume(IVolumeCurves &curves,
+                                                              VolumeSource volumeSource,
+                                                              int index,
+                                                              const DeviceTypeSet &deviceTypes)
+{
+    audio_devices_t volumeDevice = Volume::getDeviceForVolume(deviceTypes);
+    device_category deviceCategory = Volume::getDeviceCategory({volumeDevice});
+    float volumeDb = curves.volIndexToDb(deviceCategory, index);
+
+    if (com_android_media_audio_abs_volume_index_fix()) {
+        if (mAbsoluteVolumeDrivingStreams.find(volumeDevice) !=
+            mAbsoluteVolumeDrivingStreams.end()) {
+            audio_attributes_t attributesToDriveAbs = mAbsoluteVolumeDrivingStreams[volumeDevice];
+            auto groupToDriveAbs = mEngine->getVolumeGroupForAttributes(attributesToDriveAbs);
+            if (groupToDriveAbs == VOLUME_GROUP_NONE) {
+                ALOGD("%s: no group matching with %s", __FUNCTION__,
+                      toString(attributesToDriveAbs).c_str());
+                return volumeDb;
+            }
+
+            float volumeDbMax = curves.volIndexToDb(deviceCategory, curves.getVolumeIndexMax());
+            VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
+            if (vsToDriveAbs == volumeSource) {
+                // attenuation is applied by the abs volume controller
+                return (index != 0) ? volumeDbMax : volumeDb;
+            } else {
+                IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
+                int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
+                float volumeDbAbs = curvesAbs.volIndexToDb(deviceCategory, indexAbs);
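+                // Shift this source by the headroom left on the abs-volume driving source (its
+                // max level minus its current level), capped at this source's own maximum.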
+                float volumeDbAbsMax = curvesAbs.volIndexToDb(deviceCategory,
+                                                              curvesAbs.getVolumeIndexMax());
+                float newVolumeDb = fminf(volumeDb + volumeDbAbsMax - volumeDbAbs, volumeDbMax);
+                ALOGV("%s: abs vol stream %d with attenuation %f is adjusting stream %d from "
+                      "attenuation %f to attenuation %f %f", __func__, vsToDriveAbs, volumeDbAbs,
+                      volumeSource, volumeDb, newVolumeDb, volumeDbMax);
+                return newVolumeDb;
+            }
+        }
+        return volumeDb;
+    } else {
+        return volumeDb;
+    }
+}
+
 float AudioPolicyManager::computeVolume(IVolumeCurves &curves,
                                         VolumeSource volumeSource,
                                         int index,
-                                        const DeviceTypeSet& deviceTypes)
+                                        const DeviceTypeSet& deviceTypes,
+                                        bool adjustAttenuation,
+                                        bool computeInternalInteraction)
 {
-    float volumeDb = curves.volIndexToDb(Volume::getDeviceCategory(deviceTypes), index);
+    float volumeDb;
+    if (adjustAttenuation) {
+        volumeDb = adjustDeviceAttenuationForAbsVolume(curves, volumeSource, index, deviceTypes);
+    } else {
+        volumeDb = curves.volIndexToDb(Volume::getDeviceCategory(deviceTypes), index);
+    }
+    ALOGV("%s volume source %d, index %d,  devices %s, compute internal %b ", __func__,
+          volumeSource, index, dumpDeviceTypes(deviceTypes).c_str(), computeInternalInteraction);
+
+    if (!computeInternalInteraction) {
+        return volumeDb;
+    }
 
     // handle the case of accessibility active while a ringtone is playing: if the ringtone is much
     // louder than the accessibility prompt, the prompt cannot be heard, thus masking the touch
@@ -7766,14 +8268,12 @@
     const auto musicVolumeSrc = toVolumeSource(AUDIO_STREAM_MUSIC, false);
     const auto alarmVolumeSrc = toVolumeSource(AUDIO_STREAM_ALARM, false);
     const auto a11yVolumeSrc = toVolumeSource(AUDIO_STREAM_ACCESSIBILITY, false);
-    // Verify that the current volume source is not the ringer volume to prevent recursively
-    // calling to compute volume. This could happen in cases where a11y and ringer sounds belong
-    // to the same volume group.
-    if (volumeSource != ringVolumeSrc && volumeSource == a11yVolumeSrc
-            && (AUDIO_MODE_RINGTONE == mEngine->getPhoneState()) &&
+    if (AUDIO_MODE_RINGTONE == mEngine->getPhoneState() &&
             mOutputs.isActive(ringVolumeSrc, 0)) {
         auto &ringCurves = getVolumeCurves(AUDIO_STREAM_RING);
-        const float ringVolumeDb = computeVolume(ringCurves, ringVolumeSrc, index, deviceTypes);
+        const float ringVolumeDb = computeVolume(ringCurves, ringVolumeSrc, index, deviceTypes,
+                                                 adjustAttenuation,
+                                                 /* computeInternalInteraction= */false);
         return ringVolumeDb - 4 > volumeDb ? ringVolumeDb - 4 : volumeDb;
     }
 
@@ -7790,7 +8290,8 @@
         auto &voiceCurves = getVolumeCurves(callVolumeSrc);
         int voiceVolumeIndex = voiceCurves.getVolumeIndex(deviceTypes);
         const float maxVoiceVolDb =
-                computeVolume(voiceCurves, callVolumeSrc, voiceVolumeIndex, deviceTypes)
+                computeVolume(voiceCurves, callVolumeSrc, voiceVolumeIndex, deviceTypes,
+                        adjustAttenuation, /* computeInternalInteraction= */false)
                 + IN_CALL_EARPIECE_HEADROOM_DB;
         // FIXME: Workaround for call screening applications until a proper audio mode is defined
         // to support this scenario : Exempt the RING stream from the audio cap if the audio was
@@ -7832,12 +8333,8 @@
         // when the phone is ringing we must consider that music could have been paused just before
         // by the music application and behave as if music was active if the last music track was
         // just stopped
-        // Verify that the current volume source is not the music volume to prevent recursively
-        // calling to compute volume. This could happen in cases where music and
-        // (alarm, ring, notification, system, etc.) sounds belong to the same volume group.
-        if (volumeSource != musicVolumeSrc &&
-            (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY)
-                || mLimitRingtoneVolume)) {
+        if (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY)
+                || mLimitRingtoneVolume) {
             volumeDb += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
             DeviceTypeSet musicDevice =
                     mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
@@ -7846,7 +8343,9 @@
             float musicVolDb = computeVolume(musicCurves,
                                              musicVolumeSrc,
                                              musicCurves.getVolumeIndex(musicDevice),
-                                             musicDevice);
+                                             musicDevice,
+                                              adjustAttenuation,
+                                              /* computeInternalInteraction= */ false);
             float minVolDb = (musicVolDb > SONIFICATION_HEADSET_VOLUME_MIN_DB) ?
                         musicVolDb : SONIFICATION_HEADSET_VOLUME_MIN_DB;
             if (volumeDb > minVolDb) {
@@ -7855,9 +8354,10 @@
             }
             if (Volume::getDeviceForVolume(deviceTypes) != AUDIO_DEVICE_OUT_SPEAKER
                     &&  !Intersection(deviceTypes, {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
-                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES}).empty()) {
-                // on A2DP, also ensure notification volume is not too low compared to media when
-                // intended to be played
+                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,
+                        AUDIO_DEVICE_OUT_BLE_HEADSET}).empty()) {
+                // on A2DP/BLE, also ensure notification volume is not too low compared to media
+                // when intended to be played.
                 if ((volumeDb > -96.0f) &&
                         (musicVolDb - SONIFICATION_A2DP_MAX_MEDIA_DIFF_DB > volumeDb)) {
                     ALOGV("%s increasing volume for volume source=%d device=%s from %f to %f",
@@ -7928,7 +8428,7 @@
     if (deviceTypes.empty()) {
         deviceTypes = outputDesc->devices().types();
         index = curves.getVolumeIndex(deviceTypes);
-        ALOGD("%s if deviceTypes is change from none to device %s, need get index %d",
+        ALOGV("%s if deviceTypes is change from none to device %s, need get index %d",
                 __func__, dumpDeviceTypes(deviceTypes).c_str(), index);
     }
 
@@ -7939,9 +8439,10 @@
 
     float volumeDb = computeVolume(curves, volumeSource, index, deviceTypes);
     if (outputDesc->isFixedVolume(deviceTypes) ||
-            // Force VoIP volume to max for bluetooth SCO device except if muted
+            // Force VoIP volume to max for bluetooth SCO/BLE device except if muted
             (index != 0 && (isVoiceVolSrc || isBtScoVolSrc) &&
-                    isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device))) {
+                    (isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device)
+                    || isSingleDeviceType(deviceTypes, audio_is_ble_out_device)))) {
         volumeDb = 0.0f;
     }
     const bool muted = (index == 0) && (volumeDb != 0.0f);
@@ -7949,17 +8450,19 @@
             deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
-        setVoiceVolume(index, curves, isVoiceVolSrc, delayMs);
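+        // For BLE outputs the sink applies the voice attenuation itself, so the host keeps voice
+        // volume at maximum instead of scaling it by the index (see setVoiceVolume()).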
+        bool voiceVolumeManagedByHost = isVoiceVolSrc &&
+                !isSingleDeviceType(deviceTypes, audio_is_ble_out_device);
+        setVoiceVolume(index, curves, voiceVolumeManagedByHost, delayMs);
     }
     return NO_ERROR;
 }
 
 void AudioPolicyManager::setVoiceVolume(
-        int index, IVolumeCurves &curves, bool isVoiceVolSrc, int delayMs) {
+        int index, IVolumeCurves &curves, bool voiceVolumeManagedByHost, int delayMs) {
     float voiceVolume;
-    // Force voice volume to max or mute for Bluetooth SCO as other attenuations are managed
+    // Force voice volume to max or mute for Bluetooth SCO/BLE as other attenuations are managed
     // by the headset
-    if (isVoiceVolSrc) {
+    if (voiceVolumeManagedByHost) {
         voiceVolume = (float)index/(float)curves.getVolumeIndexMax();
     } else {
         voiceVolume = index == 0 ? 0.0 : 1.0;
@@ -7976,11 +8479,19 @@
                                                    bool& isBtScoVolSrc,
                                                    const char* caller) {
     const VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
-    const VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
+    isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
+
     const bool isScoRequested = isScoRequestedForComm();
     const bool isHAUsed = isHearingAidUsedForComm();
 
-    isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
+    if (com_android_media_audio_replace_stream_bt_sco()) {
+        ALOGV("%s stream bt sco is replaced, no volume consistency check for calls", __func__);
+        isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource) &&
+                        (isScoRequested || isHAUsed);
+        return true;
+    }
+
+    const VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
     isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (btScoVolSrc == volumeSource);
 
     if ((callVolSrc != btScoVolSrc) &&
@@ -8137,7 +8648,7 @@
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
         if (sourceDesc->isConnected() && (sourceDesc->srcDevice()->equals(deviceDesc) ||
                                           sourceDesc->sinkDevice()->equals(deviceDesc))
-                && !isCallRxAudioSource(sourceDesc)) {
+                && !sourceDesc->isCallRx()) {
             disconnectAudioSource(sourceDesc);
         }
     }
@@ -8423,8 +8934,9 @@
     }
     sp<SwAudioOutputDescriptor> desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
     status_t status = desc->open(halConfig, mixerConfig, devices,
-            AUDIO_STREAM_DEFAULT, flags, &output);
+            AUDIO_STREAM_DEFAULT, flags, &output, attributes);
     if (status != NO_ERROR) {
         ALOGE("%s failed to open output %d", __func__, status);
         return nullptr;
@@ -8462,14 +8974,19 @@
         config.offload_info.channel_mask = config.channel_mask;
         config.offload_info.format = config.format;
 
-        status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output);
+        status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output,
+                            attributes);
         if (status != NO_ERROR) {
             return nullptr;
         }
     }
 
     addOutput(output, desc);
-
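+    // Apply the initial device routing to the newly opened output.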
+    setOutputDevices(__func__, desc,
+                     devices,
+                     true,
+                     0,
+                     NULL);
     sp<DeviceDescriptor> speaker = mAvailableOutputDevices.getDevice(
             AUDIO_DEVICE_OUT_SPEAKER, String8(""), AUDIO_FORMAT_DEFAULT);
 
@@ -8667,4 +9184,60 @@
     mpClientInterface->invalidateTracks(clients);
 }
 
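+// With fix_concurrent_playback_behavior_with_bit_perfect_client enabled: on a bit-perfect output,
+// concurrent system-sound clients are muted internally, while any other concurrent client causes
+// the bit-perfect client itself to be muted internally. The resulting mute states are sent to
+// audio flinger via setTracksInternalMute().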
+void AudioPolicyManager::updateClientsInternalMute(
+        const sp<android::SwAudioOutputDescriptor> &desc) {
+    if (!desc->isBitPerfect() ||
+        !com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        // This is only used for bit perfect output now.
+        return;
+    }
+    sp<TrackClientDescriptor> bitPerfectClient = nullptr;
+    bool bitPerfectClientInternalMute = false;
+    std::vector<media::TrackInternalMuteInfo> clientsInternalMute;
+    for (const sp<TrackClientDescriptor>& client : desc->getActiveClients()) {
+        if ((client->flags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+            bitPerfectClient = client;
+            continue;
+        }
+        bool muted = false;
+        if (client->stream() == AUDIO_STREAM_SYSTEM) {
+            // System sound is muted.
+            muted = true;
+        } else {
+            bitPerfectClientInternalMute = true;
+        }
+        if (client->setInternalMute(muted)) {
+            auto result = legacy2aidl_audio_port_handle_t_int32_t(client->portId());
+            if (!result.ok()) {
+                ALOGE("%s, failed to convert port id(%d) to aidl", __func__, client->portId());
+                continue;
+            }
+            media::TrackInternalMuteInfo info;
+            info.portId = result.value();
+            info.muted = client->getInternalMute();
+            clientsInternalMute.push_back(std::move(info));
+        }
+    }
+    if (bitPerfectClient != nullptr &&
+        bitPerfectClient->setInternalMute(bitPerfectClientInternalMute)) {
+        auto result = legacy2aidl_audio_port_handle_t_int32_t(bitPerfectClient->portId());
+        if (result.ok()) {
+            media::TrackInternalMuteInfo info;
+            info.portId = result.value();
+            info.muted = bitPerfectClient->getInternalMute();
+            clientsInternalMute.push_back(std::move(info));
+        } else {
+            ALOGE("%s, failed to convert port id(%d) of bit perfect client to aidl",
+                  __func__, bitPerfectClient->portId());
+        }
+    }
+    if (!clientsInternalMute.empty()) {
+        if (status_t status = mpClientInterface->setTracksInternalMute(clientsInternalMute);
+                status != NO_ERROR) {
+            ALOGE("%s, failed to update tracks internal mute, err=%d", __func__, status);
+        }
+    }
+}
+
 } // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index ea60c2b..c0a5012 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -103,7 +103,7 @@
         virtual status_t setDeviceConnectionState(audio_policy_dev_state_t state,
                 const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat);
         virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
-                                                                              const char *device_address);
+                                                                  const char *device_address);
         virtual status_t handleDeviceConfigChange(audio_devices_t device,
                                                   const char *device_address,
                                                   const char *device_name,
@@ -141,7 +141,8 @@
                                          audio_input_flags_t flags,
                                          audio_port_handle_t *selectedDeviceId,
                                          input_type_t *inputType,
-                                         audio_port_handle_t *portId);
+                                         audio_port_handle_t *portId,
+                                         uint32_t *virtualDeviceId);
 
         // indicates to the audio policy manager that the input starts being used.
         virtual status_t startInput(audio_port_handle_t portId);
@@ -150,6 +151,10 @@
         virtual status_t stopInput(audio_port_handle_t portId);
         virtual void releaseInput(audio_port_handle_t portId);
         virtual void checkCloseInputs();
+        virtual status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                        const char *address,
+                                                        bool enabled,
+                                                        audio_stream_type_t streamToDriveAbs);
         /**
          * @brief initStreamVolume: even if the engine volume files provides min and max, keep this
          * api for compatibility reason.
@@ -292,6 +297,7 @@
 
         virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes);
         virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
+        virtual status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) override;
         virtual status_t updatePolicyMix(
                 const AudioMix& mix,
                 const std::vector<AudioMixMatchCriterion>& updatedCriteria) override;
@@ -339,8 +345,7 @@
         virtual status_t startAudioSource(const struct audio_port_config *source,
                                           const audio_attributes_t *attributes,
                                           audio_port_handle_t *portId,
-                                          uid_t uid,
-                                          bool internal = false);
+                                          uid_t uid);
         virtual status_t stopAudioSource(audio_port_handle_t portId);
 
         virtual status_t setMasterMono(bool mono);
@@ -527,6 +532,7 @@
         void addOutput(audio_io_handle_t output, const sp<SwAudioOutputDescriptor>& outputDesc);
         void removeOutput(audio_io_handle_t output);
         void addInput(audio_io_handle_t input, const sp<AudioInputDescriptor>& inputDesc);
+        bool checkCloseInput(const sp<AudioInputDescriptor>& input);
 
         /**
          * @brief setOutputDevices change the route of the specified output.
@@ -563,12 +569,39 @@
         status_t resetInputDevice(audio_io_handle_t input,
                                   audio_patch_handle_t *patchHandle = NULL);
 
-        // compute the actual volume for a given stream according to the requested index and a particular
-        // device
-        virtual float computeVolume(IVolumeCurves &curves,
-                                    VolumeSource volumeSource,
-                                    int index,
-                                    const DeviceTypeSet& deviceTypes);
+        /**
+         * Compute the volume in dB that should be applied for a volume source and device types at
+         * a particular volume index.
+         *
+         * <p><b>Note:</b> Internally the compute method recursively calls itself to accurately
+         * determine the volume given the currently active sources and devices. Some of the
+         * interactions that require recursive computation are:
+         * <ul>
+         * <li>Match accessibility volume if ringtone volume is much louder</li>
+         * <li>If voice call is active cap other volumes (except ringtone and accessibility)</li>
+         * <li>Attenuate notification if headset is connected to prevent burst in user's ear</li>
+         * <li>Attenuate ringtone if headset is connected and music is not playing and speaker is
+         *      part of the devices to prevent burst in user's ear</li>
+         * <li>Limit music volume if headset is connected and notification is also active</li>
+         * </ul>
+         *
+         * @param curves volume curves to use for calculating volume value given the index
+         * @param volumeSource source (use case) of the volume
+         * @param index index to match in the volume curves for the calculation
+         * @param deviceTypes devices that should be considered in the volume curves for the
+         *        calculation
+         * @param adjustAttenuation boolean indicating whether the value should be adjusted to
+         *        avoid double attenuation when controlling an AVRCP device
+         * @param computeInternalInteraction boolean indicating whether recursive volume computation
+         *        should continue within the volume computation. Defaults to {@code true} so the
+         *        volume interactions can be computed. Recursive calls within the method always set
+         *        the value to {@code false} to prevent infinite recursion.
+         * @return computed volume in dB
+         */
+        virtual float computeVolume(IVolumeCurves &curves, VolumeSource volumeSource,
+                               int index, const DeviceTypeSet& deviceTypes,
+                               bool adjustAttenuation = true,
+                               bool computeInternalInteraction = true);
 
         // rescale volume index from srcStream within range of dstStream
         int rescaleVolumeIndex(int srcIndex,
@@ -674,15 +707,7 @@
         void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0,
                 bool skipDelays = false);
 
-        bool isCallRxAudioSource(const sp<SourceClientDescriptor> &source) {
-            return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
-        }
-
-        bool isCallTxAudioSource(const sp<SourceClientDescriptor> &source) {
-            return mCallTxSourceClient != nullptr && source == mCallTxSourceClient;
-        }
-
-        void connectTelephonyRxAudioSource();
+        void connectTelephonyRxAudioSource(uint32_t delayMs);
 
         void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
 
@@ -907,7 +932,8 @@
 
         status_t hasPrimaryOutput() const { return mPrimaryOutput != 0; }
 
-        status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
+        status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc,
+                                    uint32_t delayMs);
         status_t disconnectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
 
         status_t connectAudioSourceToSink(const sp<SourceClientDescriptor>& sourceDesc,
@@ -945,6 +971,13 @@
         void checkLeBroadcastRoutes(bool wasUnicastActive,
                 sp<SwAudioOutputDescriptor> ignoredOutput, uint32_t delayMs);
 
+        status_t startAudioSourceInternal(const struct audio_port_config *source,
+                                          const audio_attributes_t *attributes,
+                                          audio_port_handle_t *portId,
+                                          uid_t uid,
+                                          bool internal,
+                                          bool isCallRx,
+                                          uint32_t delayMs);
         const uid_t mUidCached;                         // AID_AUDIOSERVER
         sp<const AudioPolicyConfig> mConfig;
         EngineInstance mEngine;                         // Audio Policy Engine instance
@@ -1122,7 +1155,8 @@
                 const audio_config_t *config,
                 audio_output_flags_t flags,
                 const DeviceVector &devices,
-                audio_io_handle_t *output);
+                audio_io_handle_t *output,
+                audio_attributes_t attributes);
 
         /**
          * @brief Queries if some kind of spatialization will be performed if the audio playback
@@ -1357,6 +1391,20 @@
 
         PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
         void invalidateStreams(StreamTypeVector streams) const;
+
+        bool checkHapticCompatibilityOnSpatializerOutput(const audio_config_t* config,
+                                                         audio_session_t sessionId) const;
+
+        void updateClientsInternalMute(const sp<SwAudioOutputDescriptor>& desc);
+
+        float adjustDeviceAttenuationForAbsVolume(IVolumeCurves &curves,
+                                                  VolumeSource volumeSource,
+                                                  int index,
+                                                  const DeviceTypeSet &deviceTypes);
+
+        // For devices that support absolute volume, contains the audio attributes
+        // corresponding to the streams that are driving the volume changes
+        std::unordered_map<audio_devices_t, audio_attributes_t> mAbsoluteVolumeDrivingStreams;
 };
 
 };
diff --git a/services/audiopolicy/permission/Android.bp b/services/audiopolicy/permission/Android.bp
new file mode 100644
index 0000000..cfbeaae
--- /dev/null
+++ b/services/audiopolicy/permission/Android.bp
@@ -0,0 +1,111 @@
+package {
+    default_team: "trendy_team_android_media_audio_framework",
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_headers {
+    name: "audiopermissioncontroller_headers",
+    host_supported: true,
+    export_include_dirs: ["include"],
+}
+
+cc_library {
+    name: "audiopermissioncontroller",
+
+    srcs: [
+        "NativePermissionController.cpp",
+        "ValidatedAttributionSourceState.cpp",
+    ],
+    export_include_dirs: [
+        "include",
+    ],
+
+    header_libs: [
+        "libcutils_headers",
+        "liberror_headers",
+    ],
+    export_header_lib_headers: [
+        "liberror_headers",
+    ],
+    static_libs: [
+        "audio-permission-aidl-cpp",
+        "framework-permission-aidl-cpp",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "liblog",
+        "libutils",
+    ],
+
+    host_supported: true,
+    sanitize: {
+        integer_overflow: true,
+    },
+    cflags: [
+        "-DANDROID_BASE_UNIQUE_FD_DISABLE_IMPLICIT_CONVERSION",
+        "-DANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION",
+        "-Wall",
+        "-Wconditional-uninitialized",
+        "-Wdeprecated",
+        "-Werror",
+        "-Werror=format",
+        "-Werror=reorder-init-list",
+        "-Wextra",
+        "-Wextra-semi",
+        "-Wimplicit-fallthrough",
+        "-Wreorder-init-list",
+        "-Wshadow-all",
+        "-Wthread-safety",
+        "-Wunreachable-code-aggressive",
+    ],
+    tidy: true,
+    tidy_checks: [
+        "android-*",
+        "bugprone-*",
+        "cert-*",
+        "clang-analyzer-security*",
+        "google-*",
+        "misc-*",
+        "modernize-*",
+        "performance-*",
+    ],
+    tidy_checks_as_errors: [
+        "android-*",
+        "bugprone-*",
+        "cert-*",
+        "clang-analyzer-security*",
+        "google-*",
+        "misc-*",
+        "modernize-*",
+        "performance-*",
+    ],
+}
+
+cc_test {
+    name: "audiopermissioncontroller_test",
+    host_supported: true,
+    defaults: [
+        "libmediautils_tests_config",
+    ],
+    static_libs: [
+        "audio-permission-aidl-cpp",
+        "audiopermissioncontroller",
+        "framework-permission-aidl-cpp",
+        "libgmock",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "liblog",
+        "libutils",
+    ],
+    srcs: [
+        "tests/NativePermissionControllerTest.cpp",
+        "tests/ValidatedAttributionSourceStateTest.cpp",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    test_suites: ["general-tests"],
+}
diff --git a/services/audiopolicy/permission/NativePermissionController.cpp b/services/audiopolicy/permission/NativePermissionController.cpp
new file mode 100644
index 0000000..5743076
--- /dev/null
+++ b/services/audiopolicy/permission/NativePermissionController.cpp
@@ -0,0 +1,160 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/NativePermissionController.h>
+
+#include <algorithm>
+#include <optional>
+#include <utility>
+
+#include <android-base/expected.h>
+#include <cutils/android_filesystem_config.h>
+#include <utils/Errors.h>
+
+using ::android::binder::Status;
+using ::android::error::BinderResult;
+using ::android::error::unexpectedExceptionCode;
+
+namespace com::android::media::permission {
+static std::optional<std::string> getFixedPackageName(uid_t uid) {
+    // These values are in sync with AppOpsService
+    switch (uid % AID_USER_OFFSET) {
+        case AID_ROOT:
+            return "root";
+        case AID_SYSTEM:
+            return "system";
+        case AID_SHELL:
+            return "shell";
+        case AID_MEDIA:
+            return "media";
+        case AID_AUDIOSERVER:
+            return "audioserver";
+        case AID_CAMERASERVER:
+            return "cameraserver";
+        default:
+            return std::nullopt;
+    }
+}
+
+// -- Begin Binder methods
+Status NativePermissionController::populatePackagesForUids(
+        const std::vector<UidPackageState>& initialPackageStates) {
+    std::lock_guard l{m_};
+    is_package_populated_ = true;
+    package_map_.clear();
+    std::transform(initialPackageStates.begin(), initialPackageStates.end(),
+                   std::inserter(package_map_, package_map_.end()),
+                   [](const auto& x) -> std::pair<uid_t, std::vector<std::string>> {
+                       return {x.uid, x.packageNames};
+                   });
+    std::erase_if(package_map_, [](const auto& x) { return x.second.empty(); });
+    return Status::ok();
+}
+
+Status NativePermissionController::updatePackagesForUid(const UidPackageState& newPackageState) {
+    std::lock_guard l{m_};
+    if (newPackageState.packageNames.empty()) {
+        package_map_.erase(newPackageState.uid);
+    } else {
+        package_map_.insert_or_assign(newPackageState.uid, newPackageState.packageNames);
+    }
+    return Status::ok();
+}
+
+Status NativePermissionController::populatePermissionState(PermissionEnum perm,
+                                                           const std::vector<int>& uids) {
+    if (perm >= PermissionEnum::ENUM_SIZE || static_cast<int>(perm) < 0) {
+        return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT);
+    }
+    std::lock_guard l{m_};
+    auto& cursor = permission_map_[static_cast<size_t>(perm)];
+    cursor = std::vector<uid_t>{uids.begin(), uids.end()};
+    // should be sorted
+    std::sort(cursor.begin(), cursor.end());
+    return Status::ok();
+}
+
+// -- End Binder methods
+
+BinderResult<std::vector<std::string>> NativePermissionController::getPackagesForUid(
+        uid_t uid) const {
+    uid = uid % AID_USER_OFFSET;
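+    // Lookups are keyed by app-id, so strip the per-user offset from the uid first.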
+    const auto fixed_package_opt = getFixedPackageName(uid);
+    if (fixed_package_opt.has_value()) {
+        return BinderResult<std::vector<std::string>>{std::in_place_t{},
+                                                      {fixed_package_opt.value()}};
+    }
+    std::lock_guard l{m_};
+    if (!is_package_populated_) {
+        return unexpectedExceptionCode(
+                Status::EX_ILLEGAL_STATE,
+                "NPC::getPackagesForUid: controller never populated by system_server");
+    }
+    const auto cursor = package_map_.find(uid);
+    if (cursor != package_map_.end()) {
+        return cursor->second;
+    } else {
+        return unexpectedExceptionCode(
+                Status::EX_ILLEGAL_ARGUMENT,
+                ("NPC::getPackagesForUid: uid not found: " + std::to_string(uid)).c_str());
+    }
+}
+
+BinderResult<bool> NativePermissionController::validateUidPackagePair(
+        uid_t uid, const std::string& packageName) const {
+    if (uid == AID_ROOT || uid == AID_SYSTEM) return true;
+    uid = uid % AID_USER_OFFSET;
+    const auto fixed_package_opt = getFixedPackageName(uid);
+    if (fixed_package_opt.has_value()) {
+        return (uid == AID_ROOT || uid == AID_SYSTEM) ? true :
+                packageName == fixed_package_opt.value();
+    }
+    std::lock_guard l{m_};
+    if (!is_package_populated_) {
+        return unexpectedExceptionCode(
+                Status::EX_ILLEGAL_STATE,
+                "NPC::validatedUidPackagePair: controller never populated by system_server");
+    }
+    const auto cursor = package_map_.find(uid);
+    return (cursor != package_map_.end()) &&
+           (std::find(cursor->second.begin(), cursor->second.end(), packageName) !=
+            cursor->second.end());
+}
+
+BinderResult<bool> NativePermissionController::checkPermission(PermissionEnum perm,
+                                                               uid_t uid) const {
+    if (uid == AID_ROOT || uid == AID_SYSTEM || uid == getuid()) return true;
+    std::lock_guard l{m_};
+    const auto& uids = permission_map_[static_cast<size_t>(perm)];
+    if (!uids.empty()) {
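+        // The per-permission uid list is kept sorted by populatePermissionState(), so a binary
+        // search suffices here.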
+        return std::binary_search(uids.begin(), uids.end(), uid);
+    } else {
+        return unexpectedExceptionCode(
+                Status::EX_ILLEGAL_STATE,
+                "NPC::checkPermission: controller never populated by system_server");
+    }
+}
+
+}  // namespace com::android::media::permission
diff --git a/services/audiopolicy/permission/ValidatedAttributionSourceState.cpp b/services/audiopolicy/permission/ValidatedAttributionSourceState.cpp
new file mode 100644
index 0000000..f313422
--- /dev/null
+++ b/services/audiopolicy/permission/ValidatedAttributionSourceState.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/ValidatedAttributionSourceState.h>
+
+#include <binder/IPCThreadState.h>
+#include <error/expected_utils.h>
+#include <utils/Log.h>
+
+using ::android::binder::Status;
+using ::android::error::BinderResult;
+using ::android::error::unexpectedExceptionCode;
+
+namespace com::android::media::permission {
+
+BinderResult<ValidatedAttributionSourceState>
+ValidatedAttributionSourceState::createFromBinderContext(AttributionSourceState attr,
+                                                         const IPermissionProvider& provider) {
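+    // Use the pid/uid observed on the binder transaction rather than any caller-supplied values.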
+    attr.pid = ::android::IPCThreadState::self()->getCallingPid();
+    attr.uid = ::android::IPCThreadState::self()->getCallingUid();
+    return createFromTrustedUidNoPackage(std::move(attr), provider);
+}
+
+BinderResult<ValidatedAttributionSourceState>
+ValidatedAttributionSourceState::createFromTrustedUidNoPackage(
+        AttributionSourceState attr, const IPermissionProvider& provider) {
+    if (attr.packageName.has_value() && attr.packageName->size() != 0) {
+        if (VALUE_OR_RETURN(provider.validateUidPackagePair(attr.uid, attr.packageName.value()))) {
+            return ValidatedAttributionSourceState{std::move(attr)};
+        } else {
+            return unexpectedExceptionCode(Status::EX_SECURITY,
+                                           attr.toString()
+                                                   .insert(0, ": invalid attr ")
+                                                   .insert(0, __PRETTY_FUNCTION__)
+                                                   .c_str());
+        }
+    } else {
+        // For APIs which don't appropriately pass attribution sources or packages, we need
+        // to populate the package name with our best guess.
+        const auto packageNames = VALUE_OR_RETURN(provider.getPackagesForUid(attr.uid));
+        LOG_ALWAYS_FATAL_IF(packageNames.empty(), "%s BUG: empty package list from controller",
+                            __PRETTY_FUNCTION__);
+        attr.packageName = std::move(packageNames[0]);
+        return ValidatedAttributionSourceState{std::move(attr)};
+    }
+}
+
+}  // namespace com::android::media::permission
diff --git a/services/audiopolicy/permission/include/media/IPermissionProvider.h b/services/audiopolicy/permission/include/media/IPermissionProvider.h
new file mode 100644
index 0000000..8d90543
--- /dev/null
+++ b/services/audiopolicy/permission/include/media/IPermissionProvider.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include <optional>
+#include <vector>
+
+#include <com/android/media/permission/PermissionEnum.h>
+#include <error/BinderResult.h>
+
+namespace com::android::media::permission {
+
+class IPermissionProvider {
+  public:
+    // Get all package names which run under a certain app-id. Returns non-empty.
+    // Not user specific, since packages are across users. Special app-ids (system,
+    // shell, etc.) are handled.  Fails if the provider does not know about the
+    // app-id or if the provider has not been initialized.
+    virtual ::android::error::BinderResult<std::vector<std::string>> getPackagesForUid(
+            uid_t uid) const = 0;
+    // True iff the provided package name runs under the app-id of uid.
+    // Special app-ids (system, shell, etc.) are handled.
+    // Fails if the provider does not know about the app-id or if the provider has not been
+    // initialized.
+    virtual ::android::error::BinderResult<bool> validateUidPackagePair(
+            uid_t uid, const std::string& packageName) const = 0;
+
+    // True iff the uid holds the permission (user aware).
+    // Fails with NO_INIT if cache hasn't been populated.
+    virtual ::android::error::BinderResult<bool> checkPermission(PermissionEnum permission,
+                                                                 uid_t uid) const = 0;
+    virtual ~IPermissionProvider() = default;
+};
+}  // namespace com::android::media::permission
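To make the contract above concrete, here is a minimal fake provider (purely illustrative, not part of this change) showing the success and error shapes each method is expected to produce:

// Illustrative fake only; the uid and package values are arbitrary.
#include <media/IPermissionProvider.h>

#include <string>
#include <vector>

#include <android-base/expected.h>
#include <binder/Status.h>

namespace com::android::media::permission {

class FakePermissionProvider : public IPermissionProvider {
  public:
    ::android::error::BinderResult<std::vector<std::string>> getPackagesForUid(
            uid_t uid) const override {
        if (uid != 10000) {
            // Unknown app-id: fail with a binder Status instead of returning an empty list.
            return ::android::base::unexpected{::android::binder::Status::fromExceptionCode(
                    ::android::binder::Status::EX_ILLEGAL_ARGUMENT)};
        }
        return std::vector<std::string>{"com.example.app"};
    }
    ::android::error::BinderResult<bool> validateUidPackagePair(
            uid_t uid, const std::string& packageName) const override {
        return uid == 10000 && packageName == "com.example.app";
    }
    ::android::error::BinderResult<bool> checkPermission(PermissionEnum, uid_t) const override {
        return false;  // no permissions granted in this fake
    }
};

}  // namespace com::android::media::permission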
diff --git a/services/audiopolicy/permission/include/media/NativePermissionController.h b/services/audiopolicy/permission/include/media/NativePermissionController.h
new file mode 100644
index 0000000..a81c7a2
--- /dev/null
+++ b/services/audiopolicy/permission/include/media/NativePermissionController.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <mutex>
+#include <optional>
+#include <unordered_map>
+
+#include "IPermissionProvider.h"
+
+#include <android-base/thread_annotations.h>
+#include <com/android/media/permission/BnNativePermissionController.h>
+#include <error/BinderResult.h>
+
+namespace com::android::media::permission {
+
+class NativePermissionController : public BnNativePermissionController, public IPermissionProvider {
+    using Status = ::android::binder::Status;
+
+  public:
+    Status populatePackagesForUids(const std::vector<UidPackageState>& initialPackageStates) final;
+    Status updatePackagesForUid(const UidPackageState& newPackageState) final;
+    Status populatePermissionState(PermissionEnum permission, const std::vector<int>& uids) final;
+    // end binder methods
+
+    ::android::error::BinderResult<std::vector<std::string>> getPackagesForUid(
+            uid_t uid) const final;
+    ::android::error::BinderResult<bool> validateUidPackagePair(
+            uid_t uid, const std::string& packageName) const final;
+    ::android::error::BinderResult<bool> checkPermission(PermissionEnum permission,
+                                                         uid_t uid) const final;
+
+  private:
+    mutable std::mutex m_;
+    // map of app_ids to the set of package names which could run in them (should be 1)
+    std::unordered_map<uid_t, std::vector<std::string>> package_map_ GUARDED_BY(m_);
+    bool is_package_populated_ GUARDED_BY(m_) = false;
+    // (logical) map of PermissionEnum to the list of uids (not app-ids) which hold the permission
+    std::array<std::vector<uid_t>, static_cast<size_t>(PermissionEnum::ENUM_SIZE)> permission_map_
+            GUARDED_BY(m_);
+};
+}  // namespace com::android::media::permission
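A usage sketch of the two faces of this class (illustrative only; the helper function and values are assumptions): the system server populates the caches over the INativePermissionController binder interface, and the audioserver then queries the same object through IPermissionProvider:

// Sketch; assumes #include <media/NativePermissionController.h>.
using com::android::media::permission::IPermissionProvider;
using com::android::media::permission::NativePermissionController;
using com::android::media::permission::PermissionEnum;
using com::android::media::permission::UidPackageState;

void exampleUsage() {
    const auto controller = android::sp<NativePermissionController>::make();

    // System-server side: push the package and permission caches.
    UidPackageState state;
    state.uid = 10000;
    state.packageNames = {"com.example.app"};
    controller->populatePackagesForUids({state});
    controller->populatePermissionState(PermissionEnum::MODIFY_AUDIO_ROUTING, /*uids=*/{10000});

    // Audioserver side: query the same object through its IPermissionProvider interface.
    // Both calls return expected-style results; errors carry a binder Status.
    const IPermissionProvider& provider = *controller;
    auto packages = provider.getPackagesForUid(10000);  // ok: {"com.example.app"}
    auto allowed = provider.checkPermission(PermissionEnum::MODIFY_AUDIO_ROUTING, 10000);
}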
diff --git a/services/audiopolicy/permission/include/media/ValidatedAttributionSourceState.h b/services/audiopolicy/permission/include/media/ValidatedAttributionSourceState.h
new file mode 100644
index 0000000..46f7d0a
--- /dev/null
+++ b/services/audiopolicy/permission/include/media/ValidatedAttributionSourceState.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/content/AttributionSourceState.h>
+#include <error/BinderResult.h>
+
+#include "IPermissionProvider.h"
+
+namespace com::android::media::permission {
+
+using ::android::content::AttributionSourceState;
+
+class ValidatedAttributionSourceState {
+  public:
+    /**
+     * Validates an attribution source from within the context of a binder transaction.
+     * Overwrites the uid/pid and validates the packageName.
+     * Returns EX_SECURITY on package validation failure.
+     */
+    static ::android::error::BinderResult<ValidatedAttributionSourceState> createFromBinderContext(
+            AttributionSourceState attr, const IPermissionProvider& provider);
+
+    /**
+     * Creates a ValidatedAttributionSourceState in cases where the source is passed from a
+     * trusted entity which already performed validation.
+     */
+    static ValidatedAttributionSourceState createFromTrustedSource(AttributionSourceState attr) {
+        return ValidatedAttributionSourceState(attr);
+    }
+
+    /**
+     * Creates a ValidatedAttributionSourceState in cases where the uid/pid is trusted, but the
+     * packages have not been validated. Proper use of the previous two methods should make this
+     * unnecessary, but it is useful for migration purposes as well as for testing this class.
+     * Returns EX_SECURITY on package validation failure.
+     */
+    static ::android::error::BinderResult<ValidatedAttributionSourceState>
+    createFromTrustedUidNoPackage(AttributionSourceState attr, const IPermissionProvider& provider);
+
+    operator AttributionSourceState() const { return state_; }
+
+    operator const AttributionSourceState&() const { return state_; }
+
+    AttributionSourceState unwrapInto() && { return std::move(state_); }
+
+    bool operator==(const ValidatedAttributionSourceState& other) const {
+        return operator==(other.state_);
+    }
+
+    bool operator==(const AttributionSourceState& other) const { return state_ == other; }
+
+  private:
+    ValidatedAttributionSourceState(AttributionSourceState attr) : state_(attr) {}
+
+    AttributionSourceState state_;
+};
+}  // namespace com::android::media::permission
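As a complement to the header above, a hypothetical binder-service method (names assumed, not part of this change) sketching the intended flow for createFromBinderContext:

// Sketch only: validate the incoming attribution source before making permission decisions.
::android::binder::Status startCaptureExample(
        ::android::content::AttributionSourceState attrIn,
        const com::android::media::permission::IPermissionProvider& provider) {
    using com::android::media::permission::ValidatedAttributionSourceState;
    auto validated =
            ValidatedAttributionSourceState::createFromBinderContext(std::move(attrIn), provider);
    if (!validated.ok()) return validated.error();  // EX_SECURITY on a spoofed package
    ::android::content::AttributionSourceState attr = std::move(validated.value()).unwrapInto();
    // ... attr.uid/attr.pid now reflect the calling identity and the package is validated ...
    return ::android::binder::Status::ok();
}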
diff --git a/services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp b/services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp
new file mode 100644
index 0000000..f2423c1
--- /dev/null
+++ b/services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp
@@ -0,0 +1,211 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/NativePermissionController.h>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <error/BinderStatusMatcher.h>
+#include <error/ExpectedMatchers.h>
+
+using android::binder::Status::EX_ILLEGAL_ARGUMENT;
+using android::binder::Status::EX_ILLEGAL_STATE;
+using android::error::BinderStatusMatcher;
+using android::error::IsErrorAnd;
+using android::error::IsOkAnd;
+using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::PermissionEnum;
+using com::android::media::permission::UidPackageState;
+
+using ::testing::ElementsAre;
+using ::testing::IsFalse;
+using ::testing::IsTrue;
+
+class NativePermissionControllerTest : public ::testing::Test {
+  protected:
+    android::sp<NativePermissionController> holder_ =
+            android::sp<NativePermissionController>::make();
+    NativePermissionController& controller_ = *holder_;
+};
+static UidPackageState createState(uid_t uid, std::vector<std::string> packageNames) {
+    UidPackageState out{};
+    out.uid = uid;
+    out.packageNames = std::move(packageNames);
+    return out;
+}
+
+// ---  Tests for non-populated ----
+TEST_F(NativePermissionControllerTest, getPackagesForUid_NotPopulated) {
+    // Verify errors are returned
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
+    EXPECT_THAT(controller_.getPackagesForUid(10001),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
+
+    // fixed uids should work
+    EXPECT_THAT(controller_.getPackagesForUid(1000), IsOkAnd(ElementsAre(std::string{"system"})));
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_NotPopulated) {
+    // Verify errors are returned
+    EXPECT_THAT(controller_.validateUidPackagePair(10000, "com.package"),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
+
+    // fixed uids should work
+    EXPECT_THAT(controller_.validateUidPackagePair(1000, "system"), IsOkAnd(IsTrue()));
+}
+
+// ---  Tests for populatePackagesForUids ----
+TEST_F(NativePermissionControllerTest, populatePackages_EmptyInput) {
+    std::vector<UidPackageState> input;
+
+    // succeeds
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    // Verify unknown uid behavior
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT)));
+}
+
+TEST_F(NativePermissionControllerTest, populatePackages_ValidInput) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+            createState(10001, {"com.example2.app1"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsOkAnd(ElementsAre("com.example.app1", "com.example.app2")));
+    EXPECT_THAT(controller_.getPackagesForUid(10001), IsOkAnd(ElementsAre("com.example2.app1")));
+}
+
+// --- Tests for updatePackagesForUid ---
+TEST_F(NativePermissionControllerTest, updatePackages_NewUid) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+            createState(10001, {"com.example2.app1"}),
+    };
+    UidPackageState newState = createState(12000, {"com.example.other"});
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+    EXPECT_THAT(controller_.updatePackagesForUid(newState), BinderStatusMatcher::isOk());
+
+    // Verify the results: existing uids are unchanged and the new uid was added
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsOkAnd(ElementsAre("com.example.app1", "com.example.app2")));
+    EXPECT_THAT(controller_.getPackagesForUid(10001), IsOkAnd(ElementsAre("com.example2.app1")));
+    EXPECT_THAT(controller_.getPackagesForUid(12000), IsOkAnd(ElementsAre("com.example.other")));
+}
+
+TEST_F(NativePermissionControllerTest, updatePackages_ExistingUid) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2", "com.example.app3"}),
+            createState(10001, {"com.example2.app1"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+    // Update packages for existing uid
+    UidPackageState newState = createState(10000, {"com.example.other", "com.example.new"});
+    EXPECT_THAT(controller_.updatePackagesForUid(newState), BinderStatusMatcher::isOk());
+
+    // Verify update
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsOkAnd(ElementsAre("com.example.other", "com.example.new")));
+}
+
+TEST_F(NativePermissionControllerTest, updatePackages_EmptyRemovesEntry) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    UidPackageState newState{};  // Empty package list
+    newState.uid = 10000;
+    EXPECT_THAT(controller_.updatePackagesForUid(newState), BinderStatusMatcher::isOk());
+    // getPackages for unknown UID should error out
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT)));
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_ValidPair) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.validateUidPackagePair(10000, "com.example.app1"), IsOkAnd(IsTrue()));
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_InvalidPackage) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.validateUidPackagePair(10000, "com.example.other"), IsOkAnd(IsFalse()));
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_UnknownUid) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.validateUidPackagePair(12000, "any.package"), IsOkAnd(IsFalse()));
+}
+
+TEST_F(NativePermissionControllerTest, populatePermissionState_InvalidPermission) {
+    EXPECT_THAT(controller_.populatePermissionState(PermissionEnum::ENUM_SIZE, {}),
+                BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT));
+    EXPECT_THAT(
+            controller_.populatePermissionState(
+                    static_cast<PermissionEnum>(static_cast<int>(PermissionEnum::ENUM_SIZE) + 1),
+                    {}),
+            BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT));
+}
+
+TEST_F(NativePermissionControllerTest, populatePermissionState_HoldsPermission) {
+    // Unsorted
+    std::vector<int> uids{3, 1, 2, 4, 5};
+
+    EXPECT_THAT(controller_.populatePermissionState(PermissionEnum::MODIFY_AUDIO_ROUTING, uids),
+                BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.checkPermission(PermissionEnum::MODIFY_AUDIO_ROUTING, 3),
+                IsOkAnd(IsTrue()));
+}
+
+TEST_F(NativePermissionControllerTest, populatePermissionState_DoesNotHoldPermission) {
+    // Unsorted
+    std::vector<int> uids{3, 1, 2, 4, 5};
+
+    EXPECT_THAT(controller_.populatePermissionState(PermissionEnum::MODIFY_AUDIO_ROUTING, uids),
+                BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.checkPermission(PermissionEnum::MODIFY_AUDIO_ROUTING, 6),
+                IsOkAnd(IsFalse()));
+}
+
+TEST_F(NativePermissionControllerTest, populatePermissionState_NotInitialized) {
+    EXPECT_THAT(controller_.checkPermission(PermissionEnum::MODIFY_AUDIO_ROUTING, 3),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
+}
diff --git a/services/audiopolicy/permission/tests/ValidatedAttributionSourceStateTest.cpp b/services/audiopolicy/permission/tests/ValidatedAttributionSourceStateTest.cpp
new file mode 100644
index 0000000..0dd8814
--- /dev/null
+++ b/services/audiopolicy/permission/tests/ValidatedAttributionSourceStateTest.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/ValidatedAttributionSourceState.h>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <android-base/expected.h>
+#include <error/ExpectedMatchers.h>
+#include <media/IPermissionProvider.h>
+#include "error/BinderStatusMatcher.h"
+
+using ::android::base::unexpected;
+using ::android::binder::Status;
+using ::android::binder::Status::EX_ILLEGAL_ARGUMENT;
+using ::android::binder::Status::EX_ILLEGAL_STATE;
+using ::android::binder::Status::EX_SECURITY;
+using ::android::content::AttributionSourceState;
+using ::android::error::BinderResult;
+using ::android::error::BinderStatusMatcher;
+using ::android::error::IsErrorAnd;
+using ::android::error::IsOkAnd;
+using ::com::android::media::permission::IPermissionProvider;
+using ::com::android::media::permission::PermissionEnum;
+using ::com::android::media::permission::ValidatedAttributionSourceState;
+
+using ::testing::Eq;
+using ::testing::Return;
+
+class MockPermissionProvider : public IPermissionProvider {
+  public:
+    MOCK_METHOD(BinderResult<std::vector<std::string>>, getPackagesForUid, (uid_t uid),
+                (override, const));
+    MOCK_METHOD(BinderResult<bool>, validateUidPackagePair, (uid_t uid, const std::string&),
+                (override, const));
+    MOCK_METHOD(BinderResult<bool>, checkPermission, (PermissionEnum perm, uid_t),
+                (override, const));
+};
+
+class ValidatedAttributionSourceStateTest : public ::testing::Test {
+  protected:
+    MockPermissionProvider mMockProvider;
+    const uid_t mUid = 10001;
+    const std::vector<std::string> mPackageList{"com.package1", "com.package2"};
+};
+
+TEST_F(ValidatedAttributionSourceStateTest, providedPackageValid) {
+    const std::string package = "com.package1";
+    EXPECT_CALL(mMockProvider, validateUidPackagePair(mUid, package)).WillOnce(Return(true));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    attr.packageName = package;
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsOkAnd(Eq(attr)));
+}
+
+TEST_F(ValidatedAttributionSourceStateTest, providedPackageInvalid) {
+    const std::string package = "com.package.spoof";
+    EXPECT_CALL(mMockProvider, validateUidPackagePair(mUid, package)).WillOnce(Return(false));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    attr.packageName = package;
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_SECURITY)));
+}
+
+TEST_F(ValidatedAttributionSourceStateTest, packageLookup_whenMissingPackage) {
+    EXPECT_CALL(mMockProvider, getPackagesForUid(mUid)).WillOnce(Return(mPackageList));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    AttributionSourceState expectedAttr;
+    expectedAttr.uid = mUid;
+    expectedAttr.packageName = "com.package1";
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsOkAnd(Eq(expectedAttr)));
+}
+
+TEST_F(ValidatedAttributionSourceStateTest, packageLookup_whenEmptyPackage) {
+    EXPECT_CALL(mMockProvider, getPackagesForUid(mUid)).WillOnce(Return(mPackageList));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    attr.packageName = std::string{};
+    AttributionSourceState expectedAttr;
+    expectedAttr.uid = mUid;
+    expectedAttr.packageName = "com.package1";
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsOkAnd(Eq(expectedAttr)));
+}
+
+TEST_F(ValidatedAttributionSourceStateTest, controllerNotInitialized) {
+    EXPECT_CALL(mMockProvider, getPackagesForUid(mUid))
+            .WillOnce(Return(unexpected{Status::fromExceptionCode(EX_ILLEGAL_STATE)}));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    attr.packageName = std::string{};
+    AttributionSourceState expectedAttr;
+    expectedAttr.uid = mUid;
+    expectedAttr.packageName = "com.package1";
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
+}
+
+TEST_F(ValidatedAttributionSourceStateTest, uidNotFound) {
+    EXPECT_CALL(mMockProvider, getPackagesForUid(mUid))
+            .WillOnce(Return(unexpected{Status::fromExceptionCode(EX_ILLEGAL_ARGUMENT)}));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    attr.packageName = std::string{};
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT)));
+}
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index fb55225..e157808 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -10,7 +11,22 @@
 cc_defaults {
     name: "libaudiopolicyservice_dependencies",
 
+    include_dirs: [
+        "frameworks/av/services/audiopolicy", // include path outside of libaudiopolicyservice
+    ],
+
     shared_libs: [
+        "android.media.audiopolicy-aconfig-cc",
+        "audio-permission-aidl-cpp",
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopermissioncontroller",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "capture_state_listener-aidl-cpp",
+        "com.android.media.audio-aconfig-cc",
+        "framework-permission-aidl-cpp",
+        "libPlatformProperties",
         "libactivitymanager_aidl",
         "libaudioclient",
         "libaudioclient_aidl_conversion",
@@ -23,6 +39,7 @@
         "libaudioutils",
         "libbinder",
         "libcutils",
+        "libeffectsconfig",
         "libhardware_legacy",
         "libheadtracking",
         "libheadtracking-binding",
@@ -31,59 +48,42 @@
         "libmediametrics",
         "libmediautils",
         "libpermission",
-        "libPlatformProperties",
         "libsensor",
         "libsensorprivacy",
         "libshmemcompat",
         "libstagefright_foundation",
         "libutils",
         "libxml2",
-        "audioclient-types-aidl-cpp",
-        "audioflinger-aidl-cpp",
-        "audiopolicy-aidl-cpp",
-        "audiopolicy-types-aidl-cpp",
-        "capture_state_listener-aidl-cpp",
-        "com.android.media.audio-aconfig-cc",
-        "framework-permission-aidl-cpp",
         "packagemanager_aidl-cpp",
         "spatializer-aidl-cpp",
     ],
-
-    static_libs: [
-        "libeffectsconfig",
-        "libaudiopolicycomponents",
-    ]
 }
 
 cc_library {
     name: "libaudiopolicyservice",
 
     defaults: [
-        "libaudiopolicyservice_dependencies",
         "latest_android_media_audio_common_types_cpp_shared",
+        "libaudiopolicyservice_dependencies",
     ],
 
     srcs: [
-        "AudioRecordClient.cpp",
         "AudioPolicyClientImpl.cpp",
         "AudioPolicyEffects.cpp",
         "AudioPolicyInterfaceImpl.cpp",
         "AudioPolicyService.cpp",
+        "AudioRecordClient.cpp",
         "CaptureStateNotifier.cpp",
         "Spatializer.cpp",
         "SpatializerPoseController.cpp",
     ],
 
     include_dirs: [
-        "frameworks/av/services/audioflinger"
-    ],
-
-
-    static_libs: [
-        "framework-permission-aidl-cpp",
+        "frameworks/av/services/audioflinger",
     ],
 
     header_libs: [
+        "audiopolicyservicelocal_headers",
         "libaudiohal_headers",
         "libaudiopolicycommon",
         "libaudiopolicyengine_interface_headers",
@@ -91,19 +91,29 @@
         "libaudioutils_headers",
     ],
 
-    cflags: [
-        "-fvisibility=hidden",
-        "-Werror",
-        "-Wall",
-        "-Wthread-safety",
-    ],
+    export_include_dirs: ["."],
 
-    export_shared_lib_headers: [
-        "libactivitymanager_aidl",
-        "libaudiousecasevalidation",
-        "libheadtracking",
-        "libheadtracking-binding",
-        "libsensorprivacy",
-        "framework-permission-aidl-cpp",
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+        "-fvisibility=hidden",
     ],
 }
+
+cc_library_headers {
+    name: "libaudiopolicyservice_headers",
+    host_supported: true,
+    export_include_dirs: [
+        ".",
+        "include",
+    ],
+}
+
+cc_library_headers {
+    name: "audiopolicyservicelocal_headers",
+    host_supported: true,
+    export_include_dirs: ["include"],
+    header_libs: ["audiopermissioncontroller_headers"],
+    export_header_lib_headers: ["audiopermissioncontroller_headers"],
+}
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 6de71a3..22fc151 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -56,7 +56,8 @@
                                                            audio_config_base_t *mixerConfig,
                                                            const sp<DeviceDescriptorBase>& device,
                                                            uint32_t *latencyMs,
-                                                           audio_output_flags_t flags)
+                                                           audio_output_flags_t flags,
+                                                           audio_attributes_t attributes)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
     if (af == 0) {
@@ -74,6 +75,8 @@
             legacy2aidl_audio_config_base_t_AudioConfigBase(*mixerConfig, false /*isInput*/));
     request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_DeviceDescriptorBase(device));
     request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    request.attributes = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(attributes));
 
     status_t status = af->openOutput(request, &response);
     if (status == OK) {
@@ -352,4 +355,13 @@
     return af->getAudioMixPort(devicePort, port);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->setTracksInternalMute(tracksInternalMute);
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 71edd57..d67ddb6 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -61,11 +61,6 @@
     }
 }
 
-void AudioPolicyEffects::setDefaultDeviceEffects() {
-    mDefaultDeviceEffectFuture = std::async(
-                std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
-}
-
 status_t AudioPolicyEffects::addInputEffects(audio_io_handle_t input,
                              audio_source_t inputSource,
                              audio_session_t audioSession)
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index a9628c2..259b84a 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -116,10 +116,8 @@
     // Remove the default stream effect from wherever it's attached.
     status_t removeStreamDefaultEffect(audio_unique_id_t id) EXCLUDES_AudioPolicyEffects_Mutex;
 
-    // Called by AudioPolicyService::onFirstRef() to load device effects
-    // on a separate worker thread.
-    // TODO(b/319515492) move this initialization after AudioPolicyService::onFirstRef().
-    void setDefaultDeviceEffects();
+    // Initializes the default device effects (AudioSystem must be ready as this creates audio clients).
+    void initDefaultDeviceEffects() EXCLUDES(mDeviceEffectsMutex) EXCLUDES_EffectHandle_Mutex;
 
 private:
 
@@ -201,11 +199,6 @@
 
     };
 
-    // Called on an async thread because it creates AudioEffects
-    // which register with AudioFlinger and AudioPolicy.
-    // We must therefore exclude the EffectHandle_Mutex.
-    void initDefaultDeviceEffects() EXCLUDES(mDeviceEffectsMutex) EXCLUDES_EffectHandle_Mutex;
-
     status_t loadAudioEffectConfig_ll(const sp<EffectsFactoryHalInterface>& effectsFactoryHal)
             REQUIRES(mMutex, mDeviceEffectsMutex);
 
@@ -281,18 +274,6 @@
     std::mutex mDeviceEffectsMutex;
     std::map<std::string, std::unique_ptr<DeviceEffects>> mDeviceEffects
             GUARDED_BY(mDeviceEffectsMutex);
-
-    /**
-     * Device Effect initialization must be asynchronous: the audio_policy service parses and init
-     * effect on first reference. AudioFlinger will handle effect creation and register these
-     * effect on audio_policy service.
-     *
-     * The future is associated with the std::async launched thread - no need to lock as
-     * it is only set once on init.  Due to the async nature, it is conceivable that
-     * some device effects are not available immediately after AudioPolicyService::onFirstRef()
-     * while the effects are being created.
-     */
-    std::future<void> mDefaultDeviceEffectFuture;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 6e1ecec..e598897 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -14,19 +14,23 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AudioPolicyIntefaceImpl"
+#define LOG_TAG "AudioPolicyInterfaceImpl"
 //#define LOG_NDEBUG 0
 
 #include "AudioPolicyService.h"
 #include "AudioRecordClient.h"
 #include "TypeConverter.h"
+
+#include <android/content/AttributionSourceState.h>
+#include <android_media_audiopolicy.h>
+#include <com_android_media_audio.h>
+#include <error/expected_utils.h>
 #include <media/AidlConversion.h>
 #include <media/AudioPolicy.h>
 #include <media/AudioValidator.h>
 #include <media/MediaMetricsItem.h>
 #include <media/PolicyAidlConversion.h>
 #include <utils/Log.h>
-#include <android/content/AttributionSourceState.h>
 
 #define VALUE_OR_RETURN_BINDER_STATUS(x) \
     ({ auto _tmp = (x); \
@@ -42,11 +46,30 @@
         if (!_tmp.isOk()) return _tmp; \
     }
 
+#define CHECK_PERM(expr1, expr2) \
+    VALUE_OR_RETURN_STATUS(getPermissionProvider().checkPermission((expr1), (expr2)))
+
 #define MAX_ITEMS_PER_LIST 1024
 
 namespace android {
+namespace audiopolicy_flags = android::media::audiopolicy;
 using binder::Status;
 using aidl_utils::binderStatusFromStatusT;
+using com::android::media::audio::audioserver_permissions;
+using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::PermissionEnum::ACCESS_ULTRASOUND;
+using com::android::media::permission::PermissionEnum::CALL_AUDIO_INTERCEPTION;
+using com::android::media::permission::PermissionEnum::CAPTURE_AUDIO_HOTWORD;
+using com::android::media::permission::PermissionEnum::CAPTURE_VOICE_COMMUNICATION_OUTPUT;
+using com::android::media::permission::PermissionEnum::CAPTURE_AUDIO_OUTPUT;
+using com::android::media::permission::PermissionEnum::CAPTURE_MEDIA_OUTPUT;
+using com::android::media::permission::PermissionEnum::CAPTURE_TUNER_AUDIO_INPUT;
+using com::android::media::permission::PermissionEnum::MODIFY_AUDIO_ROUTING;
+using com::android::media::permission::PermissionEnum::MODIFY_AUDIO_SETTINGS;
+using com::android::media::permission::PermissionEnum::MODIFY_DEFAULT_AUDIO_EFFECTS;
+using com::android::media::permission::PermissionEnum::MODIFY_PHONE_STATE;
+using com::android::media::permission::PermissionEnum::RECORD_AUDIO;
+using com::android::media::permission::PermissionEnum::WRITE_SECURE_SETTINGS;
 using content::AttributionSourceState;
 using media::audio::common::AudioConfig;
 using media::audio::common::AudioConfigBase;
@@ -62,6 +85,8 @@
 using media::audio::common::AudioUuid;
 using media::audio::common::Int;
 
+constexpr int kDefaultVirtualDeviceId = 0;
+
 const std::vector<audio_usage_t>& SYSTEM_USAGES = {
     AUDIO_USAGE_CALL_ASSISTANT,
     AUDIO_USAGE_EMERGENCY,
@@ -80,31 +105,37 @@
         != std::end(mSupportedSystemUsages);
 }
 
-status_t AudioPolicyService::validateUsage(const audio_attributes_t& attr) {
+Status AudioPolicyService::validateUsage(const audio_attributes_t& attr) {
      return validateUsage(attr, getCallingAttributionSource());
 }
 
-status_t AudioPolicyService::validateUsage(const audio_attributes_t& attr,
+Status AudioPolicyService::validateUsage(const audio_attributes_t& attr,
         const AttributionSourceState& attributionSource) {
     if (isSystemUsage(attr.usage)) {
         if (isSupportedSystemUsage(attr.usage)) {
             if (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
                     && ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)) {
-                if (!callAudioInterceptionAllowed(attributionSource)) {
+                if (!(audioserver_permissions() ?
+                            CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid)
+                            : callAudioInterceptionAllowed(attributionSource))) {
                     ALOGE("%s: call audio interception not allowed for attribution source: %s",
                            __func__, attributionSource.toString().c_str());
-                    return PERMISSION_DENIED;
+                    return Status::fromExceptionCode(Status::EX_SECURITY,
+                            "Call audio interception not allowed");
                 }
-            } else if (!modifyAudioRoutingAllowed(attributionSource)) {
+            } else if (!(audioserver_permissions() ?
+                        CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid)
+                        : modifyAudioRoutingAllowed(attributionSource))) {
                 ALOGE("%s: modify audio routing not allowed for attribution source: %s",
                         __func__, attributionSource.toString().c_str());
-                return PERMISSION_DENIED;
+                return Status::fromExceptionCode(Status::EX_SECURITY,
+                        "Modify audio routing not allowed");
             }
         } else {
-            return BAD_VALUE;
+            return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT);
         }
     }
-    return NO_ERROR;
+    return Status::ok();
 }
 
 
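Many of the hunks that follow apply the same migration pattern: when the audioserver_permissions() flag is enabled, permission checks are answered from the audioserver-local cache via CHECK_PERM (which forwards any provider error), and otherwise fall back to the legacy caller-identity helpers. A condensed sketch of the pattern (the method name is illustrative only, not part of this change):

// Illustrative only; mirrors the guarded calls below without the surrounding plumbing.
Status AudioPolicyService::exampleGuardedSetting() {
    if (!(audioserver_permissions()
                ? CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
                : settingsAllowed())) {
        return binderStatusFromStatusT(PERMISSION_DENIED);
    }
    // ... perform the guarded work ...
    return Status::ok();
}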
@@ -131,7 +162,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (state != AUDIO_POLICY_DEVICE_STATE_AVAILABLE &&
@@ -185,7 +218,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -209,7 +244,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(state) >= AUDIO_MODE_CNT) {
@@ -259,7 +296,9 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    if (!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+            : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -349,7 +388,7 @@
 
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(AudioValidator::validateAudioAttributes(attr, "68953950")));
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr, attributionSource)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attr, attributionSource));
 
     ALOGV("%s()", __func__);
     audio_utils::lock_guard _l(mMutex);
@@ -358,14 +397,22 @@
         aidl2legacy_int32_t_uid_t(attributionSource.uid)))) {
         attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
     }
+    const bool bypassInterruptionAllowed = audioserver_permissions() ? (
+            CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid) ||
+            CHECK_PERM(MODIFY_PHONE_STATE, attributionSource.uid) ||
+            CHECK_PERM(WRITE_SECURE_SETTINGS, attributionSource.uid))
+            : bypassInterruptionPolicyAllowed(attributionSource);
+
     if (((attr.flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
-            && !bypassInterruptionPolicyAllowed(attributionSource)) {
+            && !bypassInterruptionAllowed) {
         attr.flags = static_cast<audio_flags_mask_t>(
                 attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
     }
 
     if (attr.content_type == AUDIO_CONTENT_TYPE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(attributionSource)) {
+        if (!(audioserver_permissions() ?
+                CHECK_PERM(ACCESS_ULTRASOUND, attributionSource.uid)
+                : accessUltrasoundAllowed(attributionSource))) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
                     __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -394,18 +441,24 @@
             break;
         case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
             if (((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)
-                && !callAudioInterceptionAllowed(attributionSource)) {
+                && !(audioserver_permissions() ?
+                        CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid)
+                        : callAudioInterceptionAllowed(attributionSource))) {
                 ALOGE("%s() permission denied: call redirection not allowed for uid %d",
                     __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
-            } else if (!modifyPhoneStateAllowed(attributionSource)) {
+            } else if (!(audioserver_permissions() ?
+                        CHECK_PERM(MODIFY_PHONE_STATE, attributionSource.uid)
+                    : modifyPhoneStateAllowed(attributionSource))) {
                 ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
                     __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
         case AudioPolicyInterface::API_OUT_MIX_PLAYBACK:
-            if (!modifyAudioRoutingAllowed(attributionSource)) {
+            if (!(audioserver_permissions() ?
+                        CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid)
+                    : modifyAudioRoutingAllowed(attributionSource))) {
                 ALOGE("%s() permission denied: modify audio routing not allowed for uid %d",
                     __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
@@ -424,7 +477,7 @@
 
         sp<AudioPlaybackClient> client =
                 new AudioPlaybackClient(attr, output, attributionSource, session,
-                    portId, selectedDeviceId, stream, isSpatialized);
+                    portId, selectedDeviceId, stream, isSpatialized, config.channel_mask);
         mAudioPlaybackClients.add(portId, client);
 
         _aidl_return->output = VALUE_OR_RETURN_BINDER_STATUS(
@@ -624,8 +677,9 @@
         return binderStatusFromStatusT(BAD_VALUE);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
-            attributionSource)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attr, attributionSource));
+
+    uint32_t virtualDeviceId = kDefaultVirtualDeviceId;
 
     // check calling permissions.
     // Capturing from the following sources does not require permission RECORD_AUDIO
@@ -636,7 +690,10 @@
     // type is API_INPUT_MIX_EXT_POLICY_REROUTE and by AudioService if a media projection
     // is used and input type is API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK
     // - ECHO_REFERENCE source is controlled by captureAudioOutputAllowed()
-    if (!(recordingAllowed(attributionSource, inputSource)
+    const auto isRecordingAllowed = audioserver_permissions() ?
+            CHECK_PERM(RECORD_AUDIO, attributionSource.uid) :
+            recordingAllowed(attributionSource, inputSource);
+    if (!(isRecordingAllowed
             || inputSource == AUDIO_SOURCE_FM_TUNER
             || inputSource == AUDIO_SOURCE_REMOTE_SUBMIX
             || inputSource == AUDIO_SOURCE_ECHO_REFERENCE)) {
@@ -645,8 +702,12 @@
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureOutput = captureAudioOutputAllowed(attributionSource);
-    bool canInterceptCallAudio = callAudioInterceptionAllowed(attributionSource);
+    bool canCaptureOutput = audioserver_permissions() ?
+                        CHECK_PERM(CAPTURE_AUDIO_OUTPUT, attributionSource.uid)
+                        : captureAudioOutputAllowed(attributionSource);
+    bool canInterceptCallAudio = audioserver_permissions() ?
+                        CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid)
+                        : callAudioInterceptionAllowed(attributionSource);
     bool isCallAudioSource = inputSource == AUDIO_SOURCE_VOICE_UPLINK
              || inputSource == AUDIO_SOURCE_VOICE_DOWNLINK
              || inputSource == AUDIO_SOURCE_VOICE_CALL;
@@ -660,11 +721,15 @@
     }
     if (inputSource == AUDIO_SOURCE_FM_TUNER
         && !canCaptureOutput
-        && !captureTunerAudioInputAllowed(attributionSource)) {
+        && !(audioserver_permissions() ?
+                CHECK_PERM(CAPTURE_TUNER_AUDIO_INPUT, attributionSource.uid)
+                : captureTunerAudioInputAllowed(attributionSource))) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureHotword = captureHotwordAllowed(attributionSource);
+    bool canCaptureHotword = audioserver_permissions() ?
+                        CHECK_PERM(CAPTURE_AUDIO_HOTWORD, attributionSource.uid)
+                        : captureHotwordAllowed(attributionSource);
     if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -679,7 +744,9 @@
     }
 
     if (attr.source == AUDIO_SOURCE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(attributionSource)) {
+        if (!(audioserver_permissions() ?
+                CHECK_PERM(ACCESS_ULTRASOUND, attributionSource.uid)
+                : accessUltrasoundAllowed(attributionSource))) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
                     __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -698,7 +765,8 @@
             status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
                                                           attributionSource, &config,
                                                           flags, &selectedDeviceId,
-                                                          &inputType, &portId);
+                                                          &inputType, &portId,
+                                                          &virtualDeviceId);
 
         }
         audioPolicyEffects = mAudioPolicyEffects;
@@ -724,19 +792,42 @@
                     status = PERMISSION_DENIED;
                 }
                 break;
-            case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
-                if (!(modifyAudioRoutingAllowed(attributionSource)
+            case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE: {
+                bool modAudioRoutingAllowed;
+                if (audioserver_permissions()) {
+                        auto result = getPermissionProvider().checkPermission(
+                    auto result = getPermissionProvider().checkPermission(
+                            MODIFY_AUDIO_ROUTING, attributionSource.uid);
+                    if (!result.ok()) {
+                        ALOGE("%s permission provider error: %s", __func__,
+                                result.error().toString8().c_str());
+                        status = aidl_utils::statusTFromBinderStatus(result.error());
+                        break;
+                    }
+                    modAudioRoutingAllowed = result.value();
+                    modAudioRoutingAllowed = modifyAudioRoutingAllowed(attributionSource);
+                }
+                if (!(modAudioRoutingAllowed
                         || ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
                             && canInterceptCallAudio))) {
                     ALOGE("%s permission denied for remote submix capture", __func__);
                     status = PERMISSION_DENIED;
                 }
                 break;
+            }
             case AudioPolicyInterface::API_INPUT_INVALID:
             default:
                 LOG_ALWAYS_FATAL("%s encountered an invalid input type %d",
                         __func__, (int)inputType);
             }
+
+            if (audiopolicy_flags::record_audio_device_aware_permission()) {
+                // enforce device-aware RECORD_AUDIO permission
+                if (virtualDeviceId != kDefaultVirtualDeviceId &&
+                    !recordingAllowed(attributionSource, virtualDeviceId, inputSource)) {
+                    status = PERMISSION_DENIED;
+                }
+            }
         }
 
         if (status != NO_ERROR) {
@@ -752,6 +843,7 @@
 
         sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
                                                              selectedDeviceId, attributionSource,
+                                                             virtualDeviceId,
                                                              canCaptureOutput, canCaptureHotword,
                                                              mOutputCommandThread);
         mAudioRecordClients.add(portId, client);
@@ -807,8 +899,8 @@
     msg << "Audio recording on session " << client->session;
 
     // check calling permissions
-    if (!(startRecording(client->attributionSource, String16(msg.str().c_str()),
-                         client->attributes.source)
+    if (!(startRecording(client->attributionSource, client->virtualDeviceId,
+                         String16(msg.str().c_str()), client->attributes.source)
             || client->attributes.source == AUDIO_SOURCE_FM_TUNER
             || client->attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX
             || client->attributes.source == AUDIO_SOURCE_ECHO_REFERENCE)) {
@@ -826,7 +918,8 @@
     if (client->active) {
         ALOGE("Client should never be active before startInput. Uid %d port %d",
                 client->attributionSource.uid, portId);
-        finishRecording(client->attributionSource, client->attributes.source);
+        finishRecording(client->attributionSource, client->virtualDeviceId,
+                        client->attributes.source);
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
 
@@ -922,7 +1015,8 @@
         client->active = false;
         client->startTimeNs = 0;
         updateUidStates_l();
-        finishRecording(client->attributionSource, client->attributes.source);
+        finishRecording(client->attributionSource, client->virtualDeviceId,
+                        client->attributes.source);
     }
 
     return binderStatusFromStatusT(status);
@@ -951,7 +1045,7 @@
     updateUidStates_l();
 
     // finish the recording app op
-    finishRecording(client->attributionSource, client->attributes.source);
+    finishRecording(client->attributionSource, client->virtualDeviceId, client->attributes.source);
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->stopInput(portId));
 }
@@ -1002,6 +1096,34 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::setDeviceAbsoluteVolumeEnabled(const AudioDevice& deviceAidl,
+                                                          bool enabled,
+                                                          AudioStreamType streamToDriveAbsAidl) {
+    audio_stream_type_t streamToDriveAbs = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamToDriveAbsAidl));
+    audio_devices_t deviceType;
+    std::string address;
+    RETURN_BINDER_STATUS_IF_ERROR(
+            aidl2legacy_AudioDevice_audio_device(deviceAidl, &deviceType, &address));
+
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
+        return binderStatusFromStatusT(PERMISSION_DENIED);
+    }
+    if (uint32_t(streamToDriveAbs) >= AUDIO_STREAM_PUBLIC_CNT) {
+        return binderStatusFromStatusT(BAD_VALUE);
+    }
+    audio_utils::lock_guard _l(mMutex);
+    AutoCallerClear acc;
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setDeviceAbsoluteVolumeEnabled(deviceType, address.c_str(),
+                                                                enabled, streamToDriveAbs));
+}
+
 Status AudioPolicyService::initStreamVolume(AudioStreamType streamAidl,
                                             int32_t indexMinAidl,
                                             int32_t indexMaxAidl) {
@@ -1013,7 +1135,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
@@ -1037,7 +1161,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
@@ -1087,7 +1213,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     audio_utils::lock_guard _l(mMutex);
@@ -1393,7 +1521,9 @@
 
     sp<AudioPolicyEffects>audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(audioPolicyEffects->addSourceDefaultEffect(
@@ -1419,7 +1549,9 @@
 
     sp<AudioPolicyEffects> audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(audioPolicyEffects->addStreamDefaultEffect(
@@ -1434,7 +1566,9 @@
             aidl2legacy_int32_t_audio_unique_id_t(idAidl));
     sp<AudioPolicyEffects>audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     return binderStatusFromStatusT(audioPolicyEffects->removeSourceDefaultEffect(id));
@@ -1446,7 +1580,9 @@
             aidl2legacy_int32_t_audio_unique_id_t(idAidl));
     sp<AudioPolicyEffects>audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     return binderStatusFromStatusT(audioPolicyEffects->removeStreamDefaultEffect(id));
@@ -1464,7 +1600,9 @@
                          std::back_inserter(systemUsages), aidl2legacy_AudioUsage_audio_usage_t)));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -1523,26 +1661,13 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attributes));
 
     audio_utils::lock_guard _l(mMutex);
     *_aidl_return = mAudioPolicyManager->isDirectOutputSupported(config, attributes);
     return Status::ok();
 }
 
-template <typename Port>
-void anonymizePortBluetoothAddress(Port *port) {
-    if (port->type != AUDIO_PORT_TYPE_DEVICE) {
-        return;
-    }
-    if (!(audio_is_a2dp_device(port->ext.device.type)
-            || audio_is_ble_device(port->ext.device.type)
-            || audio_is_bluetooth_sco_device(port->ext.device.type)
-            || audio_is_hearing_aid_out_device(port->ext.device.type))) {
-        return;
-    }
-    anonymizeBluetoothAddress(port->ext.device.address);
-}
 
 Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
                                           media::AudioPortType typeAidl, Int* count,
@@ -1565,20 +1690,10 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-
-    const AttributionSourceState attributionSource = getCallingAttributionSource();
-
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->listAudioPorts(role, type, &num_ports, ports.get(), &generation)));
     numPortsReq = std::min(numPortsReq, num_ports);
-
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
-        for (size_t i = 0; i < numPortsReq; ++i) {
-            anonymizePortBluetoothAddress(&ports[i]);
-        }
-    }
-
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(ports.get(), ports.get() + numPortsReq, std::back_inserter(*portsAidl),
                          legacy2aidl_audio_port_v7_AudioPortFw)));
@@ -1605,16 +1720,8 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-
-    const AttributionSourceState attributionSource = getCallingAttributionSource();
-
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
-
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
-        anonymizePortBluetoothAddress(&port);
-    }
-
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
     return Status::ok();
 }
@@ -1629,7 +1736,9 @@
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(AudioValidator::validateAudioPatch(patch)));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1648,7 +1757,9 @@
     audio_patch_handle_t handle = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_patch_handle_t(handleAidl));
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1676,25 +1787,10 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-
-    const AttributionSourceState attributionSource = getCallingAttributionSource();
-
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
     numPatchesReq = std::min(numPatchesReq, num_patches);
-
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
-        for (size_t i = 0; i < numPatchesReq; ++i) {
-            for (size_t j = 0; j < patches[i].num_sources; ++j) {
-                anonymizePortBluetoothAddress(&patches[i].sources[j]);
-            }
-            for (size_t j = 0; j < patches[i].num_sinks; ++j) {
-                anonymizePortBluetoothAddress(&patches[i].sinks[j]);
-            }
-        }
-    }
-
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(patches.get(), patches.get() + numPatchesReq,
                          std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatchFw)));
@@ -1711,7 +1807,9 @@
             binderStatusFromStatusT(AudioValidator::validateAudioPortConfig(config)));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1774,7 +1872,9 @@
     // loopback|render only need a MediaProjection (checked in caller AudioService.java)
     bool needModifyAudioRouting = std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
             return !is_mix_loopback_render(mix.mRouteFlags); });
-    if (needModifyAudioRouting && !modifyAudioRoutingAllowed()) {
+    if (needModifyAudioRouting && !(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -1790,12 +1890,16 @@
     const AttributionSourceState attributionSource = getCallingAttributionSource();
 
 
-    if (needCaptureMediaOutput && !captureMediaOutputAllowed(attributionSource)) {
+    if (needCaptureMediaOutput && !(audioserver_permissions() ?
+                CHECK_PERM(CAPTURE_MEDIA_OUTPUT, attributionSource.uid)
+                : captureMediaOutputAllowed(attributionSource))) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     if (needCaptureVoiceCommunicationOutput &&
-        !captureVoiceCommunicationOutputAllowed(attributionSource)) {
+        !(audioserver_permissions() ?
+                CHECK_PERM(CAPTURE_VOICE_COMMUNICATION_OUTPUT, attributionSource.uid)
+                : captureVoiceCommunicationOutputAllowed(attributionSource))) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -1810,6 +1914,23 @@
     }
 }
 
+Status
+AudioPolicyService::getRegisteredPolicyMixes(std::vector<::android::media::AudioMix>* mixesAidl) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    std::vector<AudioMix> mixes;
+    status_t status = mAudioPolicyManager->getRegisteredPolicyMixes(mixes);
+
+    for (const auto& mix : mixes) {
+        media::AudioMix aidlMix = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_AudioMix(mix));
+        mixesAidl->push_back(aidlMix);
+    }
+
+    return binderStatusFromStatusT(status);
+}
+
 Status AudioPolicyService::updatePolicyMixes(
         const ::std::vector<::android::media::AudioMixUpdate>& updates) {
     audio_utils::lock_guard _l(mMutex);
@@ -1835,7 +1956,9 @@
                                                         aidl2legacy_AudioDeviceTypeAddress));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1849,7 +1972,9 @@
     uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1868,7 +1993,9 @@
                                                         aidl2legacy_AudioDeviceTypeAddress));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1882,7 +2009,9 @@
     int userId = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(userIdAidl));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+            : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1910,7 +2039,7 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attributes));
 
     // startAudioSource should be created as the calling uid
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -1939,7 +2068,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     audio_utils::lock_guard _l(mMutex);
@@ -2598,4 +2729,9 @@
             mAudioPolicyManager->clearPreferredMixerAttributes(&attr, portId, uid));
 }
 
+Status AudioPolicyService::getPermissionController(sp<INativePermissionController>* out) {
+    *out = mPermissionController;
+    return Status::ok();
+}
+
 } // namespace android
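
The hunks above replace each legacy permission helper (modifyAudioRoutingAllowed(), modifyDefaultAudioEffectsAllowed(), settingsAllowed(), captureMediaOutputAllowed(), ...) with a ternary gated on the audioserver_permissions() flag, so the new cached CHECK_PERM lookup is used only when the flag is enabled. The following is a minimal, self-contained sketch of that shape; the enum, flag, and helper functions are illustrative stand-ins, not the real audioserver APIs.

    // Sketch only: stand-ins mirroring the permission-check pattern in the hunks above.
    #include <cstdint>
    #include <iostream>

    enum class Perm { MODIFY_AUDIO_ROUTING, MODIFY_DEFAULT_AUDIO_EFFECTS, MODIFY_AUDIO_SETTINGS };

    // Stand-in for the audioserver_permissions() aconfig flag.
    static bool audioserver_permissions() { return true; }

    // Stand-in for CHECK_PERM(): the new path consults permission state cached inside audioserver.
    static bool checkCachedPermission(Perm p, uint32_t uid) {
        (void)p; (void)uid;
        return true;
    }

    // Stand-in for a legacy helper such as modifyAudioRoutingAllowed().
    static bool modifyAudioRoutingAllowed() { return true; }

    // The repeated pattern: prefer the cached check when the flag is on, otherwise
    // fall back to the legacy per-call permission query.
    static bool callerMayModifyRouting(uint32_t callingUid) {
        return audioserver_permissions()
                ? checkCachedPermission(Perm::MODIFY_AUDIO_ROUTING, callingUid)
                : modifyAudioRoutingAllowed();
    }

    int main() {
        std::cout << (callerMayModifyRouting(10042) ? "allowed" : "denied") << std::endl;
        return 0;
    }
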
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 57e2718..cbc0b41 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -18,9 +18,6 @@
 //#define LOG_NDEBUG 0
 
 #include "Configuration.h"
-#undef __STRICT_ANSI__
-#define __STDINT_LIMITS
-#define __STDC_LIMIT_MACROS
 #include <stdint.h>
 #include <sys/time.h>
 #include <dlfcn.h>
@@ -82,6 +79,7 @@
 BINDER_METHOD_ENTRY(startInput) \
 BINDER_METHOD_ENTRY(stopInput) \
 BINDER_METHOD_ENTRY(releaseInput) \
+BINDER_METHOD_ENTRY(setDeviceAbsoluteVolumeEnabled) \
 BINDER_METHOD_ENTRY(initStreamVolume) \
 BINDER_METHOD_ENTRY(setStreamVolumeIndex) \
 BINDER_METHOD_ENTRY(getStreamVolumeIndex) \
@@ -166,7 +164,9 @@
 BINDER_METHOD_ENTRY(setPreferredMixerAttributes) \
 BINDER_METHOD_ENTRY(getPreferredMixerAttributes) \
 BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
-
+BINDER_METHOD_ENTRY(getRegisteredPolicyMixes) \
+BINDER_METHOD_ENTRY(getPermissionController) \
+                                                     \
 // singleton for Binder Method Statistics for IAudioPolicyService
 static auto& getIAudioPolicyServiceStatistics() {
     using Code = int;
@@ -228,8 +228,11 @@
       mCaptureStateNotifier(false),
       mCreateAudioPolicyManager(createAudioPolicyManager),
       mDestroyAudioPolicyManager(destroyAudioPolicyManager),
-      mUsecaseValidator(media::createUsecaseValidator()) {
+      mUsecaseValidator(media::createUsecaseValidator()),
+      mPermissionController(sp<NativePermissionController>::make())
+{
       setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+      setInheritRt(true);
 }
 
 void AudioPolicyService::loadAudioPolicyManager()
@@ -312,9 +315,20 @@
         }
     }
     AudioSystem::audioPolicyReady();
-    // AudioFlinger will handle effect creation and register these effects on audio_policy
-    // service. Hence, audio_policy service must be ready.
-    audioPolicyEffects->setDefaultDeviceEffects();
+}
+
+const IPermissionProvider& AudioPolicyService::getPermissionProvider() const {
+    return *mPermissionController;
+}
+
+void AudioPolicyService::onAudioSystemReady() {
+    sp<AudioPolicyEffects> audioPolicyEffects;
+    {
+        audio_utils::lock_guard _l(mMutex);
+
+        audioPolicyEffects = mAudioPolicyEffects;
+    }
+    audioPolicyEffects->initDefaultDeviceEffects();
 }
 
 void AudioPolicyService::unloadAudioPolicyManager()
@@ -576,12 +590,13 @@
             if (status == NO_ERROR && currentOutput == newOutput) {
                 return;
             }
-            size_t numActiveTracks = countActiveClientsOnOutput_l(newOutput);
+            std::vector<audio_channel_mask_t> activeTracksMasks =
+                    getActiveTracksMasks_l(newOutput);
             mMutex.unlock();
             // It is OK to call detachOutput() if none is already attached.
             mSpatializer->detachOutput();
             if (status == NO_ERROR && newOutput != AUDIO_IO_HANDLE_NONE) {
-                status = mSpatializer->attachOutput(newOutput, numActiveTracks);
+                status = mSpatializer->attachOutput(newOutput, activeTracksMasks);
             }
             mMutex.lock();
             if (status != NO_ERROR) {
@@ -599,17 +614,17 @@
     }
 }
 
-size_t AudioPolicyService::countActiveClientsOnOutput_l(
+std::vector<audio_channel_mask_t> AudioPolicyService::getActiveTracksMasks_l(
         audio_io_handle_t output, bool spatializedOnly) {
-    size_t count = 0;
+    std::vector<audio_channel_mask_t> activeTrackMasks;
     for (size_t i = 0; i < mAudioPlaybackClients.size(); i++) {
         auto client = mAudioPlaybackClients.valueAt(i);
         if (client->io == output && client->active
                 && (!spatializedOnly || client->isSpatialized)) {
-            count++;
+            activeTrackMasks.push_back(client->channelMask);
         }
     }
-    return count;
+    return activeTrackMasks;
 }
 
 void AudioPolicyService::onUpdateActiveSpatializerTracks_l() {
@@ -625,12 +640,12 @@
         return;
     }
     audio_io_handle_t output = mSpatializer->getOutput();
-    size_t activeClients;
+    std::vector<audio_channel_mask_t> activeTracksMasks;
     {
         audio_utils::lock_guard _l(mMutex);
-        activeClients = countActiveClientsOnOutput_l(output);
+        activeTracksMasks = getActiveTracksMasks_l(output);
     }
-    mSpatializer->updateActiveTracks(activeClients);
+    mSpatializer->updateActiveTracks(activeTracksMasks);
 }
 
 status_t AudioPolicyService::clientCreateAudioPatch(const struct audio_patch *patch,
@@ -859,6 +874,8 @@
 //            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 //    OR the client is the current InputMethodService
 //        AND a RTT call is active AND the source is VOICE_RECOGNITION
+//    OR The client is an active communication owner
+//        AND is on TOP or latest started
 //    OR Any client
 //        AND The assistant is not on TOP
 //        AND is on TOP or latest started
@@ -1023,7 +1040,12 @@
         bool isTopOrLatestAssistant = latestActiveAssistant == nullptr ? false :
             current->attributionSource.uid == latestActiveAssistant->attributionSource.uid;
 
-        auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mMutex) {
+        // TODO: b/339112720
+        // Refine this logic when we have the correct phone state owner UID. The current issue
+        // is that when a VoIP app uses the Telecom API to manage calls, mPhoneStateOwnerUid is
+        // AID_SYSTEM instead of the actual VoIP app UID, so isPhoneStateOwnerActive is inaccurate.
+        const bool canCaptureIfInCallOrCommunication = [&](const auto& recordClient) REQUIRES(
+                                                               mMutex) {
             uid_t recordUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
                 recordClient->attributionSource.uid));
             bool canCaptureCall = recordClient->canCaptureOutput;
@@ -1032,19 +1054,26 @@
                 || recordUid == mPhoneStateOwnerUid;
             return !(isInCall && !canCaptureCall)
                 && !(isInCommunication && !canCaptureCommunication);
-        };
+        }(current);
 
         // By default allow capture if:
         //     The assistant is not on TOP
-        //     AND is on TOP or latest started
-        //     AND there is no active privacy sensitive capture or call
+        //         AND is on TOP or latest started
+        //         AND there is no active privacy sensitive capture or call
         //             OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-        bool allowSensitiveCapture =
+        //     The assistant is on TOP
+        //         AND is the ongoing communication owner
+        //         AND is on TOP or latest started
+        const bool allowSensitiveCapture =
             !isSensitiveActive || isTopOrLatestSensitive || current->canCaptureOutput;
-        bool allowCapture = !isAssistantOnTop
-                && (isTopOrLatestActive || isTopOrLatestSensitive)
-                && allowSensitiveCapture
-                && canCaptureIfInCallOrCommunication(current);
+        bool allowCapture = false;
+        if (!isAssistantOnTop) {
+            allowCapture = (isTopOrLatestActive || isTopOrLatestSensitive) &&
+                           allowSensitiveCapture && canCaptureIfInCallOrCommunication;
+        } else {
+            allowCapture = isInCommunication && isTopOrLatestSensitive &&
+                           canCaptureIfInCallOrCommunication;
+        }
 
         if (!current->hasOp()) {
             // Never allow capture if app op is denied
@@ -1067,7 +1096,7 @@
                     allowCapture = true;
                 }
             } else if (allowSensitiveCapture
-                    && canCaptureIfInCallOrCommunication(current)) {
+                    && canCaptureIfInCallOrCommunication) {
                 if (isTopOrLatestAssistant
                     && (source == AUDIO_SOURCE_VOICE_RECOGNITION
                         || source == AUDIO_SOURCE_HOTWORD)) {
@@ -1088,7 +1117,7 @@
                     allowCapture = true;
                 }
             } else if (allowSensitiveCapture
-                        && canCaptureIfInCallOrCommunication(current)) {
+                        && canCaptureIfInCallOrCommunication) {
                 if ((source == AUDIO_SOURCE_VOICE_RECOGNITION) || (source == AUDIO_SOURCE_HOTWORD))
                 {
                     allowCapture = true;
@@ -1103,7 +1132,7 @@
             //         Is on TOP AND the source is VOICE_RECOGNITION or HOTWORD
             if (!isAssistantOnTop
                     && allowSensitiveCapture
-                    && canCaptureIfInCallOrCommunication(current)) {
+                    && canCaptureIfInCallOrCommunication) {
                 allowCapture = true;
             }
             if (isA11yOnTop) {
@@ -1117,7 +1146,7 @@
             //     AND no call is active
             //         OR client has CAPTURE_AUDIO_OUTPUT privileged permission
             if (onlyHotwordActive
-                    && canCaptureIfInCallOrCommunication(current)) {
+                    && canCaptureIfInCallOrCommunication) {
                 allowCapture = true;
             }
         } else if (mUidPolicy->isCurrentImeUid(currentUid)) {
@@ -1184,12 +1213,13 @@
         if (client->silenced != silenced) {
             if (client->active) {
                 if (silenced) {
-                    finishRecording(client->attributionSource, client->attributes.source);
+                    finishRecording(client->attributionSource, client->virtualDeviceId,
+                                    client->attributes.source);
                 } else {
                     std::stringstream msg;
                     msg << "Audio recording un-silenced on session " << client->session;
-                    if (!startRecording(client->attributionSource, String16(msg.str().c_str()),
-                            client->attributes.source)) {
+                    if (!startRecording(client->attributionSource, client->virtualDeviceId,
+                                        String16(msg.str().c_str()), client->attributes.source)) {
                         silenced = true;
                     }
                 }
@@ -1300,6 +1330,7 @@
         case TRANSACTION_setPhoneState:
 //FIXME: Allow setForceUse calls from system apps until a better use case routing API is available
 //      case TRANSACTION_setForceUse:
+        case TRANSACTION_setDeviceAbsoluteVolumeEnabled:
         case TRANSACTION_initStreamVolume:
         case TRANSACTION_setStreamVolumeIndex:
         case TRANSACTION_setVolumeIndexForAttributes:
@@ -1348,7 +1379,8 @@
         case TRANSACTION_getDevicesForRoleAndCapturePreset:
         case TRANSACTION_getSpatializer:
         case TRANSACTION_setPreferredMixerAttributes:
-        case TRANSACTION_clearPreferredMixerAttributes: {
+        case TRANSACTION_clearPreferredMixerAttributes:
+        case TRANSACTION_getRegisteredPolicyMixes: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
@@ -1360,6 +1392,17 @@
             break;
     }
 
+    switch (code) {
+        case TRANSACTION_getPermissionController: {
+            if (!isAudioServerOrSystemServerUid(IPCThreadState::self()->getCallingUid())) {
+                ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
+                      __func__, code, IPCThreadState::self()->getCallingPid(),
+                      IPCThreadState::self()->getCallingUid());
+                return INVALID_OPERATION;
+            }
+        }
+    }
+
     const std::string methodName = getIAudioPolicyServiceStatistics().getMethodForCode(code);
     mediautils::TimeCheck check(
             std::string("IAudioPolicyService::").append(methodName),
@@ -1373,8 +1416,8 @@
         } else {
             getIAudioPolicyServiceStatistics().event(code, elapsedMs);
         }
-    }, mediautils::TimeCheck::kDefaultTimeoutDuration,
-    mediautils::TimeCheck::kDefaultSecondChanceDuration,
+    }, mediautils::TimeCheck::getDefaultTimeoutDuration(),
+    mediautils::TimeCheck::getDefaultSecondChanceDuration(),
     true /* crashOnTimeout */);
 
     switch (code) {
@@ -1417,144 +1460,13 @@
     if (in == BAD_TYPE || out == BAD_TYPE || err == BAD_TYPE) {
         return BAD_VALUE;
     }
-    if (args.size() >= 3 && args[0] == String16("set-uid-state")) {
-        return handleSetUidState(args, err);
-    } else if (args.size() >= 2 && args[0] == String16("reset-uid-state")) {
-        return handleResetUidState(args, err);
-    } else if (args.size() >= 2 && args[0] == String16("get-uid-state")) {
-        return handleGetUidState(args, out, err);
-    } else if (args.size() >= 1 && args[0] == String16("purge_permission-cache")) {
+    if (args.size() >= 1 && args[0] == String16("purge_permission-cache")) {
         purgePermissionCache();
         return NO_ERROR;
-    } else if (args.size() == 1 && args[0] == String16("help")) {
-        printHelp(out);
-        return NO_ERROR;
     }
-    printHelp(err);
     return BAD_VALUE;
 }
 
-static status_t getUidForPackage(String16 packageName, int userId, /*inout*/uid_t& uid, int err) {
-    if (userId < 0) {
-        ALOGE("Invalid user: %d", userId);
-        dprintf(err, "Invalid user: %d\n", userId);
-        return BAD_VALUE;
-    }
-
-    PermissionController pc;
-    uid = pc.getPackageUid(packageName, 0);
-    if (uid <= 0) {
-        ALOGE("Unknown package: '%s'", String8(packageName).c_str());
-        dprintf(err, "Unknown package: '%s'\n", String8(packageName).c_str());
-        return BAD_VALUE;
-    }
-
-    uid = multiuser_get_uid(userId, uid);
-    return NO_ERROR;
-}
-
-status_t AudioPolicyService::handleSetUidState(Vector<String16>& args, int err) {
-    // Valid arg.size() is 3 or 5, args.size() is 5 with --user option.
-    if (!(args.size() == 3 || args.size() == 5)) {
-        printHelp(err);
-        return BAD_VALUE;
-    }
-
-    bool active = false;
-    if (args[2] == String16("active")) {
-        active = true;
-    } else if ((args[2] != String16("idle"))) {
-        ALOGE("Expected active or idle but got: '%s'", String8(args[2]).c_str());
-        return BAD_VALUE;
-    }
-
-    int userId = 0;
-    if (args.size() >= 5 && args[3] == String16("--user")) {
-        userId = atoi(String8(args[4]));
-    }
-
-    uid_t uid;
-    if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
-        return BAD_VALUE;
-    }
-
-    sp<UidPolicy> uidPolicy;
-    {
-        audio_utils::lock_guard _l(mMutex);
-        uidPolicy = mUidPolicy;
-    }
-    if (uidPolicy) {
-        uidPolicy->addOverrideUid(uid, active);
-        return NO_ERROR;
-    }
-    return NO_INIT;
-}
-
-status_t AudioPolicyService::handleResetUidState(Vector<String16>& args, int err) {
-    // Valid arg.size() is 2 or 4, args.size() is 4 with --user option.
-    if (!(args.size() == 2 || args.size() == 4)) {
-        printHelp(err);
-        return BAD_VALUE;
-    }
-
-    int userId = 0;
-    if (args.size() >= 4 && args[2] == String16("--user")) {
-        userId = atoi(String8(args[3]));
-    }
-
-    uid_t uid;
-    if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
-        return BAD_VALUE;
-    }
-
-    sp<UidPolicy> uidPolicy;
-    {
-        audio_utils::lock_guard _l(mMutex);
-        uidPolicy = mUidPolicy;
-    }
-    if (uidPolicy) {
-        uidPolicy->removeOverrideUid(uid);
-        return NO_ERROR;
-    }
-    return NO_INIT;
-}
-
-status_t AudioPolicyService::handleGetUidState(Vector<String16>& args, int out, int err) {
-    // Valid arg.size() is 2 or 4, args.size() is 4 with --user option.
-    if (!(args.size() == 2 || args.size() == 4)) {
-        printHelp(err);
-        return BAD_VALUE;
-    }
-
-    int userId = 0;
-    if (args.size() >= 4 && args[2] == String16("--user")) {
-        userId = atoi(String8(args[3]));
-    }
-
-    uid_t uid;
-    if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
-        return BAD_VALUE;
-    }
-
-    sp<UidPolicy> uidPolicy;
-    {
-        audio_utils::lock_guard _l(mMutex);
-        uidPolicy = mUidPolicy;
-    }
-    if (uidPolicy) {
-        return dprintf(out, uidPolicy->isUidActive(uid) ? "active\n" : "idle\n");
-    }
-    return NO_INIT;
-}
-
-status_t AudioPolicyService::printHelp(int out) {
-    return dprintf(out, "Audio policy service commands:\n"
-        "  get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
-        "  set-uid-state <PACKAGE> <active|idle> [--user USER_ID] overrides the uid state\n"
-        "  reset-uid-state <PACKAGE> [--user USER_ID] clears the uid state override\n"
-        "  help print this message\n");
-}
-
 status_t AudioPolicyService::registerOutput(audio_io_handle_t output,
                         const audio_config_base_t& config,
                         const audio_output_flags_t flags) {
@@ -1615,10 +1527,6 @@
     checkRegistered();
     {
         audio_utils::lock_guard _l(mMutex);
-        auto overrideIter = mOverrideUids.find(uid);
-        if (overrideIter != mOverrideUids.end()) {
-            return overrideIter->second.first;
-        }
         // In the absence of the ActivityManager, assume everything to be active.
         if (!mObserverRegistered) return true;
         auto cacheIter = mCachedUids.find(uid);
@@ -1644,20 +1552,6 @@
     checkRegistered();
     {
         audio_utils::lock_guard _l(mMutex);
-        auto overrideIter = mOverrideUids.find(uid);
-        if (overrideIter != mOverrideUids.end()) {
-            if (overrideIter->second.first) {
-                if (overrideIter->second.second != ActivityManager::PROCESS_STATE_UNKNOWN) {
-                    return overrideIter->second.second;
-                } else {
-                    auto cacheIter = mCachedUids.find(uid);
-                    if (cacheIter != mCachedUids.end()) {
-                        return cacheIter->second.second;
-                    }
-                }
-            }
-            return ActivityManager::PROCESS_STATE_UNKNOWN;
-        }
         // In the absence of the ActivityManager, assume everything to be active.
         if (!mObserverRegistered) {
             return ActivityManager::PROCESS_STATE_TOP;
@@ -1710,10 +1604,6 @@
 void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused, int32_t adj __unused) {
 }
 
-void AudioPolicyService::UidPolicy::updateOverrideUid(uid_t uid, bool active, bool insert) {
-    updateUid(&mOverrideUids, uid, active, ActivityManager::PROCESS_STATE_UNKNOWN, insert);
-}
-
 void AudioPolicyService::UidPolicy::notifyService() {
     sp<AudioPolicyService> service = mService.promote();
     if (service != nullptr) {
@@ -2481,7 +2371,7 @@
     while (command->mWaitStatus) {
         nsecs_t timeOutNs = kAudioCommandTimeoutNs + milliseconds(delayMs);
         if (command->mCond.wait_for(
-                    ul, std::chrono::nanoseconds(timeOutNs)) == std::cv_status::timeout) {
+                ul, std::chrono::nanoseconds(timeOutNs), getTid()) == std::cv_status::timeout) {
             command->mStatus = TIMED_OUT;
             command->mWaitStatus = false;
         }
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 9a8a056..92c162f 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -21,6 +21,7 @@
 #include <android/media/GetSpatializerResponse.h>
 #include <android-base/thread_annotations.h>
 #include <audio_utils/mutex.h>
+#include <com/android/media/permission/INativePermissionController.h>
 #include <cutils/misc.h>
 #include <cutils/config_utils.h>
 #include <cutils/compiler.h>
@@ -35,6 +36,8 @@
 #include <media/ToneGenerator.h>
 #include <media/AudioEffect.h>
 #include <media/AudioPolicy.h>
+#include <media/IAudioPolicyServiceLocal.h>
+#include <media/NativePermissionController.h>
 #include <media/UsecaseValidator.h>
 #include <mediautils/ServiceUtilities.h>
 #include "AudioPolicyEffects.h"
@@ -68,12 +71,16 @@
 }
 
 using ::android::media::audiopolicy::AudioRecordClient;
+using ::com::android::media::permission::INativePermissionController;
+using ::com::android::media::permission::NativePermissionController;
+using ::com::android::media::permission::IPermissionProvider;
 
 class AudioPolicyService :
     public BinderService<AudioPolicyService>,
     public media::BnAudioPolicyService,
     public IBinder::DeathRecipient,
-    public SpatializerPolicyCallback
+    public SpatializerPolicyCallback,
+    public media::IAudioPolicyServiceLocal
 {
     friend class sp<AudioPolicyService>;
 
@@ -121,6 +128,9 @@
     binder::Status startInput(int32_t portId) override;
     binder::Status stopInput(int32_t portId) override;
     binder::Status releaseInput(int32_t portId) override;
+    binder::Status setDeviceAbsoluteVolumeEnabled(const AudioDevice& device,
+                                                  bool enabled,
+                                                  AudioStreamType streamToDriveAbs) override;
     binder::Status initStreamVolume(AudioStreamType stream, int32_t indexMin,
                                     int32_t indexMax) override;
     binder::Status setStreamVolumeIndex(AudioStreamType stream,
@@ -311,15 +321,26 @@
     binder::Status clearPreferredMixerAttributes(const media::audio::common::AudioAttributes& attr,
                                                  int32_t portId,
                                                  int32_t uid) override;
+    binder::Status getRegisteredPolicyMixes(
+            std::vector<::android::media::AudioMix>* mixes) override;
+
+    // Should only be called by AudioService to push permission data down to audioserver
+    binder::Status getPermissionController(sp<INativePermissionController>* out) override;
 
     status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
 
+    // -- IAudioPolicyServiceLocal methods
+    const IPermissionProvider& getPermissionProvider() const override;
+
     // IBinder::DeathRecipient
     virtual     void        binderDied(const wp<IBinder>& who);
 
     // RefBase
     virtual     void        onFirstRef();
 
+    // Commence initialization when AudioSystem is ready.
+    void onAudioSystemReady();
+
     //
     // Helpers for the struct audio_policy_service_ops implementation.
     // This is used by the audio policy manager for certain operations that
@@ -431,8 +452,8 @@
     app_state_t apmStatFromAmState(int amState);
 
     bool isSupportedSystemUsage(audio_usage_t usage);
-    status_t validateUsage(const audio_attributes_t& attr);
-    status_t validateUsage(const audio_attributes_t& attr,
+    binder::Status validateUsage(const audio_attributes_t& attr);
+    binder::Status validateUsage(const audio_attributes_t& attr,
                            const AttributionSourceState& attributionSource);
 
     void updateUidStates();
@@ -495,8 +516,6 @@
                 int32_t capability) override;
         void onUidProcAdjChanged(uid_t uid, int32_t adj) override;
 
-        void addOverrideUid(uid_t uid, bool active) { updateOverrideUid(uid, active, true); }
-        void removeOverrideUid(uid_t uid) { updateOverrideUid(uid, false, false); }
 
         void updateUid(std::unordered_map<uid_t, std::pair<bool, int>> *uids,
                        uid_t uid, bool active, int state, bool insert);
@@ -505,7 +524,6 @@
 
      private:
         void notifyService();
-        void updateOverrideUid(uid_t uid, bool active, bool insert);
         void updateUidLocked(std::unordered_map<uid_t, std::pair<bool, int>> *uids,
                              uid_t uid, bool active, int state, bool insert);
         void checkRegistered();
@@ -514,7 +532,6 @@
         audio_utils::mutex mMutex{audio_utils::MutexOrder::kUidPolicy_Mutex};
         ActivityManager mAm;
         bool mObserverRegistered = false;
-        std::unordered_map<uid_t, std::pair<bool, int>> mOverrideUids GUARDED_BY(mMutex);
         std::unordered_map<uid_t, std::pair<bool, int>> mCachedUids GUARDED_BY(mMutex);
         std::vector<uid_t> mAssistantUids;
         std::vector<uid_t> mActiveAssistantUids;
@@ -540,6 +557,10 @@
             binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
                                                   bool enabled);
 
+            binder::Status onSensorPrivacyStateChanged(int, int, int) {
+                return binder::Status::ok();
+            }
+
         private:
             wp<AudioPolicyService> mService;
             std::atomic_bool mSensorPrivacyEnabled = false;
@@ -769,7 +790,8 @@
                                     audio_config_base_t *mixerConfig,
                                     const sp<DeviceDescriptorBase>& device,
                                     uint32_t *latencyMs,
-                                    audio_output_flags_t flags);
+                                    audio_output_flags_t flags,
+                                    audio_attributes_t attributes);
         // creates a special output that is duplicated to the two outputs passed as arguments. The duplication is performed by
         // a special mixer thread in the AudioFlinger.
         virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1, audio_io_handle_t output2);
@@ -867,6 +889,9 @@
         status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                  struct audio_port_v7 *port) override;
 
+        status_t setTracksInternalMute(
+                const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
@@ -952,13 +977,15 @@
                       const audio_io_handle_t io, AttributionSourceState attributionSource,
                             const audio_session_t session, audio_port_handle_t portId,
                             audio_port_handle_t deviceId, audio_stream_type_t stream,
-                            bool isSpatialized) :
+                            bool isSpatialized, audio_channel_mask_t channelMask) :
                     AudioClient(attributes, io, attributionSource, session, portId,
-                        deviceId), stream(stream), isSpatialized(isSpatialized)  {}
+                        deviceId), stream(stream), isSpatialized(isSpatialized),
+                        channelMask(channelMask) {}
                 ~AudioPlaybackClient() override = default;
 
         const audio_stream_type_t stream;
         const bool isSpatialized;
+        const audio_channel_mask_t channelMask;
     };
 
     void getPlaybackClientAndEffects(audio_port_handle_t portId,
@@ -989,14 +1016,14 @@
     void unloadAudioPolicyManager();
 
     /**
-     * Returns the number of active audio tracks on the specified output mixer.
+     * Returns the channel masks for active audio tracks on the specified output mixer.
      * The query can be specified to only include spatialized audio tracks or consider
      * all tracks.
      * @param output the I/O handle of the output mixer to consider
      * @param spatializedOnly true if only spatialized tracks should be considered
-     * @return the number of active tracks.
+     * @return a list of channel masks for all active tracks matching the condition.
      */
-    size_t countActiveClientsOnOutput_l(
+    std::vector<audio_channel_mask_t> getActiveTracksMasks_l(
             audio_io_handle_t output, bool spatializedOnly = true) REQUIRES(mMutex);
 
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAudioPolicyService_Mutex};
@@ -1040,6 +1067,7 @@
     CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
     DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
     std::unique_ptr<media::UsecaseValidator> mUsecaseValidator;
+    const sp<NativePermissionController> mPermissionController;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
index a89a84d..6d8b3cf 100644
--- a/services/audiopolicy/service/AudioRecordClient.cpp
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -18,9 +18,10 @@
 
 #include "AudioRecordClient.h"
 #include "AudioPolicyService.h"
+#include <android_media_audiopolicy.h>
 
 namespace android::media::audiopolicy {
-
+namespace audiopolicy_flags = android::media::audiopolicy;
 using android::AudioPolicyService;
 
 namespace {
@@ -59,8 +60,10 @@
 // static
 sp<OpRecordAudioMonitor>
 OpRecordAudioMonitor::createIfNeeded(
-            const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
-            wp<AudioPolicyService::AudioCommandThread> commandThread)
+        const AttributionSourceState &attributionSource,
+        const uint32_t virtualDeviceId,
+        const audio_attributes_t &attr,
+        wp<AudioPolicyService::AudioCommandThread> commandThread)
 {
     if (isAudioServerOrRootUid(attributionSource.uid)) {
         ALOGV("not silencing record for audio or root source %s",
@@ -78,15 +81,19 @@
             || attributionSource.packageName.value().size() == 0) {
         return nullptr;
     }
-    return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+
+    return new OpRecordAudioMonitor(attributionSource, virtualDeviceId, attr,
+                                    getOpForSource(attr.source), commandThread);
 }
 
 OpRecordAudioMonitor::OpRecordAudioMonitor(
-        const AttributionSourceState& attributionSource, int32_t appOp,
+        const AttributionSourceState &attributionSource,
+        const uint32_t virtualDeviceId, const audio_attributes_t &attr,
+        int32_t appOp,
         wp<AudioPolicyService::AudioCommandThread> commandThread) :
-            mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
-            mCommandThread(commandThread)
-{
+        mHasOp(true), mAttributionSource(attributionSource),
+        mVirtualDeviceId(virtualDeviceId), mAttr(attr), mAppOp(appOp),
+        mCommandThread(commandThread) {
 }
 
 OpRecordAudioMonitor::~OpRecordAudioMonitor()
@@ -131,7 +138,12 @@
     const int32_t mode = mAppOpsManager.checkOp(mAppOp,
             mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
                 mAttributionSource.packageName.value_or(""))));
-    const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+    bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+
+    if (audiopolicy_flags::record_audio_device_aware_permission()) {
+        const bool canRecord = recordingAllowed(mAttributionSource, mVirtualDeviceId, mAttr.source);
+        hasIt = hasIt && canRecord;
+    }
     // verbose logging only log when appOp changed
     ALOGI_IF(hasIt != mHasOp.load(),
             "App op %d missing, %ssilencing record %s",
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
index d3be316..76aff41 100644
--- a/services/audiopolicy/service/AudioRecordClient.h
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -38,12 +38,16 @@
 
     static sp<OpRecordAudioMonitor> createIfNeeded(
             const AttributionSourceState& attributionSource,
+            uint32_t virtualDeviceId,
             const audio_attributes_t& attr,
             wp<AudioPolicyService::AudioCommandThread> commandThread);
 
 private:
-    OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
-            wp<AudioPolicyService::AudioCommandThread> commandThread);
+    OpRecordAudioMonitor(const AttributionSourceState &attributionSource,
+                         uint32_t virtualDeviceId,
+                         const audio_attributes_t &attr,
+                         int32_t appOp,
+                         wp<AudioPolicyService::AudioCommandThread> commandThread);
 
     void onFirstRef() override;
 
@@ -67,6 +71,8 @@
 
     std::atomic_bool mHasOp;
     const AttributionSourceState mAttributionSource;
+    const uint32_t mVirtualDeviceId;
+    const audio_attributes_t mAttr;
     const int32_t mAppOp;
     wp<AudioPolicyService::AudioCommandThread> mCommandThread;
 };
@@ -81,15 +87,20 @@
                       const audio_session_t session, audio_port_handle_t portId,
                       const audio_port_handle_t deviceId,
                       const AttributionSourceState& attributionSource,
+                      const uint32_t virtualDeviceId,
                       bool canCaptureOutput, bool canCaptureHotword,
                       wp<AudioPolicyService::AudioCommandThread> commandThread) :
                 AudioClient(attributes, io, attributionSource,
                     session, portId, deviceId), attributionSource(attributionSource),
+                    virtualDeviceId(virtualDeviceId),
                     startTimeNs(0), canCaptureOutput(canCaptureOutput),
                     canCaptureHotword(canCaptureHotword), silenced(false),
                     mOpRecordAudioMonitor(
                             OpRecordAudioMonitor::createIfNeeded(attributionSource,
-                            attributes, commandThread)) {}
+                                                                 virtualDeviceId,
+                                                                 attributes, commandThread)) {
+
+            }
             ~AudioRecordClient() override = default;
 
     bool hasOp() const {
@@ -97,6 +108,7 @@
     }
 
     const AttributionSourceState attributionSource; // attribution source of client
+    const uint32_t virtualDeviceId; // id of the virtual device associated with the audio device
     nsecs_t startTimeNs;
     const bool canCaptureOutput;
     const bool canCaptureHotword;
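
The AudioRecordClient/OpRecordAudioMonitor changes above thread a virtualDeviceId into the monitor so that, when the record_audio_device_aware_permission flag is enabled, the app-op result is additionally ANDed with a device-aware recordingAllowed() check. A reduced sketch of that combination, with placeholder helpers standing in for the real AppOps and permission queries:

    #include <cstdint>
    #include <iostream>

    // Placeholder for the record_audio_device_aware_permission() aconfig flag.
    static bool record_audio_device_aware_permission() { return true; }

    // Placeholder for the AppOps query performed in OpRecordAudioMonitor::checkOp().
    static bool appOpAllowed() { return true; }

    // Placeholder for recordingAllowed(attributionSource, virtualDeviceId, source).
    static bool recordingAllowed(uint32_t virtualDeviceId) {
        return virtualDeviceId == 0;  // pretend only the default device may record
    }

    // Mirrors the combination in checkOp(): the op result is further gated by the
    // device-aware permission check when the flag is enabled.
    static bool shouldNotSilence(uint32_t virtualDeviceId) {
        bool hasIt = appOpAllowed();
        if (record_audio_device_aware_permission()) {
            hasIt = hasIt && recordingAllowed(virtualDeviceId);
        }
        return hasIt;
    }

    int main() {
        std::cout << std::boolalpha << shouldNotSilence(0) << ' ' << shouldNotSilence(7)
                  << std::endl;
        return 0;
    }
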
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index ca3e0e0..9cc3b8f 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -31,6 +31,7 @@
 #include <audio_utils/fixedfft.h>
 #include <com_android_media_audio.h>
 #include <cutils/bitops.h>
+#include <cutils/properties.h>
 #include <hardware/sensors.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -49,12 +50,12 @@
 using aidl_utils::statusTFromBinderStatus;
 using android::content::AttributionSourceState;
 using binder::Status;
+using internal::ToString;
 using media::HeadTrackingMode;
 using media::Pose3f;
 using media::SensorPoseProvider;
 using media::audio::common::HeadTracking;
 using media::audio::common::Spatialization;
-using ::android::internal::ToString;
 
 using namespace std::chrono_literals;
 
@@ -291,6 +292,7 @@
       mPolicyCallback(callback) {
     ALOGV("%s", __func__);
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    setInheritRt(true);
 }
 
 void Spatializer::onFirstRef() {
@@ -348,7 +350,8 @@
     bool activeLevelFound = false;
     for (const auto spatializationLevel : spatializationLevels) {
         if (!aidl_utils::isValidEnum(spatializationLevel)) {
-            ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
+            ALOGW("%s: ignoring spatializationLevel:%s", __func__,
+                  ToString(spatializationLevel).c_str());
             continue;
         }
         if (spatializationLevel == Spatialization::Level::NONE) {
@@ -375,7 +378,8 @@
 
     for (const auto spatializationMode : spatializationModes) {
         if (!aidl_utils::isValidEnum(spatializationMode)) {
-            ALOGW("%s: ignoring spatializationMode:%d", __func__, (int)spatializationMode);
+            ALOGW("%s: ignoring spatializationMode:%s", __func__,
+                  ToString(spatializationMode).c_str());
             continue;
         }
         // we don't detect duplicates.
@@ -394,8 +398,10 @@
         return status;
     }
     for (const auto channelMask : channelMasks) {
+        static const bool stereo_spatialization_enabled =
+                property_get_bool("ro.audio.stereo_spatialization_enabled", false);
         const bool channel_mask_spatialized =
-                com_android_media_audio_stereo_spatialization()
+                (stereo_spatialization_enabled && com_android_media_audio_stereo_spatialization())
                 ? audio_channel_mask_contains_stereo(channelMask)
                 : audio_is_channel_mask_spatialized(channelMask);
         if (!channel_mask_spatialized) {
@@ -410,27 +416,26 @@
         return BAD_VALUE;
     }
 
-    //TODO b/273373363: use AIDL enum when available
     if (com::android::media::audio::dsa_over_bt_le_audio()
             && mSupportsHeadTracking) {
-        mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
-        std::vector<uint8_t> headtrackingConnectionModes;
+        mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
+        std::vector<HeadTracking::ConnectionMode> headtrackingConnectionModes;
         status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION,
                 &headtrackingConnectionModes);
         if (status == NO_ERROR) {
             for (const auto htConnectionMode : headtrackingConnectionModes) {
-                if (htConnectionMode < HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED ||
-                        htConnectionMode > HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL) {
-                    ALOGW("%s: ignoring HT connection mode:%d", __func__, (int)htConnectionMode);
+                if (htConnectionMode < HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED ||
+                    htConnectionMode > HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL) {
+                    ALOGW("%s: ignoring HT connection mode:%s", __func__,
+                          ToString(htConnectionMode).c_str());
                     continue;
                 }
-                mSupportedHeadtrackingConnectionModes.insert(
-                        static_cast<headtracking_connection_t> (htConnectionMode));
+                mSupportedHeadtrackingConnectionModes.insert(htConnectionMode);
             }
             ALOGW_IF(mSupportedHeadtrackingConnectionModes.find(
-                    HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED)
-                        == mSupportedHeadtrackingConnectionModes.end(),
-                    "%s: HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED not reported", __func__);
+                    HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED) ==
+                        mSupportedHeadtrackingConnectionModes.end(),
+                    "%s: Headtracking FRAMEWORK_PROCESSED not reported", __func__);
         }
     }
 
@@ -557,12 +562,12 @@
     }
     audio_utils::lock_guard lock(mMutex);
     *level = mLevel;
-    ALOGV("%s level %d", __func__, (int)*level);
+    ALOGV("%s level %s", __func__, ToString(*level).c_str());
     return Status::ok();
 }
 
 Status Spatializer::isHeadTrackingSupported(bool *supports) {
-    ALOGV("%s mSupportsHeadTracking %d", __func__, mSupportsHeadTracking);
+    ALOGV("%s mSupportsHeadTracking %s", __func__, ToString(mSupportsHeadTracking).c_str());
     if (supports == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
@@ -857,7 +862,7 @@
 }
 
 void Spatializer::onActualModeChange(HeadTrackingMode mode) {
-    std::string modeStr = media::toString(mode);
+    std::string modeStr = ToString(mode);
     ALOGV("%s(%s)", __func__, modeStr.c_str());
     sp<AMessage> msg = new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
     msg->setInt32(EngineCallbackHandler::kModeKey, static_cast<int>(mode));
@@ -865,7 +870,7 @@
 }
 
 void Spatializer::onActualModeChangeMsg(HeadTrackingMode mode) {
-    ALOGV("%s(%d)", __func__, (int) mode);
+    ALOGV("%s(%s)", __func__, ToString(mode).c_str());
     sp<media::ISpatializerHeadTrackingCallback> callback;
     HeadTracking::Mode spatializerMode;
     {
@@ -884,7 +889,7 @@
                     spatializerMode = HeadTracking::Mode::RELATIVE_SCREEN;
                     break;
                 default:
-                    LOG_ALWAYS_FATAL("Unknown mode: %d", mode);
+                    LOG_ALWAYS_FATAL("Unknown mode: %s", ToString(mode).c_str());
             }
         }
         mActualHeadTrackingMode = spatializerMode;
@@ -898,7 +903,7 @@
             }
         }
         callback = mHeadTrackingCallback;
-        mLocalLog.log("%s: updating mode to %s", __func__, media::toString(mode).c_str());
+        mLocalLog.log("%s: updating mode to %s", __func__, ToString(mode).c_str());
     }
     if (callback != nullptr) {
         callback->onHeadTrackingModeChanged(spatializerMode);
@@ -931,7 +936,8 @@
             });
 }
 
-status_t Spatializer::attachOutput(audio_io_handle_t output, size_t numActiveTracks) {
+status_t Spatializer::attachOutput(audio_io_handle_t output,
+          const std::vector<audio_channel_mask_t>& activeTracksMasks) {
     bool outputChanged = false;
     sp<media::INativeSpatializerCallback> callback;
 
@@ -939,7 +945,7 @@
         audio_utils::lock_guard lock(mMutex);
         ALOGV("%s output %d mOutput %d", __func__, (int)output, (int)mOutput);
         mLocalLog.log("%s with output %d tracks %zu (mOutput %d)", __func__, (int)output,
-                      numActiveTracks, (int)mOutput);
+                      activeTracksMasks.size(), (int)mOutput);
         if (mOutput != AUDIO_IO_HANDLE_NONE) {
             LOG_ALWAYS_FATAL_IF(mEngine == nullptr, "%s output set without FX engine", __func__);
             // remove FX instance
@@ -964,7 +970,7 @@
 
         outputChanged = mOutput != output;
         mOutput = output;
-        mNumActiveTracks = numActiveTracks;
+        mActiveTracksMasks = activeTracksMasks;
         AudioSystem::addSupportedLatencyModesCallback(this);
 
         std::vector<audio_latency_mode_t> latencyModes;
@@ -1003,7 +1009,8 @@
 
     {
         audio_utils::lock_guard lock(mMutex);
-        mLocalLog.log("%s with output %d tracks %zu", __func__, (int)mOutput, mNumActiveTracks);
+        mLocalLog.log("%s with output %d num tracks %zu",
+            __func__, (int)mOutput, mActiveTracksMasks.size());
         ALOGV("%s mOutput %d", __func__, (int)mOutput);
         if (mOutput == AUDIO_IO_HANDLE_NONE) {
             return output;
@@ -1046,34 +1053,35 @@
     }
 }
 
-void Spatializer::updateActiveTracks(size_t numActiveTracks) {
+void Spatializer::updateActiveTracks(
+        const std::vector<audio_channel_mask_t>& activeTracksMasks) {
     audio_utils::lock_guard lock(mMutex);
-    if (mNumActiveTracks != numActiveTracks) {
-        mLocalLog.log("%s from %zu to %zu", __func__, mNumActiveTracks, numActiveTracks);
-        mNumActiveTracks = numActiveTracks;
+    if (mActiveTracksMasks != activeTracksMasks) {
+        mLocalLog.log("%s from %zu to %zu",
+                __func__, mActiveTracksMasks.size(), activeTracksMasks.size());
+        mActiveTracksMasks = activeTracksMasks;
         checkEngineState_l();
         checkSensorsState_l();
     }
 }
 
-//TODO b/273373363: use AIDL enum when available
 audio_latency_mode_t Spatializer::selectHeadtrackingConnectionMode_l() {
     if (!com::android::media::audio::dsa_over_bt_le_audio()) {
         return AUDIO_LATENCY_MODE_LOW;
     }
     // mSupportedLatencyModes is ordered according to system preferences loaded in
     // mOrderedLowLatencyModes
-    mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
+    mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
     audio_latency_mode_t requestedLatencyMode = mSupportedLatencyModes[0];
     if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
         if (mSupportedHeadtrackingConnectionModes.find(
-                HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL)
+                HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL)
                     != mSupportedHeadtrackingConnectionModes.end()) {
-            mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL;
+            mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL;
         } else if (mSupportedHeadtrackingConnectionModes.find(
-                HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW)
+                HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW)
                     != mSupportedHeadtrackingConnectionModes.end()) {
-            mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW;
+            mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW;
         } else {
             // if the engine does not support direct reading of IMU data, do not allow
             // DYNAMIC_SPATIAL_AUDIO_HARDWARE mode and fallback to next mode
@@ -1093,7 +1101,7 @@
 }
 
 void Spatializer::checkSensorsState_l() {
-    audio_latency_mode_t requestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
+    mRequestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
     const bool supportsSetLatencyMode = !mSupportedLatencyModes.empty();
     bool supportsLowLatencyMode;
     if (com::android::media::audio::dsa_over_bt_le_audio()) {
@@ -1110,11 +1118,11 @@
         if (mPoseController != nullptr) {
             // TODO(b/253297301, b/255433067) reenable low latency condition check
             // for Head Tracking after Bluetooth HAL supports it correctly.
-            if (mNumActiveTracks > 0 && mLevel != Spatialization::Level::NONE
+            if (shouldUseHeadTracking_l() && mLevel != Spatialization::Level::NONE
                     && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
                     && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
                 if (supportsLowLatencyMode) {
-                    requestedLatencyMode = selectHeadtrackingConnectionMode_l();
+                    mRequestedLatencyMode = selectHeadtrackingConnectionMode_l();
                 }
                 if (mEngine != nullptr) {
                     setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
@@ -1136,15 +1144,34 @@
     }
     if (mOutput != AUDIO_IO_HANDLE_NONE && supportsSetLatencyMode) {
         const status_t status =
-                AudioSystem::setRequestedLatencyMode(mOutput, requestedLatencyMode);
+                AudioSystem::setRequestedLatencyMode(mOutput, mRequestedLatencyMode);
         ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d", __func__,
-              mOutput, toString(requestedLatencyMode).c_str(), status);
+              mOutput, toString(mRequestedLatencyMode).c_str(), status);
     }
 }
 
+
+/* static */
+bool Spatializer::containsImmersiveChannelMask(
+        const std::vector<audio_channel_mask_t>& masks)
+{
+    for (auto mask : masks) {
+        if (audio_is_channel_mask_spatialized(mask)) {
+            return true;
+        }
+    }
+    // Only non-immersive channel masks, e.g. AUDIO_CHANNEL_OUT_STEREO, are present.
+    return false;
+}
+
+bool Spatializer::shouldUseHeadTracking_l() const {
+    // Headtracking is only available with immersive channel masks.
+    return containsImmersiveChannelMask(mActiveTracksMasks);
+}
+
 void Spatializer::checkEngineState_l() {
     if (mEngine != nullptr) {
-        if (mLevel != Spatialization::Level::NONE && mNumActiveTracks > 0) {
+        if (mLevel != Spatialization::Level::NONE && mActiveTracksMasks.size() > 0) {
             mEngine->setEnabled(true);
             setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
                     std::vector<Spatialization::Level>{mLevel});
@@ -1217,7 +1244,7 @@
         base::StringAppendF(&ss, " %s", ToString(mode).c_str());
     }
     base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
-                        media::toString(mDesiredHeadTrackingMode).c_str(),
+                        ToString(mDesiredHeadTrackingMode).c_str(),
                         ToString(mActualHeadTrackingMode).c_str());
 
     base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
@@ -1233,7 +1260,8 @@
     base::StringAppendF(&ss, "\n%smSupportsHeadTracking: %s\n", prefixSpace.c_str(),
                         mSupportsHeadTracking ? "true" : "false");
     // 2. Settings (Output, tracks)
-    base::StringAppendF(&ss, "%smNumActiveTracks: %zu\n", prefixSpace.c_str(), mNumActiveTracks);
+    base::StringAppendF(&ss, "%sNum Active Tracks: %zu\n",
+            prefixSpace.c_str(), mActiveTracksMasks.size());
     base::StringAppendF(&ss, "%sOutputStreamHandle: %d\n", prefixSpace.c_str(), (int)mOutput);
 
     // 3. Sensors, Effect information.
@@ -1244,8 +1272,12 @@
                         mDisplayOrientation);
 
     // 4. Show flag or property state.
+    static const bool stereo_spatialization_prop_enabled =
+            property_get_bool("ro.audio.stereo_spatialization_enabled", false);
+    const bool stereo_spatialization = com_android_media_audio_stereo_spatialization()
+            && stereo_spatialization_prop_enabled;
     base::StringAppendF(&ss, "%sStereo Spatialization: %s\n", prefixSpace.c_str(),
-            com_android_media_audio_stereo_spatialization() ? "true" : "false");
+            stereo_spatialization ? "true" : "false");
 
     ss.append(prefixSpace + "CommandLog:\n");
     ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), mMaxLocalLogLine);
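
A minimal sketch of the gating the hunks above introduce, assuming the Android system audio headers (audio_channel_mask_t, audio_is_channel_mask_spatialized, audio_latency_mode_t) used elsewhere in this file; the first helper mirrors the new Spatializer::containsImmersiveChannelMask(), while the second function is only an illustration of the decision, not code from this change:

    #include <vector>
    #include <system/audio.h>

    // True if any active track carries an immersive (spatializable) channel mask.
    static bool containsImmersiveChannelMask(const std::vector<audio_channel_mask_t>& masks) {
        for (const auto mask : masks) {
            if (audio_is_channel_mask_spatialized(mask)) {
                return true;
            }
        }
        return false;  // only non-immersive masks such as AUDIO_CHANNEL_OUT_STEREO
    }

    // Illustration only: the requested latency mode stays FREE unless head tracking is
    // actually usable, i.e. immersive content is playing and the other conditions checked
    // above (spatialization level, desired mode, valid head sensor) also hold.
    static audio_latency_mode_t requestedLatencyModeFor(
            const std::vector<audio_channel_mask_t>& activeTracksMasks,
            bool otherHeadTrackingConditionsMet,
            audio_latency_mode_t lowLatencyMode) {  // e.g. selectHeadtrackingConnectionMode_l()
        if (containsImmersiveChannelMask(activeTracksMasks) && otherHeadTrackingConditionsMet) {
            return lowLatencyMode;
        }
        return AUDIO_LATENCY_MODE_FREE;
    }
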
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 24788dc..5ea3258 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -27,6 +27,7 @@
 #include <audio_utils/SimpleLog.h>
 #include <math.h>
 #include <media/AudioEffect.h>
+#include <media/MediaMetricsItem.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <media/VectorRecorder.h>
 #include <media/audiohal/EffectHalInterface.h>
@@ -153,10 +154,39 @@
         return mLevel;
     }
 
+    /** For test only */
+    std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+            getSupportedHeadtrackingConnectionModes() const {
+        return mSupportedHeadtrackingConnectionModes;
+    }
+
+    /** For test only */
+    media::audio::common::HeadTracking::ConnectionMode getHeadtrackingConnectionMode() const {
+        return mHeadtrackingConnectionMode;
+    }
+
+    /** For test only */
+    std::vector<audio_latency_mode_t> getSupportedLatencyModes() const {
+        audio_utils::lock_guard lock(mMutex);
+        return mSupportedLatencyModes;
+    }
+
+    /** For test only */
+    std::vector<audio_latency_mode_t> getOrderedLowLatencyModes() const {
+        return mOrderedLowLatencyModes;
+    }
+
+    /** For test only */
+    audio_latency_mode_t getRequestedLatencyMode() const {
+        audio_utils::lock_guard lock(mMutex);
+        return mRequestedLatencyMode;
+    }
+
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is opened and the spatializer engine must be created.
      */
-    status_t attachOutput(audio_io_handle_t output, size_t numActiveTracks);
+    status_t attachOutput(audio_io_handle_t output,
+                          const std::vector<audio_channel_mask_t>& activeTracksMasks);
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is closed and the spatializer engine must be release.
      */
@@ -164,7 +194,13 @@
     /** Returns the output stream the spatializer is attached to. */
     audio_io_handle_t getOutput() const { audio_utils::lock_guard lock(mMutex); return mOutput; }
 
-    void updateActiveTracks(size_t numActiveTracks);
+    /** For test only */
+    void setOutput(audio_io_handle_t output) {
+        audio_utils::lock_guard lock(mMutex);
+        mOutput = output;
+    }
+
+    void updateActiveTracks(const std::vector<audio_channel_mask_t>& activeTracksMasks);
 
     /** Gets the channel mask, sampling rate and format set for the spatializer input. */
     audio_config_base_t getAudioInConfig() const;
@@ -188,6 +224,20 @@
     // NO_INIT: Spatializer creation failed.
     static void sendEmptyCreateSpatializerMetricWithStatus(status_t status);
 
+    /** Made public for test only */
+    void onSupportedLatencyModesChangedMsg(
+            audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
+
+    // Made public for test only
+    /**
+     * Returns true if there exists an immersive channel mask in the vector.
+     *
+     * For example, AUDIO_CHANNEL_OUT_STEREO is a non-immersive channel mask,
+     * whereas AUDIO_CHANNEL_OUT_5POINT1 is an immersive one.
+     */
+    static bool containsImmersiveChannelMask(
+            const std::vector<audio_channel_mask_t>& masks);
+
 private:
     Spatializer(effect_descriptor_t engineDescriptor,
                      SpatializerPolicyCallback *callback);
@@ -200,8 +250,6 @@
 
     void onHeadToStagePoseMsg(const std::vector<float>& headToStage);
     void onActualModeChangeMsg(media::HeadTrackingMode mode);
-    void onSupportedLatencyModesChangedMsg(
-            audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
 
     static constexpr int kMaxEffectParamValues = 10;
     /**
@@ -425,6 +473,11 @@
      */
     audio_latency_mode_t selectHeadtrackingConnectionMode_l() REQUIRES(mMutex);
 
+    /**
+     * Indicates if current conditions are compatible with head tracking.
+     */
+    bool shouldUseHeadTracking_l() const REQUIRES(mMutex);
+
     /** Effect engine descriptor */
     const effect_descriptor_t mEngineDescriptor;
     /** Callback interface to parent audio policy service */
@@ -484,13 +537,17 @@
     std::vector<media::audio::common::Spatialization::Mode> mSpatializationModes;
     std::vector<audio_channel_mask_t> mChannelMasks;
     bool mSupportsHeadTracking;
-    /** List of supported headtracking connection modes reported by the spatializer.
+
+    /** List of supported head tracking connection modes reported by the spatializer.
      * If the list is empty, the spatializer does not support any optional connection
-     * mode and mode HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED is assumed.
+     * mode and mode HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED is assumed.
+     * This is set in the factory/constructor and can be accessed without holding the mutex.
      */
-    std::unordered_set<headtracking_connection_t> mSupportedHeadtrackingConnectionModes;
+    std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+            mSupportedHeadtrackingConnectionModes;
     /** Selected HT connection mode when several modes are supported by the spatializer */
-    headtracking_connection_t mHeadtrackingConnectionMode;
+    media::audio::common::HeadTracking::ConnectionMode mHeadtrackingConnectionMode =
+            media::audio::common::HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
 
     // Looper thread for mEngine callbacks
     class EngineCallbackHandler;
@@ -498,10 +555,13 @@
     sp<ALooper> mLooper;
     sp<EngineCallbackHandler> mHandler;
 
-    size_t mNumActiveTracks GUARDED_BY(mMutex) = 0;
+    std::vector<audio_channel_mask_t> mActiveTracksMasks GUARDED_BY(mMutex);
     std::vector<audio_latency_mode_t> mSupportedLatencyModes GUARDED_BY(mMutex);
     /** preference order for low latency modes according to persist.bluetooth.hid.transport */
     std::vector<audio_latency_mode_t> mOrderedLowLatencyModes;
+
+    audio_latency_mode_t mRequestedLatencyMode GUARDED_BY(mMutex) = AUDIO_LATENCY_MODE_FREE;
+
     /** string to latency mode map used to parse bluetooth.core.le.dsa_transport_preference */
     static const std::map<std::string, audio_latency_mode_t> sStringToLatencyModeMap;
     static const std::vector<const char*> sHeadPoseKeys;
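
The accessors marked "For test only" above make the latency-mode state observable from a unit test. A hypothetical gtest fragment, not the actual spatializer_tests.cpp added by this change; the SpatializerTest fixture, mSpatializer, and kTestOutput are assumptions used only for illustration:

    TEST_F(SpatializerTest, RequestedLatencyModeDefaultsToFree) {
        // Assumed: the fixture created mSpatializer via the Spatializer factory and
        // kTestOutput is an arbitrary valid output handle.
        mSpatializer->setOutput(kTestOutput);                   // test-only setter
        mSpatializer->onSupportedLatencyModesChangedMsg(
                kTestOutput, {AUDIO_LATENCY_MODE_FREE, AUDIO_LATENCY_MODE_LOW});
        mSpatializer->updateActiveTracks({});                   // no active tracks
        // With nothing playing, no low-latency mode should be requested.
        EXPECT_EQ(AUDIO_LATENCY_MODE_FREE, mSpatializer->getRequestedLatencyMode());
    }
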
diff --git a/services/audiopolicy/service/include/media/IAudioPolicyServiceLocal.h b/services/audiopolicy/service/include/media/IAudioPolicyServiceLocal.h
new file mode 100644
index 0000000..6776ff9
--- /dev/null
+++ b/services/audiopolicy/service/include/media/IAudioPolicyServiceLocal.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/IPermissionProvider.h>
+#include <utils/RefBase.h>
+
+namespace android::media {
+
+class IAudioPolicyServiceLocal : public virtual RefBase {
+  public:
+    virtual const ::com::android::media::permission::IPermissionProvider&
+    getPermissionProvider() const = 0;
+
+    virtual ~IAudioPolicyServiceLocal() = default;
+};
+
+}  // namespace android::media
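
IAudioPolicyServiceLocal declares a single accessor returning the service's IPermissionProvider. A hypothetical implementer sketch; MyAudioPolicyService and SomePermissionProvider are illustrative names, not part of this change:

    class MyAudioPolicyService : public android::media::IAudioPolicyServiceLocal {
      public:
        const ::com::android::media::permission::IPermissionProvider&
        getPermissionProvider() const override {
            return mProvider;  // a concrete IPermissionProvider owned by the service
        }
      private:
        SomePermissionProvider mProvider;  // hypothetical concrete provider type
    };
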
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index a4a0cd4..4006489 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -11,6 +12,7 @@
     name: "audiopolicy_tests",
 
     defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
         "latest_android_media_audio_common_types_cpp_static",
     ],
 
@@ -27,17 +29,21 @@
         "libbase",
         "libbinder",
         "libcutils",
+        "libcutils",
         "libhidlbase",
         "liblog",
         "libmedia_helper",
         "libutils",
-        "libcutils",
         "libxml2",
+        "server_configurable_flags",
     ],
 
     static_libs: [
+        "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
+        "com.android.media.audioserver-aconfig-cc",
         "libaudiopolicycomponents",
+        "libflagtest",
         "libgmock",
     ],
 
@@ -49,21 +55,20 @@
 
     srcs: ["audiopolicymanager_tests.cpp"],
 
-    data: [":audiopolicytest_configuration_files",],
+    data: [":audiopolicytest_configuration_files"],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     test_suites: [
-        "device-tests",
         "automotive-tests",
+        "device-tests",
     ],
 
 }
 
-
 cc_test {
     name: "audio_health_tests",
 
@@ -98,10 +103,47 @@
     srcs: ["audio_health_tests.cpp"],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     test_suites: ["device-tests"],
 
 }
+
+cc_test {
+    name: "spatializer_tests",
+
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+        "libaudiopolicyservice_dependencies",
+    ],
+
+    require_root: true,
+
+    shared_libs: [
+        "libaudioclient",
+        "libaudiofoundation",
+        "libcutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libaudiopolicyservice",
+    ],
+
+    header_libs: [
+        "libaudiohal_headers",
+        "libaudiopolicyservice_headers",
+        "libmediametrics_headers",
+    ],
+
+    srcs: ["spatializer_tests.cpp"],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    test_suites: ["device-tests"],
+}
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 7ef0266..45643f7 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -41,7 +41,8 @@
                         audio_config_base_t * /*mixerConfig*/,
                         const sp<DeviceDescriptorBase>& /*device*/,
                         uint32_t * /*latencyMs*/,
-                        audio_output_flags_t /*flags*/) override {
+                        audio_output_flags_t /*flags*/,
+                        audio_attributes_t /*attributes*/) override {
         if (module >= mNextModuleHandle) {
             ALOGE("%s: Module handle %d has not been allocated yet (next is %d)",
                   __func__, module, mNextModuleHandle);
@@ -70,6 +71,24 @@
             return BAD_VALUE;
         }
         *input = mNextIoHandle++;
+        mOpenedInputs.insert(*input);
+        ALOGD("%s: opened input %d", __func__, *input);
+        mOpenInputCallsCount++;
+        return NO_ERROR;
+    }
+
+    status_t closeInput(audio_io_handle_t input) override {
+        if (mOpenedInputs.erase(input) != 1) {
+            if (input >= mNextIoHandle) {
+                ALOGE("%s: I/O handle %d has not been allocated yet (next is %d)",
+                      __func__, input, mNextIoHandle);
+            } else {
+                ALOGE("%s: Attempt to close input %d twice", __func__, input);
+            }
+            return BAD_VALUE;
+        }
+        ALOGD("%s: closed input %d", __func__, input);
+        mCloseInputCallsCount++;
         return NO_ERROR;
     }
 
@@ -124,6 +143,8 @@
         return &it->second;
     };
 
+    size_t getOpenedInputsCount() const { return mOpenedInputs.size(); }
+
     audio_module_handle_t peekNextModuleHandle() const { return mNextModuleHandle; }
 
     void swapAllowedModuleNames(std::set<std::string>&& names = {}) {
@@ -221,6 +242,15 @@
         return NO_ERROR;
     }
 
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override {
+        for (const auto& trackInternalMute : tracksInternalMute) {
+            mTracksInternalMute[(audio_port_handle_t)trackInternalMute.portId] =
+                    trackInternalMute.muted;
+        }
+        return NO_ERROR;
+    }
+
     void addSupportedFormat(audio_format_t format) {
         mSupportedFormats.insert(format);
     }
@@ -229,6 +259,23 @@
         mSupportedChannelMasks.insert(channelMask);
     }
 
+    bool getTrackInternalMute(audio_port_handle_t portId) {
+        auto it = mTracksInternalMute.find(portId);
+        return it == mTracksInternalMute.end() ? false : it->second;
+    }
+    void resetInputApiCallsCounters() {
+        mOpenInputCallsCount = 0;
+        mCloseInputCallsCount = 0;
+    }
+
+    size_t getCloseInputCallsCount() const {
+        return mCloseInputCallsCount;
+    }
+
+    size_t getOpenInputCallsCount() const {
+        return mOpenInputCallsCount;
+    }
+
 private:
     audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
     audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
@@ -241,6 +288,10 @@
     std::vector<struct audio_port_v7> mDisconnectedDevicePorts;
     std::set<audio_format_t> mSupportedFormats;
     std::set<audio_channel_mask_t> mSupportedChannelMasks;
+    std::map<audio_port_handle_t, bool> mTracksInternalMute;
+    std::set<audio_io_handle_t> mOpenedInputs;
+    size_t mOpenInputCallsCount = 0;
+    size_t mCloseInputCallsCount = 0;
 };
 
 } // namespace android
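
The input bookkeeping added to the test client (mOpenedInputs plus the open/close counters) lets tests verify that transient inputs are cleaned up. A short illustrative fragment, assuming the usual mClient fixture member; see the CheckInputsForDeviceClosesStreams test later in this change for the real usage:

    mClient->resetInputApiCallsCounters();
    // ... exercise code that may open and close inputs, e.g. setDeviceConnectionState() ...
    // Every input that was opened should have been closed again,
    // assuming no inputs were already open beforehand.
    EXPECT_EQ(mClient->getOpenInputCallsCount(), mClient->getCloseInputCallsCount());
    EXPECT_EQ(0u, mClient->getOpenedInputsCount());
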
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index e55e935..0299160 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -37,7 +37,8 @@
                         audio_config_base_t* /*mixerConfig*/,
                         const sp<DeviceDescriptorBase>& /*device*/,
                         uint32_t* /*latencyMs*/,
-                        audio_output_flags_t /*flags*/) override { return NO_INIT; }
+                        audio_output_flags_t /*flags*/,
+                        audio_attributes_t /*attributes*/) override { return NO_INIT; }
     audio_io_handle_t openDuplicateOutput(audio_io_handle_t /*output1*/,
                                           audio_io_handle_t /*output2*/) override {
         return AUDIO_IO_HANDLE_NONE;
@@ -110,6 +111,11 @@
                              struct audio_port_v7 *mixPort __unused) override {
         return INVALID_OPERATION;
     }
+
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& /*tracksInternalMute*/) override {
+        return INVALID_OPERATION;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 31ee252..34ceeab 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -31,8 +31,10 @@
     using AudioPolicyManager::getConfig;
     using AudioPolicyManager::initialize;
     using AudioPolicyManager::getOutputs;
+    using AudioPolicyManager::getInputs;
     using AudioPolicyManager::getAvailableOutputDevices;
     using AudioPolicyManager::getAvailableInputDevices;
+    using AudioPolicyManager::checkInputsForDevice;
     using AudioPolicyManager::setSurroundFormatEnabled;
     using AudioPolicyManager::releaseMsdOutputPatches;
     using AudioPolicyManager::setMsdOutputPatches;
@@ -43,6 +45,7 @@
     using AudioPolicyManager::deviceToAudioPort;
     using AudioPolicyManager::handleDeviceConfigChange;
     uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
+    HwModuleCollection getHwModules() const { return mHwModules; }
 };
 
 }  // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 74d3474..f66b911 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -28,6 +28,9 @@
 #include <android-base/file.h>
 #include <android-base/properties.h>
 #include <android/content/AttributionSourceState.h>
+#include <android_media_audiopolicy.h>
+#include <com_android_media_audioserver.h>
+#include <flag_macros.h>
 #include <hardware/audio_effect.h>
 #include <media/AudioPolicy.h>
 #include <media/PatchBuilder.h>
@@ -43,6 +46,7 @@
 
 using namespace android;
 using testing::UnorderedElementsAre;
+using testing::IsEmpty;
 using android::content::AttributionSourceState;
 
 namespace {
@@ -92,6 +96,12 @@
     return attributionSourceState;
 }
 
+bool equals(const audio_config_base_t& config1, const audio_config_base_t& config2) {
+    return config1.format == config2.format
+            && config1.sample_rate == config2.sample_rate
+            && config1.channel_mask == config2.channel_mask;
+}
+
 } // namespace
 
 TEST(AudioPolicyConfigTest, DefaultConfigForTestsIsEmpty) {
@@ -195,7 +205,8 @@
             audio_channel_mask_t channelMask,
             int sampleRate,
             audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
-            audio_port_handle_t *portId = nullptr);
+            audio_port_handle_t *portId = nullptr,
+            uint32_t *virtualDeviceId = nullptr);
     PatchCountCheck snapshotPatchCount() { return PatchCountCheck(mClient.get()); }
 
     void getAudioPorts(audio_port_type_t type, audio_port_role_t role,
@@ -307,7 +318,8 @@
         audio_channel_mask_t channelMask,
         int sampleRate,
         audio_input_flags_t flags,
-        audio_port_handle_t *portId) {
+        audio_port_handle_t *portId,
+        uint32_t *virtualDeviceId) {
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     config.sample_rate = sampleRate;
     config.channel_mask = channelMask;
@@ -315,11 +327,12 @@
     audio_port_handle_t localPortId;
     if (!portId) portId = &localPortId;
     *portId = AUDIO_PORT_HANDLE_NONE;
+    uint32_t localVirtualDeviceId = 0;
+    if (!virtualDeviceId) virtualDeviceId = &localVirtualDeviceId;
     AudioPolicyInterface::input_type_t inputType;
     AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
     ASSERT_EQ(OK, mManager->getInputForAttr(
             &attr, input, riid, session, attributionSource, &config, flags,
-            selectedDeviceId, &inputType, portId));
+            selectedDeviceId, &inputType, portId, virtualDeviceId));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
 }
 
@@ -466,8 +479,8 @@
         MsdAudioPatchCount,
         AudioPolicyManagerTestMsd,
         ::testing::Values(
-                MsdAudioPatchCountSpecification(1u, "single"),
-                MsdAudioPatchCountSpecification(2u, "dual")
+                MsdAudioPatchCountSpecification(2u, "single"),
+                MsdAudioPatchCountSpecification(3u, "dual")
         ),
         [](const ::testing::TestParamInfo<MsdAudioPatchCountSpecification> &info) {
                 return std::get<MSD_AUDIO_PATCH_COUNT_NAME_INDEX>(info.param); }
@@ -494,7 +507,7 @@
     mConfig->addDevice(mMsdOutputDevice);
     mConfig->addDevice(mMsdInputDevice);
 
-    if (mExpectedAudioPatchCount == 2) {
+    if (mExpectedAudioPatchCount == 3) {
         // Add SPDIF device with PCM output profile as a second device for dual MSD audio patching.
         mSpdifDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPDIF);
         mSpdifDevice->addAudioProfile(pcmOutputProfile);
@@ -547,7 +560,7 @@
             addOutputProfile(primaryEncodedOutputProfile);
 
     mDefaultOutputDevice = mConfig->getDefaultOutputDevice();
-    if (mExpectedAudioPatchCount == 2) {
+    if (mExpectedAudioPatchCount == 3) {
         mSpdifDevice->addAudioProfile(dtsOutputProfile);
         primaryEncodedOutputProfile->addSupportedDevice(mSpdifDevice);
     }
@@ -596,7 +609,7 @@
     const PatchCountCheck patchCount = snapshotPatchCount();
     mManager->setForceUse(AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND,
             AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
 }
 
 TEST_P(AudioPolicyManagerTestMsd, PatchCreationSetReleaseMsdOutputPatches) {
@@ -604,15 +617,15 @@
     DeviceVector devices = mManager->getAvailableOutputDevices();
     // Remove MSD output device to avoid patching to itself
     devices.remove(mMsdOutputDevice);
-    ASSERT_EQ(mExpectedAudioPatchCount, devices.size());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, devices.size());
     mManager->setMsdOutputPatches(&devices);
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
     // Dual patch: exercise creating one new audio patch and reusing another existing audio patch.
     DeviceVector singleDevice(devices[0]);
     mManager->releaseMsdOutputPatches(singleDevice);
-    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 2, patchCount.deltaFromSnapshot());
     mManager->setMsdOutputPatches(&devices);
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
     mManager->releaseMsdOutputPatches(devices);
     ASSERT_EQ(0, patchCount.deltaFromSnapshot());
 }
@@ -632,7 +645,7 @@
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
 }
 
 TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
@@ -655,7 +668,7 @@
     getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
             k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
     ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
-    ASSERT_EQ(0, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
 }
 
 TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrFormatSwitching) {
@@ -669,7 +682,7 @@
         ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
         ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
-        ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
     }
     {
         const PatchCountCheck patchCount = snapshotPatchCount();
@@ -678,7 +691,7 @@
         getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
                 k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
         ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
-        ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount), patchCount.deltaFromSnapshot());
+        ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount) + 2, patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
         ASSERT_EQ(0, patchCount.deltaFromSnapshot());
     }
@@ -688,7 +701,7 @@
         getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
                 k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
         ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-        ASSERT_EQ(0, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(1, patchCount.deltaFromSnapshot());
     }
 }
 
@@ -1143,127 +1156,81 @@
                                                            "", "", AUDIO_FORMAT_LDAC));
 }
 
-TEST_F(AudioPolicyManagerTestWithConfigurationFile, BitPerfectPlayback) {
-    const audio_format_t bitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
-    const audio_channel_mask_t bitPerfectChannelMask = AUDIO_CHANNEL_OUT_QUAD;
-    const uint32_t bitPerfectSampleRate = 48000;
-    mClient->addSupportedFormat(bitPerfectFormat);
-    mClient->addSupportedChannelMask(bitPerfectChannelMask);
-    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
+    const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedChannelMask(deviceChannelMask);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
                                                            AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                                                            "", "", AUDIO_FORMAT_DEFAULT));
-    auto devices = mManager->getAvailableOutputDevices();
-    audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
-    for (auto device : devices) {
-        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
-            usbPortId = device->getId();
-            break;
-        }
-    }
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
 
-    const uid_t uid = 1234;
-    const uid_t anotherUid = 5678;
-    const audio_attributes_t mediaAttr = {
-            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
-            .usage = AUDIO_USAGE_MEDIA,
-    };
-
-    std::vector<audio_mixer_attributes_t> mixerAttributes;
-    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
-    EXPECT_GT(mixerAttributes.size(), 0);
-    size_t bitPerfectIndex = 0;
-    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
-        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
-            break;
-        }
-    }
-    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
-    EXPECT_EQ(bitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
-    EXPECT_EQ(bitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
-    EXPECT_EQ(bitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
-    EXPECT_EQ(NO_ERROR,
-              mManager->setPreferredMixerAttributes(
-                      &mediaAttr, usbPortId, uid, &mixerAttributes[bitPerfectIndex]));
-
-    audio_io_handle_t bitPerfectOutput = AUDIO_IO_HANDLE_NONE;
-    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t bitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    bool isBitPerfect;
-
-    // When there is no active bit-perfect playback, the output selection will follow default
-    // routing strategy.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            uid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
-    const auto outputDesc = mManager->getOutputs().valueFor(output);
-    EXPECT_NE(nullptr, outputDesc);
-    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
-
-    // Start bit-perfect playback
-    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
-            mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
-    status_t status = mManager->startOutput(bitPerfectPortId);
-    if (status == DEAD_OBJECT) {
-        getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-                bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
-                mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
-        status = mManager->startOutput(bitPerfectPortId);
-    }
-    EXPECT_EQ(NO_ERROR, status);
-    EXPECT_TRUE(isBitPerfect);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, bitPerfectOutput);
-    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(bitPerfectOutput);
-    EXPECT_NE(nullptr, bitPerfectOutputDesc);
-    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
-              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
-
-    // If the playback is from preferred mixer attributes owner but the request doesn't match
-    // preferred mixer attributes, it will not be bit-perfect.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            uid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    // When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    const audio_attributes_t dtmfAttr = {
-            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
-            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+                               AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""};
+    AudioPolicyInterface::input_type_t inputType;
+    audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
+    AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
+    audio_config_base_t requestedConfig = {
+            .channel_mask = AUDIO_CHANNEL_IN_STEREO,
+            .format = AUDIO_FORMAT_PCM_16_BIT,
+            .sample_rate = 48000
     };
-    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
+    audio_config_base_t config = requestedConfig;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    uint32_t *virtualDeviceId = nullptr;
+    ASSERT_EQ(OK, mManager->getInputForAttr(
+            &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
+            AUDIO_INPUT_FLAG_NONE,
+            &selectedDeviceId, &inputType, &portId, virtualDeviceId));
+    ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
+    ASSERT_TRUE(equals(requestedConfig, config));
+
+    attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+            AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""};
+    requestedConfig.channel_mask = deviceChannelMask;
+    config = requestedConfig;
     selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    input = AUDIO_IO_HANDLE_NONE;
     portId = AUDIO_PORT_HANDLE_NONE;
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
-            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, dtmfOutput);
+    ASSERT_EQ(OK, mManager->getInputForAttr(
+            &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
+            AUDIO_INPUT_FLAG_NONE,
+            &selectedDeviceId, &inputType, &portId, virtualDeviceId));
+    ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
+    ASSERT_TRUE(equals(requestedConfig, config));
 
-    // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
-    // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
-    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
-            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    EXPECT_EQ(NO_ERROR,
-              mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
-    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
                                                            AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                                                           "", "", AUDIO_FORMAT_LDAC));
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, CheckInputsForDeviceClosesStreams) {
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_24_BIT_PACKED);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_MONO);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_STEREO);
+    // 'checkInputsForDevice' is normally invoked as part of 'setDeviceConnectionState';
+    // call it directly here to verify that it does not keep all of the intermediate
+    // streams it probes open, since doing so could hit the HAL's limit on open streams
+    // and cause a rejection.
+    const size_t streamCountBefore = mClient->getOpenedInputsCount();
+    sp<DeviceDescriptor> device = mManager->getHwModules().getDeviceDescriptor(
+            AUDIO_DEVICE_IN_USB_DEVICE, "", "", AUDIO_FORMAT_DEFAULT, true /*allowToCreate*/);
+    ASSERT_NE(nullptr, device.get());
+    EXPECT_EQ(NO_ERROR,
+            mManager->checkInputsForDevice(device, AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+    EXPECT_EQ(streamCountBefore, mClient->getOpenedInputsCount());
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, SetDeviceConnectionStateClosesStreams) {
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_24_BIT_PACKED);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_MONO);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_STEREO);
+    const size_t streamCountBefore = mClient->getOpenedInputsCount();
+    EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    EXPECT_EQ(streamCountBefore, mClient->getOpenedInputsCount());
 }
 
 class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
@@ -1273,6 +1240,12 @@
     status_t addPolicyMix(int mixType, int mixFlag, audio_devices_t deviceType,
             std::string mixAddress, const audio_config_t& audioConfig,
             const std::vector<AudioMixMatchCriterion>& matchCriteria);
+
+    status_t addPolicyMix(const AudioMix& mix);
+
+    status_t removePolicyMixes(const Vector<AudioMix>& mixes);
+
+    std::vector<AudioMix> getRegisteredPolicyMixes();
     void clearPolicyMix();
     void addPolicyMixAndStartInputForLoopback(
             int mixType, int mixFlag, audio_devices_t deviceType, std::string mixAddress,
@@ -1307,9 +1280,14 @@
     AudioMix myAudioMix(matchCriteria, mixType, audioConfig, mixFlag,
             String8(mixAddress.c_str()), 0);
     myAudioMix.mDeviceType = deviceType;
+    myAudioMix.mToken = sp<BBinder>::make();
     // Clear mAudioMix before add new one to make sure we don't add already exist mixes.
     mAudioMixes.clear();
-    mAudioMixes.add(myAudioMix);
+    return addPolicyMix(myAudioMix);
+}
+
+status_t AudioPolicyManagerTestDynamicPolicy::addPolicyMix(const AudioMix& mix) {
+    mAudioMixes.add(mix);
 
     // As the policy mixes registration may fail at some case,
     // caller need to check the returned status.
@@ -1317,6 +1295,20 @@
     return ret;
 }
 
+status_t AudioPolicyManagerTestDynamicPolicy::removePolicyMixes(const Vector<AudioMix>& mixes) {
+    status_t ret = mManager->unregisterPolicyMixes(mixes);
+    return ret;
+}
+
+std::vector<AudioMix> AudioPolicyManagerTestDynamicPolicy::getRegisteredPolicyMixes() {
+    std::vector<AudioMix> audioMixes;
+    if (mManager != nullptr) {
+        status_t ret = mManager->getRegisteredPolicyMixes(audioMixes);
+        EXPECT_EQ(NO_ERROR, ret);
+    }
+    return audioMixes;
+}
+
 void AudioPolicyManagerTestDynamicPolicy::clearPolicyMix() {
     if (mManager != nullptr) {
         mManager->stopInput(mLoopbackInputPortId);
@@ -1470,6 +1462,139 @@
     ASSERT_EQ(INVALID_OPERATION, ret);
 }
 
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestDynamicPolicy,
+        RegisterInvalidMixesDoesNotImpactPriorMixes,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api),
+                               ACONFIG_FLAG(android::media::audiopolicy, audio_mix_ownership))
+) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> validMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    AudioMix validAudioMix(validMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                           MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    mAudioMixes.clear();
+    status_t ret = addPolicyMix(validAudioMix);
+
+    ASSERT_EQ(NO_ERROR, ret);
+
+    std::vector<AudioMix> registeredMixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(1, registeredMixes.size());
+
+    std::vector<AudioMixMatchCriterion> invalidMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+
+    AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                             MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    invalidAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    ret = addPolicyMix(invalidAudioMix);
+
+    ASSERT_EQ(INVALID_OPERATION, ret);
+
+    std::vector<AudioMix> remainingMixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(registeredMixes.size(), remainingMixes.size());
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestDynamicPolicy,
+        UnregisterInvalidMixesReturnsError,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api),
+                               ACONFIG_FLAG(android::media::audiopolicy, audio_mix_ownership))
+) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> validMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    AudioMix validAudioMix(validMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                           MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    mAudioMixes.clear();
+    status_t ret = addPolicyMix(validAudioMix);
+
+    ASSERT_EQ(NO_ERROR, ret);
+
+    std::vector<AudioMix> registeredMixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(1, registeredMixes.size());
+
+    std::vector<AudioMixMatchCriterion> invalidMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+
+    AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                             MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    invalidAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    Vector<AudioMix> mixes;
+    mixes.add(invalidAudioMix);
+    mixes.add(validAudioMix);
+    ret = removePolicyMixes(mixes);
+
+    ASSERT_EQ(INVALID_OPERATION, ret);
+
+    std::vector<AudioMix> remainingMixes = getRegisteredPolicyMixes();
+    EXPECT_THAT(remainingMixes, IsEmpty());
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestDynamicPolicy,
+        GetRegisteredPolicyMixes,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api))
+) {
+    std::vector<AudioMix> mixes = getRegisteredPolicyMixes();
+    EXPECT_THAT(mixes, IsEmpty());
+}
+
+TEST_F_WITH_FLAGS(AudioPolicyManagerTestDynamicPolicy,
+        AddPolicyMixAndVerifyGetRegisteredPolicyMixes,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api))
+) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> mixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+                                AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
+                                mixMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+
+    std::vector<AudioMix> mixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(mixes.size(), 1);
+
+    const AudioMix& mix = mixes[0];
+    ASSERT_EQ(mix.mCriteria.size(), mixMatchCriteria.size());
+    for (uint32_t i = 0; i < mixMatchCriteria.size(); i++) {
+        EXPECT_EQ(mix.mCriteria[i].mRule, mixMatchCriteria[i].mRule);
+        EXPECT_EQ(mix.mCriteria[i].mValue.mUsage, mixMatchCriteria[i].mValue.mUsage);
+    }
+    EXPECT_EQ(mix.mDeviceType, AUDIO_DEVICE_OUT_REMOTE_SUBMIX);
+    EXPECT_EQ(mix.mRouteFlags, MIX_ROUTE_FLAG_LOOP_BACK);
+    EXPECT_EQ(mix.mMixType, MIX_TYPE_PLAYERS);
+    EXPECT_EQ(mix.mFormat.channel_mask, audioConfig.channel_mask);
+    EXPECT_EQ(mix.mFormat.format, audioConfig.format);
+    EXPECT_EQ(mix.mFormat.sample_rate, audioConfig.sample_rate);
+    EXPECT_EQ(mix.mFormat.frame_count, audioConfig.frame_count);
+}
+
 class AudioPolicyManagerTestForHdmi
         : public AudioPolicyManagerTestWithConfigurationFile,
           public testing::WithParamInterface<audio_format_t> {
@@ -2373,6 +2498,118 @@
                 )
         );
 
+namespace {
+
+class AudioPolicyManagerTestClientOpenFails : public AudioPolicyManagerTestClient {
+  public:
+    status_t openOutput(audio_module_handle_t module,
+                        audio_io_handle_t *output,
+                        audio_config_t * halConfig,
+                        audio_config_base_t * mixerConfig,
+                        const sp<DeviceDescriptorBase>& device,
+                        uint32_t * latencyMs,
+                        audio_output_flags_t flags,
+                        audio_attributes_t attributes) override {
+        return mSimulateFailure ? BAD_VALUE :
+                AudioPolicyManagerTestClient::openOutput(
+                        module, output, halConfig, mixerConfig, device, latencyMs, flags,
+                        attributes);
+    }
+
+    status_t openInput(audio_module_handle_t module,
+                       audio_io_handle_t *input,
+                       audio_config_t * config,
+                       audio_devices_t * device,
+                       const String8 & address,
+                       audio_source_t source,
+                       audio_input_flags_t flags) override {
+        return mSimulateFailure ? BAD_VALUE :
+                AudioPolicyManagerTestClient::openInput(
+                        module, input, config, device, address, source, flags);
+    }
+
+    void setSimulateFailure(bool simulateFailure) { mSimulateFailure = simulateFailure; }
+
+  private:
+    bool mSimulateFailure = false;
+};
+
+}  // namespace
+
+using DeviceConnectionWithFormatTestParams =
+        std::tuple<audio_devices_t /*type*/, std::string /*name*/, std::string /*address*/,
+        audio_format_t /*format*/>;
+
+class AudioPolicyManagerTestDeviceConnectionFailed :
+        public AudioPolicyManagerTestWithConfigurationFile,
+        public testing::WithParamInterface<DeviceConnectionWithFormatTestParams> {
+  protected:
+    std::string getConfigFile() override { return sBluetoothConfig; }
+    AudioPolicyManagerTestClient* getClient() override {
+        mFullClient = new AudioPolicyManagerTestClientOpenFails;
+        return mFullClient;
+    }
+    void setSimulateOpenFailure(bool simulateFailure) {
+        mFullClient->setSimulateFailure(simulateFailure);
+    }
+
+    static const std::string sBluetoothConfig;
+
+  private:
+    AudioPolicyManagerTestClientOpenFails* mFullClient;
+};
+
+const std::string AudioPolicyManagerTestDeviceConnectionFailed::sBluetoothConfig =
+        AudioPolicyManagerTestDeviceConnectionFailed::sExecutableDir +
+        "test_audio_policy_configuration_bluetooth.xml";
+
+TEST_P(AudioPolicyManagerTestDeviceConnectionFailed, SetDeviceConnectedStateHasAddress) {
+    const audio_devices_t type = std::get<0>(GetParam());
+    const std::string name = std::get<1>(GetParam());
+    const std::string address = std::get<2>(GetParam());
+    const audio_format_t format = std::get<3>(GetParam());
+
+    EXPECT_EQ(0, mClient->getConnectedDevicePortCount());
+    EXPECT_EQ(0, mClient->getDisconnectedDevicePortCount());
+
+    setSimulateOpenFailure(true);
+    ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
+            type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            address.c_str(), name.c_str(), format));
+
+    // Since the failure happens when opening input/output, the device must be connected
+    // first and then disconnected.
+    EXPECT_EQ(1, mClient->getConnectedDevicePortCount());
+    EXPECT_EQ(1, mClient->getDisconnectedDevicePortCount());
+
+    if (mClient->getConnectedDevicePortCount() > 0) {
+        auto port = mClient->getLastConnectedDevicePort();
+        EXPECT_EQ(type, port->ext.device.type);
+        EXPECT_EQ(0, strncmp(port->ext.device.address, address.c_str(),
+                        AUDIO_DEVICE_MAX_ADDRESS_LEN)) << "\"" << port->ext.device.address << "\"";
+    }
+    if (mClient->getDisconnectedDevicePortCount() > 0) {
+        auto port = mClient->getLastDisconnectedDevicePort();
+        EXPECT_EQ(type, port->ext.device.type);
+        EXPECT_EQ(0, strncmp(port->ext.device.address, address.c_str(),
+                        AUDIO_DEVICE_MAX_ADDRESS_LEN)) << "\"" << port->ext.device.address << "\"";
+    }
+}
+
+INSTANTIATE_TEST_CASE_P(
+        DeviceConnectionFailure,
+        AudioPolicyManagerTestDeviceConnectionFailed,
+        testing::Values(
+                DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
+                            "bt_hfp_in", "00:11:22:33:44:55", AUDIO_FORMAT_DEFAULT}),
+                DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_SCO,
+                            "bt_hfp_out", "00:11:22:33:44:55", AUDIO_FORMAT_DEFAULT}),
+                DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                            "bt_a2dp_out", "00:11:22:33:44:55", AUDIO_FORMAT_DEFAULT}),
+                DeviceConnectionWithFormatTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                            "bt_a2dp_out", "00:11:22:33:44:66", AUDIO_FORMAT_LDAC})
+                )
+        );
+
 class AudioPolicyManagerCarTest : public AudioPolicyManagerTestDynamicPolicy {
 protected:
     std::string getConfigFile() override { return sCarConfig; }
@@ -3283,4 +3520,404 @@
 
     // unregister effect should succeed since effect shall have been restore on the client session
     ASSERT_EQ(NO_ERROR, mManager->unregisterEffect(effectId));
-}
\ No newline at end of file
+}
+
+class AudioPolicyManagerTestBitPerfectBase : public AudioPolicyManagerTestWithConfigurationFile {
+protected:
+    void SetUp() override;
+    void TearDown() override;
+
+    void startBitPerfectOutput();
+    void reset();
+    void getBitPerfectOutput(status_t expected);
+
+    const audio_format_t mBitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
+    const audio_channel_mask_t mBitPerfectChannelMask = AUDIO_CHANNEL_OUT_STEREO;
+    const uint32_t mBitPerfectSampleRate = 48000;
+    const uid_t mUid = 1234;
+    audio_port_handle_t mUsbPortId = AUDIO_PORT_HANDLE_NONE;
+
+    audio_io_handle_t mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+
+    static constexpr audio_attributes_t sMediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+};
+
+void AudioPolicyManagerTestBitPerfectBase::SetUp() {
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::SetUp());
+
+    mClient->addSupportedFormat(mBitPerfectFormat);
+    mClient->addSupportedChannelMask(mBitPerfectChannelMask);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    auto devices = mManager->getAvailableOutputDevices();
+    mUsbPortId = AUDIO_PORT_HANDLE_NONE;
+    for (auto device : devices) {
+        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+            mUsbPortId = device->getId();
+            break;
+        }
+    }
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, mUsbPortId);
+
+    std::vector<audio_mixer_attributes_t> mixerAttributes;
+    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(mUsbPortId, mixerAttributes));
+    EXPECT_GT(mixerAttributes.size(), 0);
+    size_t bitPerfectIndex = 0;
+    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
+        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
+            break;
+        }
+    }
+    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
+    EXPECT_EQ(mBitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
+    EXPECT_EQ(mBitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
+    EXPECT_EQ(mBitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
+    EXPECT_EQ(NO_ERROR,
+              mManager->setPreferredMixerAttributes(
+                      &sMediaAttr, mUsbPortId, mUid, &mixerAttributes[bitPerfectIndex]));
+}
+
+void AudioPolicyManagerTestBitPerfectBase::TearDown() {
+    EXPECT_EQ(NO_ERROR,
+              mManager->clearPreferredMixerAttributes(&sMediaAttr, mUsbPortId, mUid));
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::TearDown());
+}
+
+void AudioPolicyManagerTestBitPerfectBase::startBitPerfectOutput() {
+    reset();
+    bool isBitPerfect;
+
+    getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
+                     &mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    status_t status = mManager->startOutput(mBitPerfectPortId);
+    if (status == DEAD_OBJECT) {
+        getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                         mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
+                         &mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+        status = mManager->startOutput(mBitPerfectPortId);
+    }
+    EXPECT_EQ(NO_ERROR, status);
+    EXPECT_TRUE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, mBitPerfectOutput);
+    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(mBitPerfectOutput);
+    EXPECT_NE(nullptr, bitPerfectOutputDesc);
+    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
+              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+};
+
+void AudioPolicyManagerTestBitPerfectBase::reset() {
+    mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+}
+
+void AudioPolicyManagerTestBitPerfectBase::getBitPerfectOutput(status_t expected) {
+    reset();
+    audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
+    AttributionSourceState attributionSource = createAttributionSourceState(mUid);
+    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+    config.sample_rate = mBitPerfectSampleRate;
+    config.channel_mask = mBitPerfectChannelMask;
+    config.format = mBitPerfectFormat;
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_BIT_PERFECT;
+    AudioPolicyInterface::output_type_t outputType;
+    bool isSpatialized;
+    bool isBitPerfect;
+    EXPECT_EQ(expected,
+              mManager->getOutputForAttr(&sMediaAttr, &mBitPerfectOutput, AUDIO_SESSION_NONE,
+                                         &stream, attributionSource, &config, &flags,
+                                         &mSelectedDeviceId, &mBitPerfectPortId, {}, &outputType,
+                                         &isSpatialized, &isBitPerfect));
+}
+
+class AudioPolicyManagerTestBitPerfect : public AudioPolicyManagerTestBitPerfectBase {
+};
+
+TEST_F(AudioPolicyManagerTestBitPerfect, UseBitPerfectOutput) {
+    const uid_t anotherUid = 5678;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+
+    // When there is no active bit-perfect playback, the output selection will follow default
+    // routing strategy.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
+    const auto outputDesc = mManager->getOutputs().valueFor(output);
+    EXPECT_NE(nullptr, outputDesc);
+    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+
+    // Start bit-perfect playback
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    // If the playback is from the owner of the preferred mixer attributes but the request does
+    // not match those attributes, it will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+
+    // When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+
+    // When bit-perfect playback is active, DTMF will also be routed to the bit-perfect output.
+    const audio_attributes_t dtmfAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    };
+    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, dtmfOutput);
+
+    // When the configuration matches the bit-perfect preferred mixer attributes but the client
+    // is not the owner of those attributes, the playback will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestBitPerfect,
+        InternalMuteWhenBitPerfectClientIsActive,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver,
+                             fix_concurrent_playback_behavior_with_bit_perfect_client))
+) {
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    // When bit-perfect playback is active, the system sound will be routed to bit-perfect output.
+    // The system sound will be muted internally in this case. The bit-perfect client will be
+    // played normally.
+    const uint32_t anotherSampleRate = 44100;
+    audio_port_handle_t systemSoundPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t systemSoundOutput = AUDIO_IO_HANDLE_NONE;
+    const audio_attributes_t systemSoundAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION,
+    };
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &systemSoundOutput,
+                     &systemSoundPortId, systemSoundAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, systemSoundOutput);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(systemSoundPortId));
+    EXPECT_TRUE(mClient->getTrackInternalMute(systemSoundPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(systemSoundPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+
+    // When bit-perfect playback is active, the notification will be routed to bit-perfect output.
+    // The notification sound will be played normally while the bit-perfect client will be muted
+    // internally.
+    audio_port_handle_t notificationPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t notificationOutput = AUDIO_IO_HANDLE_NONE;
+    const audio_attributes_t notificationAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_NOTIFICATION,
+    };
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &notificationOutput,
+                     &notificationPortId, notificationAttr, AUDIO_SESSION_NONE, mUid,
+                     &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, notificationOutput);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(notificationPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(notificationPortId));
+    EXPECT_TRUE(mClient->getTrackInternalMute(mBitPerfectPortId));
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(notificationPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(mBitPerfectPortId));
+}
+
+class AudioPolicyManagerTestBitPerfectPhoneMode : public AudioPolicyManagerTestBitPerfectBase,
+        public testing::WithParamInterface<audio_mode_t> {
+};
+
+TEST_P(AudioPolicyManagerTestBitPerfectPhoneMode, RejectBitPerfectWhenPhoneModeIsNotNormal) {
+    if (!com::android::media::audioserver::
+            fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        GTEST_SKIP()
+                << "Flag fix_concurrent_playback_behavior_with_bit_perfect_client is not enabled";
+    }
+
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    audio_mode_t mode = GetParam();
+    mManager->setPhoneState(mode);
+    // When the phone mode is not normal, the bit-perfect output will be closed.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(mBitPerfectOutput));
+
+    // When the phone mode is not normal, a new bit-perfect request will be rejected.
+    ASSERT_NO_FATAL_FAILURE(getBitPerfectOutput(INVALID_OPERATION));
+
+    mManager->setPhoneState(AUDIO_MODE_NORMAL);
+}
+
+INSTANTIATE_TEST_CASE_P(
+        PhoneMode,
+        AudioPolicyManagerTestBitPerfectPhoneMode,
+        testing::Values(AUDIO_MODE_IN_CALL,
+                        AUDIO_MODE_RINGTONE,
+                        AUDIO_MODE_IN_COMMUNICATION,
+                        AUDIO_MODE_CALL_SCREEN)
+);
+
+class AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive :
+        public AudioPolicyManagerTestBitPerfectBase,
+        public testing::WithParamInterface<audio_usage_t> {
+};
+
+TEST_P(AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive,
+       RejectBitPerfectWhenHigherPriorityUseCaseIsActive) {
+    if (!com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        GTEST_SKIP()
+                << "Flag fix_concurrent_playback_behavior_with_bit_perfect_client is not enabled";
+    }
+
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    audio_attributes_t attr = {
+            .usage = GetParam(),
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN
+    };
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(
+            getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                   48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, attr));
+    EXPECT_NE(mBitPerfectOutput, output);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(portId));
+    // When a high priority use case is active, the bit-perfect output will be closed.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(mBitPerfectOutput));
+
+    // When any higher priority use case is active, the bit-perfect request will be rejected.
+    ASSERT_NO_FATAL_FAILURE(getBitPerfectOutput(INVALID_OPERATION));
+}
+
+INSTANTIATE_TEST_CASE_P(
+        HigherPriorityUseCases,
+        AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive,
+        testing::Values(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
+                        AUDIO_USAGE_ALARM)
+);
+
+class AudioPolicyManagerInputPreemptionTest : public AudioPolicyManagerTestWithConfigurationFile {
+};
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        SameSessionReusesInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        48000));
+
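+    // A second request with the same session and source must reuse the already-open input.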
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(0, mClient->getCloseInputCallsCount());
+    EXPECT_EQ(input1, input2);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        LesserPriorityReusesInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_PORT_HANDLE_NONE;
+    attr.source = AUDIO_SOURCE_VOICE_RECOGNITION;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, OTHER_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        48000));
+
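+    // A lower-priority request on another session must not preempt: the existing input is reused.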
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(0, mClient->getCloseInputCallsCount());
+    EXPECT_EQ(input1, input2);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        HigherPriorityPreemptsInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_PORT_HANDLE_NONE;
+    attr.source = AUDIO_SOURCE_CAMCORDER;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, OTHER_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        48000));
+
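+    // A higher-priority source preempts: the first input is closed and a new one is opened.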
+    EXPECT_EQ(2, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(1, mClient->getCloseInputCallsCount());
+    EXPECT_NE(input1, input2);
+}
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index 5e71210..1c191f5 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -11,10 +12,11 @@
     name: "audiopolicytest_configuration_files",
     srcs: [
         "test_audio_policy_configuration.xml",
+        "test_audio_policy_configuration_bluetooth.xml",
         "test_audio_policy_primary_only_configuration.xml",
         "test_car_ap_atmos_offload_configuration.xml",
         "test_invalid_audio_policy_configuration.xml",
-        "test_tv_apm_configuration.xml",
         "test_settop_box_surround_configuration.xml",
+        "test_tv_apm_configuration.xml",
     ],
 }
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 4efdf8a..67e99f2 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -30,7 +30,7 @@
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                              samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
                 </mixPort>
-                <mixPort name="primary input" role="sink">
+                <mixPort name="primary input" role="sink"  maxActiveCount="1" maxOpenCount="1">
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                              samplingRates="48000"
                              channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
@@ -65,6 +65,7 @@
                         samplingRates="48000"
                         channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
                 </mixPort>
+                <mixPort name="hifi_input" role="sink" />
             </mixPorts>
             <devicePorts>
                 <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -98,7 +99,7 @@
                 <route type="mix" sink="primary input"
                        sources="Built-In Mic,Hdmi-In Mic,USB Device In"/>
                 <route type="mix" sink="voip_tx"
-                       sources="Built-In Mic"/>
+                       sources="Built-In Mic,USB Device In"/>
                 <route type="mix" sink="Hdmi"
                        sources="primary output"/>
                 <route type="mix" sink="BT SCO"
@@ -111,6 +112,8 @@
                        sources="primary output,hifi_output,mmap_no_irq_out"/>
                 <route type="mix" sink="mixport_bus_input"
                     sources="BUS Device In"/>
+                <route type="mix" sink="hifi_input"
+                        sources="USB Device In" />
             </routes>
         </module>
 
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration_bluetooth.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration_bluetooth.xml
new file mode 100644
index 0000000..0cf1688
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration_bluetooth.xml
@@ -0,0 +1,157 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="7.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+
+    <modules>
+        <!-- Primary module -->
+        <module name="primary" halVersion="2.0">
+            <attachedDevices>
+                <item>Speaker</item>
+                <item>Built-In Mic</item>
+            </attachedDevices>
+            <defaultOutputDevice>Speaker</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="primary input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bt_hfp_output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bt_hfp_input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 16000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+                <mixPort name="voip_tx" role="sink"
+                         flags="AUDIO_INPUT_FLAG_VOIP_TX">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+                <mixPort name="voip_rx" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_VOIP_RX">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                           samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+                </devicePort>
+                <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+                </devicePort>
+                <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink"
+                            encodedFormats="AUDIO_FORMAT_AC3">
+                </devicePort>
+                <devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
+                </devicePort>
+                <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO" role="sink" />
+                <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"
+                            role="source" />
+                <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink"
+                            encodedFormats="AUDIO_FORMAT_SBC">
+                    <profile name="" format="AUDIO_FORMAT_PCM_8_BIT"
+                             samplingRates="44100 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="USB Device Out" type="AUDIO_DEVICE_OUT_USB_DEVICE" role="sink">
+                </devicePort>
+                <devicePort tagName="USB Device In" type="AUDIO_DEVICE_IN_USB_DEVICE" role="source">
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Speaker"
+                       sources="primary output,voip_rx"/>
+                <route type="mix" sink="primary input"
+                       sources="Built-In Mic,Hdmi-In Mic,USB Device In"/>
+                <route type="mix" sink="voip_tx"
+                       sources="Built-In Mic"/>
+                <route type="mix" sink="Hdmi"
+                       sources="primary output"/>
+                <route type="mix" sink="BT SCO"
+                       sources="mixport_bt_hfp_output"/>
+                <route type="mix" sink="mixport_bt_hfp_input"
+                       sources="BT SCO Headset Mic"/>
+                <route type="mix" sink="BT A2DP Out"
+                       sources="primary output"/>
+                <route type="mix" sink="USB Device Out"
+                       sources="primary output"/>
+            </routes>
+        </module>
+
+        <!-- Remote Submix module -->
+        <module name="r_submix" halVersion="2.0">
+            <attachedDevices>
+                <item>Remote Submix In</item>
+            </attachedDevices>
+            <mixPorts>
+                <mixPort name="r_submix output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="r_submix input" role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+           </mixPorts>
+           <devicePorts>
+               <devicePort tagName="Remote Submix Out" type="AUDIO_DEVICE_OUT_REMOTE_SUBMIX"  role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+               </devicePort>
+               <devicePort tagName="Remote Submix In" type="AUDIO_DEVICE_IN_REMOTE_SUBMIX"  role="source">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Remote Submix Out"
+                       sources="r_submix output"/>
+                <route type="mix" sink="r_submix input"
+                       sources="Remote Submix In"/>
+            </routes>
+        </module>
+
+        <!-- Software Bluetooth Module -->
+        <module name="bluetooth" halVersion="2.0">
+          <mixPorts>
+            <mixPort name="a2dp_sw_output" role="source">
+              <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                       samplingRates="44100 48000 88200 96000"
+                       channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+            </mixPort>
+          </mixPorts>
+          <devicePorts>
+            <devicePort tagName="BTS A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+              <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                       samplingRates="44100 48000 88200 96000"
+                       channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+            </devicePort>
+          </devicePorts>
+          <routes>
+            <route type="mix" sink="BTS A2DP Out"
+                   sources="a2dp_sw_output"/>
+          </routes>
+        </module>
+
+      </modules>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/tests/spatializer_tests.cpp b/services/audiopolicy/tests/spatializer_tests.cpp
new file mode 100644
index 0000000..0b40f32
--- /dev/null
+++ b/services/audiopolicy/tests/spatializer_tests.cpp
@@ -0,0 +1,263 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Spatializer_Test"
+
+#include "Spatializer.h"
+
+#include <string>
+#include <unordered_set>
+
+#include <gtest/gtest.h>
+
+#include <android/media/audio/common/AudioLatencyMode.h>
+#include <android/media/audio/common/HeadTracking.h>
+#include <android/media/audio/common/Spatialization.h>
+#include <com_android_media_audio.h>
+#include <utils/Log.h>
+
+using namespace android;
+using media::audio::common::HeadTracking;
+using media::audio::common::Spatialization;
+
+// Test Spatializer Helper Methods
+
+TEST(Spatializer, containsImmersiveChannelMask) {
+    // Regardless of the implementation, we expect the following
+    // behavior.
+
+    // Pure non-immersive
+    EXPECT_FALSE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_STEREO }));
+    EXPECT_FALSE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO }));
+    EXPECT_FALSE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO,
+              AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_MONO }));
+
+    // Pure immersive
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_5POINT1 }));
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_7POINT1 }));
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_5POINT1, AUDIO_CHANNEL_OUT_7POINT1,
+              AUDIO_CHANNEL_OUT_22POINT2 }));
+
+    // Mixed immersive/non-immersive
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_7POINT1POINT4 }));
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO,
+              AUDIO_CHANNEL_OUT_7POINT1 }));
+}
+
+class TestSpatializerPolicyCallback :
+        public SpatializerPolicyCallback {
+public:
+    void onCheckSpatializer() override {}
+};
+
+class SpatializerTest : public ::testing::Test {
+protected:
+    void SetUp() override {
+        const sp<EffectsFactoryHalInterface> effectsFactoryHal
+                = EffectsFactoryHalInterface::create();
+        mSpatializer = Spatializer::create(&mTestCallback, effectsFactoryHal);
+        if (mSpatializer == nullptr) {
+            GTEST_SKIP() << "Skipping Spatializer tests: no spatializer";
+        }
+        std::vector<Spatialization::Level> levels;
+        binder::Status status = mSpatializer->getSupportedLevels(&levels);
+        ASSERT_TRUE(status.isOk());
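+        // Activate the spatializer by selecting the first supported level other than NONE.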
+        for (auto level : levels) {
+            if (level != Spatialization::Level::NONE) {
+                mSpatializer->setLevel(level);
+                break;
+            }
+        }
+        mSpatializer->setOutput(sTestOutput);
+    }
+
+    void TearDown() override {
+        if (mSpatializer == nullptr) {
+            return;
+        }
+        mSpatializer->setLevel(Spatialization::Level::NONE);
+        mSpatializer->setOutput(AUDIO_IO_HANDLE_NONE);
+        mSpatializer->setDesiredHeadTrackingMode(HeadTracking::Mode::DISABLED);
+        mSpatializer->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+        mSpatializer->updateActiveTracks({});
+    }
+
+    static constexpr audio_io_handle_t sTestOutput = 1977;
+    static constexpr int sTestSensorHandle = 1980;
+
+    const static inline std::vector<audio_latency_mode_t> sA2DPLatencyModes = {
+        AUDIO_LATENCY_MODE_LOW,
+        AUDIO_LATENCY_MODE_FREE
+    };
+    const static inline std::vector<audio_latency_mode_t> sBLELatencyModes = {
+        AUDIO_LATENCY_MODE_LOW,
+        AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+        AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE,
+        AUDIO_LATENCY_MODE_FREE
+    };
+
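+    // Enables head tracking when supported: selects the first mode other than DISABLED and
+    // attaches a test head sensor. Returns false when head tracking is not supported.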
+    bool setUpForHeadtracking() {
+        bool htSupported;
+        mSpatializer->isHeadTrackingSupported(&htSupported);
+        if (!htSupported) {
+            return false;
+        }
+
+        std::vector<HeadTracking::Mode> htModes;
+        mSpatializer->getSupportedHeadTrackingModes(&htModes);
+        for (auto htMode : htModes) {
+            if (htMode != HeadTracking::Mode::DISABLED) {
+                mSpatializer->setDesiredHeadTrackingMode(htMode);
+                break;
+            }
+        }
+
+        mSpatializer->setHeadSensor(sTestSensorHandle);
+        return true;
+    }
+
+    TestSpatializerPolicyCallback mTestCallback;
+    sp<Spatializer> mSpatializer;
+};
+
+TEST_F(SpatializerTest, SupportedA2dpLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping SupportedA2dpLatencyTest: head tracking not supported";
+    }
+    std::vector<audio_latency_mode_t> latencies = sA2DPLatencyModes;
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+    std::vector<audio_latency_mode_t> supportedLatencies =
+            mSpatializer->getSupportedLatencyModes();
+
+    ASSERT_TRUE(supportedLatencies == sA2DPLatencyModes);
+    // Free mode must always be the last entry of the ordered list
+    ASSERT_TRUE(supportedLatencies.back() == AUDIO_LATENCY_MODE_FREE);
+}
+
+TEST_F(SpatializerTest, SupportedBleLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping SupportedBleLatencyTest: head tracking not supported";
+    }
+    if (!com::android::media::audio::dsa_over_bt_le_audio()) {
+        GTEST_SKIP() << "Skipping SupportedBleLatencyTest: DSA over LE not enabled";
+    }
+    std::vector<audio_latency_mode_t> latencies = sBLELatencyModes;
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+    std::vector<audio_latency_mode_t> supportedLatencies =
+            mSpatializer->getSupportedLatencyModes();
+
+    ASSERT_TRUE(supportedLatencies.back() == AUDIO_LATENCY_MODE_FREE);
+    ASSERT_TRUE(std::find(supportedLatencies.begin(), supportedLatencies.end(),
+            AUDIO_LATENCY_MODE_LOW) != supportedLatencies.end());
+
+    std::vector<audio_latency_mode_t> orderedLowLatencyModes =
+        mSpatializer->getOrderedLowLatencyModes();
+
+    std::vector<audio_latency_mode_t> supportedLowLatencyModes;
+    // remove free mode at the end of the supported list to only retain low latency modes
+    std::copy(supportedLatencies.begin(),
+              supportedLatencies.begin() + supportedLatencies.size() - 1,
+              std::back_inserter(supportedLowLatencyModes));
+
+    // Verify that the supported low latency modes are always present in the ordered latency
+    // modes list, and in the same order.
+    std::vector<audio_latency_mode_t>::iterator lastIt = orderedLowLatencyModes.begin();
+    for (auto latency : supportedLowLatencyModes) {
+        auto it = std::find(orderedLowLatencyModes.begin(), orderedLowLatencyModes.end(), latency);
+        ASSERT_NE(it, orderedLowLatencyModes.end());
+        ASSERT_LE(lastIt, it);
+        lastIt = it;
+    }
+}
+
+TEST_F(SpatializerTest, RequestedA2dpLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping RequestedA2dpLatencyTest: head tracking not supported";
+    }
+
+    std::vector<audio_latency_mode_t> latencies = sA2DPLatencyModes;
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+    // requested latency mode must be free if no spatialized tracks are active
+    audio_latency_mode_t requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+
+    // requested latency mode must be low if at least one spatialized track is active
+    mSpatializer->updateActiveTracks({AUDIO_CHANNEL_OUT_5POINT1});
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_LOW);
+
+    // requested latency mode must be free after stopping the last spatialized track
+    mSpatializer->updateActiveTracks({});
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+}
+
+TEST_F(SpatializerTest, RequestedBleLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping RequestedBleLatencyTest: head tracking not supported";
+    }
+    if (!com::android::media::audio::dsa_over_bt_le_audio()) {
+        GTEST_SKIP() << "Skipping RequestedBleLatencyTest: DSA over LE not enabled";
+    }
+
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput,
+            { AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+              AUDIO_LATENCY_MODE_FREE });
+
+    // requested latency mode must be free if no spatialized tracks are active
+    audio_latency_mode_t requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+
+    // requested latency mode must be low software if at least one spatialized track is active
+    // and the only supported low latency mode is low software
+    mSpatializer->updateActiveTracks({AUDIO_CHANNEL_OUT_5POINT1});
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
+
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput,
+            { AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+              AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE,
+              AUDIO_LATENCY_MODE_FREE });
+
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    HeadTracking::ConnectionMode connectionMode = mSpatializer->getHeadtrackingConnectionMode();
+
+    // If the low hardware mode is used, the spatializer must use one of the tunneled sensor
+    // connection modes. Otherwise, the low software mode must be used.
+    if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
+        ASSERT_TRUE(connectionMode == HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL
+                        || connectionMode == HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW);
+    } else {
+        ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
+    }
+
+    // requested latency mode must be free after stopping the last spatialized track
+    mSpatializer->updateActiveTracks({});
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+}
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 575e0fb..38476a4 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -72,8 +72,10 @@
         "libsensorprivacy",
         "libstagefright",
         "libstagefright_foundation",
+        "libvendorsupport",
         "libxml2",
         "libyuv",
+        "android.companion.virtual.virtualdevice_aidl-cpp",
         "android.companion.virtualdevice.flags-aconfig-cc",
         "android.hardware.camera.common@1.0",
         "android.hardware.camera.device@1.0",
@@ -86,6 +88,7 @@
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
         "camera_platform_flags_c_lib",
+        "com.android.window.flags.window-aconfig_flags_c_lib",
         "media_permission-aidl-cpp",
     ],
 
@@ -112,6 +115,7 @@
         "libcameraservice_device_independent",
         "libdynamic_depth",
         "libprocessinfoservice_aidl",
+        "libvirtualdevicebuildflags",
         "media_permission-aidl-cpp",
     ],
 }
@@ -185,17 +189,17 @@
         "aidl/AidlCameraServiceListener.cpp",
         "aidl/AidlUtils.cpp",
         "aidl/DeathPipe.cpp",
+        "utils/AttributionAndPermissionUtils.cpp",
         "utils/CameraServiceProxyWrapper.cpp",
-        "utils/CameraThreadState.cpp",
         "utils/CameraTraces.cpp",
         "utils/AutoConditionLock.cpp",
         "utils/SchedulingPolicyUtils.cpp",
         "utils/SessionConfigurationUtils.cpp",
         "utils/SessionConfigurationUtilsHidl.cpp",
-        "utils/SessionStatsBuilder.cpp",
         "utils/TagMonitor.cpp",
         "utils/LatencyHistogram.cpp",
         "utils/Utils.cpp",
+        "utils/VirtualDeviceCameraIdMapper.cpp",
     ],
 
     header_libs: [
@@ -227,7 +231,6 @@
         "-Werror",
         "-Wno-ignored-qualifiers",
     ],
-
 }
 
 cc_library_static {
@@ -244,6 +247,7 @@
         "device3/ZoomRatioMapper.cpp",
         "utils/ExifUtils.cpp",
         "utils/SessionConfigurationUtilsHost.cpp",
+        "utils/SessionStatsBuilder.cpp",
     ],
 
     header_libs: [
@@ -261,7 +265,7 @@
         "liblog",
         "libutils",
         "libxml2",
-        "camera_platform_flags_c_lib"
+        "camera_platform_flags_c_lib",
     ],
 
     include_dirs: [
@@ -277,5 +281,4 @@
         "-Werror",
         "-Wno-ignored-qualifiers",
     ],
-
 }
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index d9d8a3d..d21241b 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -22,13 +22,9 @@
 #include <utils/Trace.h>
 #include <cutils/properties.h>
 
-#include "camera/CameraMetadata.h"
 #include "CameraFlashlight.h"
-#include "gui/IGraphicBufferConsumer.h"
-#include "gui/BufferQueue.h"
+#include "camera/CameraMetadata.h"
 #include "camera/camera2/CaptureRequest.h"
-#include "device3/Camera3Device.h"
-
 
 namespace android {
 
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3f43af5..f94300e 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -38,7 +38,8 @@
 #include <aidl/AidlCameraService.h>
 #include <android-base/macros.h>
 #include <android-base/parseint.h>
-#include <android/permission/PermissionChecker.h>
+#include <android_companion_virtualdevice_flags.h>
+#include <android/companion/virtualnative/IVirtualDeviceManagerNative.h>
 #include <binder/ActivityManager.h>
 #include <binder/AppOpsManager.h>
 #include <binder/IPCThreadState.h>
@@ -73,6 +74,7 @@
 #include <system/camera_metadata.h>
 #include <binder/IServiceManager.h>
 #include <binder/IActivityManager.h>
+#include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 
 #include <system/camera.h>
@@ -80,11 +82,11 @@
 #include "CameraService.h"
 #include "api1/Camera2Client.h"
 #include "api2/CameraDeviceClient.h"
-#include "utils/CameraTraces.h"
-#include "utils/TagMonitor.h"
-#include "utils/CameraThreadState.h"
 #include "utils/CameraServiceProxyWrapper.h"
+#include "utils/CameraTraces.h"
 #include "utils/SessionConfigurationUtils.h"
+#include "utils/TagMonitor.h"
+#include "utils/Utils.h"
 
 namespace {
     const char* kPermissionServiceName = "permission";
@@ -92,7 +94,18 @@
     const char* kSensorPrivacyServiceName = "sensor_privacy";
     const char* kAppopsServiceName = "appops";
     const char* kProcessInfoServiceName = "processinfo";
-}; // namespace anonymous
+    const char* kVirtualDeviceBackCameraId = "0";
+    const char* kVirtualDeviceFrontCameraId = "1";
+
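+    // Returns the virtual device id stored in the camera characteristics, or the default
+    // device id when ANDROID_INFO_DEVICE_ID is not populated.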
+    int32_t getDeviceId(const android::CameraMetadata& cameraInfo) {
+        if (!cameraInfo.exists(ANDROID_INFO_DEVICE_ID)) {
+            return android::kDefaultDeviceId;
+        }
+
+        const auto &deviceIdEntry = cameraInfo.find(ANDROID_INFO_DEVICE_ID);
+        return deviceIdEntry.data.i32[0];
+    }
+} // namespace anonymous
 
 namespace android {
 
@@ -100,6 +113,7 @@
 using namespace camera3::SessionConfigurationUtils;
 
 using binder::Status;
+using companion::virtualnative::IVirtualDeviceManagerNative;
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
 using frameworks::cameraservice::service::implementation::AidlCameraService;
 using hardware::ICamera;
@@ -109,7 +123,9 @@
 using hardware::camera2::ICameraInjectionSession;
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
 using hardware::camera2::utils::ConcurrentCameraIdCombination;
+
 namespace flags = com::android::internal::camera::flags;
+namespace vd_flags = android::companion::virtualdevice::flags;
 
 // ----------------------------------------------------------------------------
 // Logging support -- this is for debugging only
@@ -129,18 +145,16 @@
 
 // ----------------------------------------------------------------------------
 
-static const std::string sDumpPermission("android.permission.DUMP");
-static const std::string sManageCameraPermission("android.permission.MANAGE_CAMERA");
-static const std::string sCameraPermission("android.permission.CAMERA");
-static const std::string sSystemCameraPermission("android.permission.SYSTEM_CAMERA");
-static const std::string sCameraHeadlessSystemUserPermission(
-        "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
-static const std::string
-        sCameraSendSystemEventsPermission("android.permission.CAMERA_SEND_SYSTEM_EVENTS");
-static const std::string sCameraOpenCloseListenerPermission(
-        "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
-static const std::string
-        sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+// Permission strings (references to AttributionAndPermissionUtils for brevity)
+static const std::string &sDumpPermission =
+        AttributionAndPermissionUtils::sDumpPermission;
+static const std::string &sManageCameraPermission =
+        AttributionAndPermissionUtils::sManageCameraPermission;
+static const std::string &sCameraSendSystemEventsPermission =
+        AttributionAndPermissionUtils::sCameraSendSystemEventsPermission;
+static const std::string &sCameraInjectExternalCameraPermission =
+        AttributionAndPermissionUtils::sCameraInjectExternalCameraPermission;
+
 // Constant integer for FGS Logging, used to denote the API type for logger
 static const int LOG_FGS_CAMERA_API = 1;
 const char *sFileName = "lastOpenSessionDumpFile";
@@ -152,11 +166,17 @@
 const std::string CameraService::kOfflineDevice("offline-");
 const std::string CameraService::kWatchAllClientsFlag("all");
 
+constexpr int32_t kInvalidDeviceId = -1;
+
 // Set to keep track of logged service error events.
 static std::set<std::string> sServiceErrorEventSet;
 
 CameraService::CameraService(
-        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils) :
+        AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils == nullptr ?
+                std::make_shared<AttributionAndPermissionUtils>()
+                : attributionAndPermissionUtils),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper == nullptr ?
                 std::make_shared<CameraServiceProxyWrapper>() : cameraServiceProxyWrapper),
         mEventLog(DEFAULT_EVENT_LOG_LENGTH),
@@ -165,6 +185,7 @@
         mSoundRef(0), mInitialized(false),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE) {
     ALOGI("CameraService started (pid=%d)", getpid());
+    mAttributionAndPermissionUtils->setCameraService(this);
     mServiceLockWrapper = std::make_shared<WaitableMutexWrapper>(&mServiceLock);
     mMemFd = memfd_create(sFileName, MFD_ALLOW_SEALING);
     if (mMemFd == -1) {
@@ -172,12 +193,6 @@
     }
 }
 
-// The word 'System' here does not refer to clients only on the system
-// partition. They just need to have a android system uid.
-static bool doesClientHaveSystemUid() {
-    return (CameraThreadState::getCallingUid() < AID_APP_START);
-}
-
 // Enable processes with isolated AID to request the binder
 void CameraService::instantiate() {
     CameraService::publish(true);
@@ -194,7 +209,6 @@
 
 void CameraService::onFirstRef()
 {
-
     ALOGI("CameraService process starting");
 
     BnCameraService::onFirstRef();
@@ -213,7 +227,7 @@
 
     mUidPolicy = new UidPolicy(this);
     mUidPolicy->registerSelf();
-    mSensorPrivacyPolicy = new SensorPrivacyPolicy(this);
+    mSensorPrivacyPolicy = new SensorPrivacyPolicy(this, mAttributionAndPermissionUtils);
     mSensorPrivacyPolicy->registerSelf();
     mInjectionStatusListener = new InjectionStatusListener(this);
 
@@ -266,7 +280,6 @@
             }
         }
 
-
         // Setup vendor tags before we call get_camera_info the first time
         // because HAL might need to setup static vendor keys in get_camera_info
         // TODO: maybe put this into CameraProviderManager::initialize()?
@@ -284,7 +297,6 @@
         deviceIds = mCameraProviderManager->getCameraDeviceIds(&unavailPhysicalIds);
     }
 
-
     for (auto& cameraId : deviceIds) {
         if (getCameraState(cameraId) == nullptr) {
             onDeviceStatusChanged(cameraId, CameraDeviceStatus::PRESENT);
@@ -313,6 +325,10 @@
 
 void CameraService::broadcastTorchModeStatus(const std::string& cameraId, TorchModeStatus status,
         SystemCameraKind systemCameraKind) {
+    // Get the device id and app-visible camera id for the given HAL-visible camera id.
+    auto [deviceId, mappedCameraId] =
+            mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
     Mutex::Autolock lock(mStatusListenerLock);
     for (auto& i : mListenerList) {
         if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
@@ -321,19 +337,11 @@
                     __FUNCTION__, cameraId.c_str());
             continue;
         }
+
         auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
-                cameraId);
+                mappedCameraId, deviceId);
         i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
                 __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
-        // Also trigger the torch callbacks for cameras that were remapped to the current cameraId
-        // for the specific package that this listener belongs to.
-        std::vector<std::string> remappedCameraIds =
-                findOriginalIdsForRemappedCameraId(cameraId, i->getListenerUid());
-        for (auto& remappedCameraId : remappedCameraIds) {
-            ret = i->getListener()->onTorchStatusChanged(mapToInterface(status), remappedCameraId);
-            i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
-                    __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
-        }
     }
 }
 
@@ -351,10 +359,29 @@
 void CameraService::filterAPI1SystemCameraLocked(
         const std::vector<std::string> &normalDeviceIds) {
     mNormalDeviceIdsWithoutSystemCamera.clear();
-    for (auto &deviceId : normalDeviceIds) {
+    for (auto &cameraId : normalDeviceIds) {
+        if (vd_flags::camera_device_awareness()) {
+            CameraMetadata cameraInfo;
+            status_t res = mCameraProviderManager->getCameraCharacteristics(
+                    cameraId, false, &cameraInfo,
+                    hardware::ICameraService::ROTATION_OVERRIDE_NONE);
+            int32_t deviceId = kDefaultDeviceId;
+            if (res != OK) {
+                ALOGW("%s: Not able to get camera characteristics for camera id %s",
+                      __FUNCTION__, cameraId.c_str());
+            } else {
+                deviceId = getDeviceId(cameraInfo);
+            }
+            // Cameras associated with non-default device ids (i.e., virtual cameras) can never
+            // be system cameras, so skip them here.
+            if (deviceId != kDefaultDeviceId) {
+                continue;
+            }
+        }
+
         SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
-        if (getSystemCameraKind(deviceId, &deviceKind) != OK) {
-            ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, deviceId.c_str());
+        if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
+            ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
             continue;
         }
         if (deviceKind == SystemCameraKind::SYSTEM_ONLY_CAMERA) {
@@ -362,7 +389,7 @@
             // device ids as per the HAL interface contract.
             break;
         }
-        mNormalDeviceIdsWithoutSystemCamera.push_back(deviceId);
+        mNormalDeviceIdsWithoutSystemCamera.push_back(cameraId);
     }
     ALOGV("%s: number of API1 compatible public cameras is %zu", __FUNCTION__,
               mNormalDeviceIdsWithoutSystemCamera.size());
@@ -400,8 +427,9 @@
         int facing = -1;
         int orientation = 0;
         int portraitRotation;
-        getDeviceVersion(cameraId, /*overrideToPortrait*/false, /*out*/&portraitRotation,
-                /*out*/&facing, /*out*/&orientation);
+        getDeviceVersion(cameraId,
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                /*out*/&portraitRotation, /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
             return;
@@ -485,7 +513,7 @@
 void CameraService::onDeviceStatusChanged(const std::string& cameraId,
         CameraDeviceStatus newHalStatus) {
     ALOGI("%s: Status changed for cameraId=%s, newStatus=%d", __FUNCTION__,
-            cameraId.c_str(), newHalStatus);
+            cameraId.c_str(), eToI(newHalStatus));
 
     StatusInternal newStatus = mapToInternal(newHalStatus);
 
@@ -509,17 +537,18 @@
     StatusInternal oldStatus = state->getStatus();
 
     if (oldStatus == newStatus) {
-        ALOGE("%s: State transition to the same status %#x not allowed", __FUNCTION__, newStatus);
+        ALOGE("%s: State transition to the same status %#x not allowed", __FUNCTION__,
+                eToI(newStatus));
         return;
     }
 
     if (newStatus == StatusInternal::NOT_PRESENT) {
         logDeviceRemoved(cameraId, fmt::format("Device status changed from {} to {}",
                 oldStatus, newStatus));
-
         // Set the device status to NOT_PRESENT, clients will no longer be able to connect
         // to this device until the status changes
         updateStatus(StatusInternal::NOT_PRESENT, cameraId);
+        mVirtualDeviceCameraIdMapper.removeCamera(cameraId);
 
         sp<BasicClient> clientToDisconnectOnline, clientToDisconnectOffline;
         {
@@ -552,7 +581,7 @@
         const std::string& physicalId,
         CameraDeviceStatus newHalStatus) {
     ALOGI("%s: Status changed for cameraId=%s, physicalCameraId=%s, newStatus=%d",
-            __FUNCTION__, id.c_str(), physicalId.c_str(), newHalStatus);
+            __FUNCTION__, id.c_str(), physicalId.c_str(), eToI(newHalStatus));
 
     StatusInternal newStatus = mapToInternal(newHalStatus);
 
@@ -568,7 +597,7 @@
     if (logicalCameraStatus != StatusInternal::PRESENT &&
             logicalCameraStatus != StatusInternal::NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
-                __FUNCTION__, physicalId.c_str(), newHalStatus, logicalCameraStatus);
+                __FUNCTION__, physicalId.c_str(), eToI(newHalStatus), eToI(logicalCameraStatus));
         return;
     }
 
@@ -601,7 +630,7 @@
                 continue;
             }
             auto ret = listener->getListener()->onPhysicalCameraStatusChanged(
-                    mapToInterface(newStatus), id, physicalId);
+                    mapToInterface(newStatus), id, physicalId, kDefaultDeviceId);
             listener->handleBinderStatus(ret,
                     "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
                     __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
@@ -635,7 +664,6 @@
     onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
 }
 
-
 void CameraService::onTorchStatusChanged(const std::string& cameraId,
         TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     Mutex::Autolock al(mTorchStatusMutex);
@@ -644,9 +672,14 @@
 
 void CameraService::broadcastTorchStrengthLevel(const std::string& cameraId,
         int32_t newStrengthLevel) {
+    // Get the device id and app-visible camera id for the given HAL-visible camera id.
+    auto [deviceId, mappedCameraId] =
+            mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
     Mutex::Autolock lock(mStatusListenerLock);
     for (auto& i : mListenerList) {
-        auto ret = i->getListener()->onTorchStrengthLevelChanged(cameraId, newStrengthLevel);
+        auto ret = i->getListener()->onTorchStrengthLevelChanged(mappedCameraId,
+                newStrengthLevel, deviceId);
         i->handleBinderStatus(ret,
                 "%s: Failed to trigger onTorchStrengthLevelChanged for %d:%d: %d", __FUNCTION__,
                 i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
@@ -656,7 +689,7 @@
 void CameraService::onTorchStatusChangedLocked(const std::string& cameraId,
         TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
-            __FUNCTION__, cameraId.c_str(), newStatus);
+            __FUNCTION__, cameraId.c_str(), eToI(newStatus));
 
     TorchModeStatus status;
     status_t res = getTorchStatusLocked(cameraId, &status);
@@ -706,34 +739,7 @@
     broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
-static bool isAutomotiveDevice() {
-    // Checks the property ro.hardware.type and returns true if it is
-    // automotive.
-    char value[PROPERTY_VALUE_MAX] = {0};
-    property_get("ro.hardware.type", value, "");
-    return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
-}
-
-static bool isHeadlessSystemUserMode() {
-    // Checks if the device is running in headless system user mode
-    // by checking the property ro.fw.mu.headless_system_user.
-    char value[PROPERTY_VALUE_MAX] = {0};
-    property_get("ro.fw.mu.headless_system_user", value, "");
-    return strncmp(value, "true", PROPERTY_VALUE_MAX) == 0;
-}
-
-static bool isAutomotivePrivilegedClient(int32_t uid) {
-    // Returns false if this is not an automotive device type.
-    if (!isAutomotiveDevice())
-        return false;
-
-    // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
-    // privileged client uid used for safety critical use cases such as
-    // rear view and surround view.
-    return uid == AID_AUTOMOTIVE_EVS;
-}
-
-bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const{
+bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const {
     // Returns false if this is not an automotive device type.
     if (!isAutomotiveDevice())
         return false;
@@ -756,7 +762,7 @@
 
     CameraMetadata cameraInfo;
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            cam_id, false, &cameraInfo, false);
+            cam_id, false, &cameraInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
     if (res != OK){
         ALOGE("%s: Not able to get camera characteristics for camera id %s",__FUNCTION__,
                 cam_id.c_str());
@@ -778,54 +784,20 @@
     return true;
 }
 
-bool CameraService::checkPermission(const std::string& cameraId, const std::string& permission,
-        const AttributionSourceState& attributionSource, const std::string& message,
-        int32_t attributedOpCode) const{
-    if (isAutomotivePrivilegedClient(attributionSource.uid)) {
-        // If cameraId is empty, then it means that this check is not used for the
-        // purpose of accessing a specific camera, hence grant permission just
-        // based on uid to the automotive privileged client.
-        if (cameraId.empty())
-            return true;
-        // If this call is used for accessing a specific camera then cam_id must be provided.
-        // In that case, only pre-grants the permission for accessing the exterior system only
-        // camera.
-        return isAutomotiveExteriorSystemCamera(cameraId);
+Status CameraService::getNumberOfCameras(int32_t type,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        int32_t* numCameras) {
+    ATRACE_CALL();
+    if (vd_flags::camera_device_awareness() && (clientAttribution.deviceId != kDefaultDeviceId)
+            && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
+        *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(clientAttribution.deviceId);
+        return Status::ok();
     }
 
-    permission::PermissionChecker permissionChecker;
-    return permissionChecker.checkPermissionForPreflight(toString16(permission), attributionSource,
-            toString16(message), attributedOpCode)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
-}
-
-bool CameraService::hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid,
-        int callingUid) const{
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = checkPermission(cameraId,
-            sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
-    bool checkPermissionForCamera = checkPermission(cameraId,
-            sCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
-    return checkPermissionForSystemCamera && checkPermissionForCamera;
-}
-
-bool CameraService::hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
-        int callingPid, int callingUid) const{
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    return checkPermission(cameraId, sCameraHeadlessSystemUserPermission, attributionSource,
-            std::string(), AppOpsManager::OP_NONE);
-}
-
-Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
-    ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
     bool hasSystemCameraPermissions =
-            hasPermissionsForSystemCamera(std::string(), CameraThreadState::getCallingPid(),
-                    CameraThreadState::getCallingUid());
+            hasPermissionsForSystemCamera(std::string(), getCallingPid(),
+                    getCallingUid());
     switch (type) {
         case CAMERA_TYPE_BACKWARD_COMPATIBLE:
             if (hasSystemCameraPermissions) {
@@ -850,25 +822,8 @@
     return Status::ok();
 }
 
-Status CameraService::remapCameraIds(const hardware::CameraIdRemapping& cameraIdRemapping) {
-    if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
-        const int pid = CameraThreadState::getCallingPid();
-        const int uid = CameraThreadState::getCallingUid();
-        ALOGE("%s: Permission Denial: can't configure camera ID mapping pid=%d, uid=%d",
-                __FUNCTION__, pid, uid);
-        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                "Permission Denial: no permission to configure camera id mapping");
-    }
-    TCameraIdRemapping cameraIdRemappingMap{};
-    binder::Status parseStatus = parseCameraIdRemapping(cameraIdRemapping, &cameraIdRemappingMap);
-    if (!parseStatus.isOk()) {
-        return parseStatus;
-    }
-    remapCameraIds(cameraIdRemappingMap);
-    return Status::ok();
-}
-
 Status CameraService::createDefaultRequest(const std::string& unresolvedCameraId, int templateId,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /* out */
         hardware::camera2::impl::CameraMetadataNative* request) {
     ATRACE_CALL();
@@ -883,8 +838,15 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    const std::string cameraId = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
 
     binder::Status res;
     if (request == nullptr) {
@@ -906,13 +868,6 @@
                 "request for system only device %s: ", cameraId.c_str());
     }
 
-    // Check for camera permissions
-    if (!hasCameraPermissions()) {
-        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                "android.permission.CAMERA needed to call"
-                "createDefaultRequest");
-    }
-
     CameraMetadata metadata;
     status_t err = mCameraProviderManager->createDefaultRequest(cameraId, tempId, &metadata);
     if (err == OK) {
@@ -930,10 +885,10 @@
 }
 
 Status CameraService::isSessionConfigurationWithParametersSupported(
-        const std::string& unresolvedCameraId,
+        const std::string& unresolvedCameraId, int targetSdkVersion,
         const SessionConfiguration& sessionConfiguration,
-        /*out*/
-        bool* supported) {
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        /*out*/ bool* supported) {
     ATRACE_CALL();
 
     if (!flags::feature_combination_query()) {
@@ -946,8 +901,16 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    const std::string cameraId = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
     if (supported == nullptr) {
         std::string msg = fmt::sprintf("Camera %s: Invalid 'support' input!",
                 unresolvedCameraId.c_str());
@@ -961,153 +924,248 @@
                 cameraId.c_str());
     }
 
-    // Check for camera permissions
-    if (!hasCameraPermissions()) {
-        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                "android.permission.CAMERA needed to call"
-                "isSessionConfigurationWithParametersSupported");
-    }
+    bool overrideForPerfClass = flags::calculate_perf_override_during_session_support() &&
+                                SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+                                        mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
 
+    auto ret = isSessionConfigurationWithParametersSupportedUnsafe(cameraId,
+            sessionConfiguration, overrideForPerfClass, supported);
+    if (flags::analytics_24q3()) {
+        mCameraServiceProxyWrapper->logFeatureCombinationQuery(cameraId,
+                getCallingUid(), sessionConfiguration, ret);
+    }
+    return ret;
+}
+
+Status CameraService::isSessionConfigurationWithParametersSupportedUnsafe(
+        const std::string& cameraId, const SessionConfiguration& sessionConfiguration,
+        bool overrideForPerfClass, /*out*/ bool* supported) {
     *supported = false;
-    status_t ret = mCameraProviderManager->isSessionConfigurationSupported(cameraId.c_str(),
-            sessionConfiguration, /*mOverrideForPerfClass*/false, /*checkSessionParams*/true,
-            supported);
+    status_t ret = mCameraProviderManager->isSessionConfigurationSupported(
+            cameraId, sessionConfiguration, overrideForPerfClass,
+            /*checkSessionParams=*/true, supported);
     binder::Status res;
     switch (ret) {
         case OK:
-            // Expected, do nothing.
+            // Expected, do nothing.
+            return Status::ok();
+        case INVALID_OPERATION: {
+                std::string msg = fmt::sprintf(
+                        "Camera %s: Session configuration with parameters supported query not "
+                        "supported!",
+                        cameraId.c_str());
+                ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+                logServiceError(msg, CameraService::ERROR_INVALID_OPERATION);
+                *supported = false;
+                return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+            }
+            break;
+        case NAME_NOT_FOUND: {
+                std::string msg = fmt::sprintf("Camera %s: Unknown camera ID.", cameraId.c_str());
+                ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+                logServiceError(msg, CameraService::ERROR_ILLEGAL_ARGUMENT);
+                *supported = false;
+                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+            }
+            break;
+        default: {
+                std::string msg = fmt::sprintf(
+                        "Unable to retrieve session configuration support for camera "
+                        "device %s: Error: %s (%d)",
+                        cameraId.c_str(), strerror(-ret), ret);
+                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+                logServiceError(msg, CameraService::ERROR_ILLEGAL_ARGUMENT);
+                *supported = false;
+                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+            }
+            break;
+    }
+}
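// Illustrative note, not part of this change: the "Unsafe" suffix marks a helper
// that assumes its caller has already resolved the camera id and run the
// entry-point checks (initialization, shouldRejectSystemCameraConnection), so it
// can be shared by isSessionConfigurationWithParametersSupported() and
// getSessionCharacteristics() without repeating those checks.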
+
+Status CameraService::getSessionCharacteristics(const std::string& unresolvedCameraId,
+        int targetSdkVersion, int rotationOverride,
+        const SessionConfiguration& sessionConfiguration,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        /*out*/ CameraMetadata* outMetadata) {
+    ATRACE_CALL();
+
+    if (outMetadata == nullptr) {
+        std::string msg =
+                fmt::sprintf("Camera %s: Invalid 'outMetadata' input!", unresolvedCameraId.c_str());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+
+    if (!mInitialized) {
+        ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
+        logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
+        return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
+    }
+
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                                       unresolvedCameraId.c_str(), clientAttribution.deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
+    if (shouldRejectSystemCameraConnection(cameraId)) {
+        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+                                "Unable to retrieve camera "
+                                "characteristics for system only device %s: ",
+                                cameraId.c_str());
+    }
+
+    bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+            mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+    if (flags::check_session_support_before_session_char()) {
+        bool sessionConfigSupported;
+        Status res = isSessionConfigurationWithParametersSupportedUnsafe(
+                cameraId, sessionConfiguration, overrideForPerfClass, &sessionConfigSupported);
+        if (!res.isOk()) {
+            // isSessionConfigurationWithParametersSupportedUnsafe should log what went wrong and
+            // report the correct Status to send to the client. Simply forward the error to
+            // the client.
+            outMetadata->clear();
+            return res;
+        }
+        if (!sessionConfigSupported) {
+            std::string msg = fmt::sprintf(
+                    "Session configuration not supported for camera device %s.", cameraId.c_str());
+            outMetadata->clear();
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+    }
+
+    status_t ret = mCameraProviderManager->getSessionCharacteristics(
+            cameraId, sessionConfiguration, overrideForPerfClass, rotationOverride, outMetadata);
+
+    switch (ret) {
+        case OK:
+            // Expected, no handling needed.
             break;
         case INVALID_OPERATION: {
                 std::string msg = fmt::sprintf(
-                        "Camera %s: Session configuration query not supported!",
+                        "Camera %s: Session characteristics query not supported!",
                         cameraId.c_str());
-                ALOGD("%s: %s", __FUNCTION__, msg.c_str());
-                res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+                ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+                logServiceError(msg, CameraService::ERROR_INVALID_OPERATION);
+                outMetadata->clear();
+                return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
             }
-
+            break;
+        case NAME_NOT_FOUND: {
+                std::string msg = fmt::sprintf(
+                        "Camera %s: Unknown camera ID.",
+                        cameraId.c_str());
+                ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+                logServiceError(msg, CameraService::ERROR_ILLEGAL_ARGUMENT);
+                outMetadata->clear();
+                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+            }
             break;
         default: {
-                std::string msg = fmt::sprintf( "Camera %s: Error: %s (%d)", cameraId.c_str(),
-                        strerror(-ret), ret);
+                std::string msg = fmt::sprintf(
+                        "Unable to retrieve session characteristics for camera device %s: "
+                        "Error: %s (%d)",
+                        cameraId.c_str(), strerror(-ret), ret);
                 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                        msg.c_str());
+                logServiceError(msg, CameraService::ERROR_INVALID_OPERATION);
+                outMetadata->clear();
+                return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
             }
     }
 
+    Status res = filterSensitiveMetadataIfNeeded(cameraId, outMetadata);
+    if (flags::analytics_24q3()) {
+        mCameraServiceProxyWrapper->logSessionCharacteristicsQuery(cameraId,
+                getCallingUid(), sessionConfiguration, res);
+    }
     return res;
 }
 
-Status CameraService::parseCameraIdRemapping(
-        const hardware::CameraIdRemapping& cameraIdRemapping,
-        /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
-    std::string packageName;
-    std::string cameraIdToReplace, updatedCameraId;
-    for(const auto& packageIdRemapping: cameraIdRemapping.packageIdRemappings) {
-        packageName = packageIdRemapping.packageName;
-        if (packageName.empty()) {
-            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
-                    "CameraIdRemapping: Package name cannot be empty");
-        }
-        if (packageIdRemapping.cameraIdsToReplace.size()
-            != packageIdRemapping.updatedCameraIds.size()) {
-            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                    "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
-                    packageName.c_str());
-        }
-        for(size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
-            cameraIdToReplace = packageIdRemapping.cameraIdsToReplace[i];
-            updatedCameraId = packageIdRemapping.updatedCameraIds[i];
-            if (cameraIdToReplace.empty() || updatedCameraId.empty()) {
-                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                        "CameraIdRemapping: Camera Id cannot be empty for package %s",
-                        packageName.c_str());
-            }
-            if (cameraIdToReplace == updatedCameraId) {
-                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                        "CameraIdRemapping: CameraIdToReplace cannot be the same"
-                        " as updatedCameraId for %s",
-                        packageName.c_str());
-            }
-            (*cameraIdRemappingMap)[packageName][cameraIdToReplace] = updatedCameraId;
+Status CameraService::filterSensitiveMetadataIfNeeded(
+        const std::string& cameraId, CameraMetadata* metadata) {
+    int callingPid = getCallingPid();
+    int callingUid = getCallingUid();
+
+    if (callingPid == getpid()) {
+        // Caller is cameraserver; no need to remove keys
+        return Status::ok();
+    }
+
+    SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
+    if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
+        ALOGE("%s: Couldn't get camera kind for camera id %s", __FUNCTION__, cameraId.c_str());
+        metadata->clear();
+        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+                                "Unable to retrieve camera kind for device %s", cameraId.c_str());
+    }
+    if (deviceKind == SystemCameraKind::SYSTEM_ONLY_CAMERA) {
+        // Attempting to query a system-only camera without the system camera permission would
+        // have failed the shouldRejectSystemCameraConnection check in the caller. So if we get
+        // here for a system-only camera, the caller has the required permission.
+        // No need to remove keys
+        return Status::ok();
+    }
+
+    std::vector<int32_t> tagsRemoved;
+    // Get the device id that owns this camera.
+    auto [cameraOwnerDeviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(
+            cameraId);
+    bool hasCameraPermission = hasPermissionsForCamera(cameraId, callingPid, callingUid,
+            cameraOwnerDeviceId);
+    if (hasCameraPermission) {
+        // Caller has camera permission; no need to remove keys
+        return Status::ok();
+    }
+
+    status_t ret = metadata->removePermissionEntries(
+            mCameraProviderManager->getProviderTagIdLocked(cameraId), &tagsRemoved);
+    if (ret != OK) {
+        metadata->clear();
+        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+                                "Failed to remove camera characteristics needing camera permission "
+                                "for device %s: %s (%d)",
+                                cameraId.c_str(), strerror(-ret), ret);
+    }
+
+    if (!tagsRemoved.empty()) {
+        ret = metadata->update(ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION,
+                                  tagsRemoved.data(), tagsRemoved.size());
+        if (ret != OK) {
+            metadata->clear();
+            return STATUS_ERROR_FMT(
+                    ERROR_INVALID_OPERATION,
+                    "Failed to insert camera keys needing permission for device %s: %s (%d)",
+                    cameraId.c_str(), strerror(-ret), ret);
         }
     }
     return Status::ok();
 }
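// Illustrative summary, not part of this change: filterSensitiveMetadataIfNeeded()
// centralizes the key-stripping logic that getCameraCharacteristics() used to
// carry inline. For callers that lack the camera permission (and are not querying
// a system-only camera), permission-gated characteristic keys are removed and the
// stripped tag ids are recorded under
// ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION so clients can tell
// which keys were withheld.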
 
-void CameraService::remapCameraIds(const TCameraIdRemapping& cameraIdRemapping) {
-    // Acquire mServiceLock and prevent other clients from connecting
-    std::unique_ptr<AutoConditionLock> serviceLockWrapper =
-            AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
-
-    // Collect all existing clients for camera Ids that are being
-    // remapped in the new cameraIdRemapping, but only if they were being used by a
-    // targeted packageName.
-    std::vector<sp<BasicClient>> clientsToDisconnect;
-    std::vector<std::string> cameraIdsToUpdate;
-    for (const auto& [packageName, injectionMap] : cameraIdRemapping) {
-        for (auto& [id0, id1] : injectionMap) {
-            ALOGI("%s: UPDATE:= %s: %s: %s", __FUNCTION__, packageName.c_str(),
-                    id0.c_str(), id1.c_str());
-            auto clientDescriptor = mActiveClientManager.get(id0);
-            if (clientDescriptor != nullptr) {
-                sp<BasicClient> clientSp = clientDescriptor->getValue();
-                if (clientSp->getPackageName() == packageName) {
-                    // This camera is being used by a targeted packageName and
-                    // being remapped to a new camera Id. We should disconnect it.
-                    clientsToDisconnect.push_back(clientSp);
-                    cameraIdsToUpdate.push_back(id0);
-                }
-            }
-        }
-    }
-
-    for (auto& clientSp : clientsToDisconnect) {
-        // Notify the clients about the disconnection.
-        clientSp->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
-                CaptureResultExtras{});
-    }
-
-    // Do not hold mServiceLock while disconnecting clients, but retain the condition
-    // blocking other clients from connecting in mServiceLockWrapper if held.
-    mServiceLock.unlock();
-
-    // Clear calling identity for disconnect() PID checks.
-    int64_t token = CameraThreadState::clearCallingIdentity();
-
-    // Disconnect clients.
-    for (auto& clientSp : clientsToDisconnect) {
-        // This also triggers a call to updateStatus() which also reads mCameraIdRemapping
-        // and requires mCameraIdRemappingLock.
-        clientSp->disconnect();
-    }
-
-    // Invoke destructors (which call disconnect()) now while we don't hold the mServiceLock.
-    clientsToDisconnect.clear();
-
-    CameraThreadState::restoreCallingIdentity(token);
-    mServiceLock.lock();
-
-    {
-        Mutex::Autolock lock(mCameraIdRemappingLock);
-        // Update mCameraIdRemapping.
-        mCameraIdRemapping.clear();
-        mCameraIdRemapping.insert(cameraIdRemapping.begin(), cameraIdRemapping.end());
-    }
-}
-
 Status CameraService::injectSessionParams(
-            const std::string& cameraId,
-            const CameraMetadata& sessionParams) {
-   if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
-        const int pid = CameraThreadState::getCallingPid();
-        const int uid = CameraThreadState::getCallingUid();
+        const std::string& cameraId,
+        const CameraMetadata& sessionParams) {
+    if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
+        const int pid = getCallingPid();
+        const int uid = getCallingUid();
         ALOGE("%s: Permission Denial: can't inject session params pid=%d, uid=%d",
                 __FUNCTION__, pid, uid);
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
                 "Permission Denial: no permission to inject session params");
     }
 
+    // Do not allow session params injection for a virtual camera.
+    auto [deviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+    if (deviceId != kDefaultDeviceId) {
+        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
+                "Cannot inject session params for a virtual camera");
+    }
+
     std::unique_ptr<AutoConditionLock> serviceLockWrapper =
             AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
 
@@ -1129,57 +1187,40 @@
     return Status::ok();
 }
 
-std::vector<std::string> CameraService::findOriginalIdsForRemappedCameraId(
-    const std::string& inputCameraId, int clientUid) {
-    std::string packageName = getPackageNameFromUid(clientUid);
-    std::vector<std::string> cameraIds;
-    Mutex::Autolock lock(mCameraIdRemappingLock);
-    if (auto packageMapIter = mCameraIdRemapping.find(packageName);
-        packageMapIter != mCameraIdRemapping.end()) {
-        for (auto& [id0, id1]: packageMapIter->second) {
-            if (id1 == inputCameraId) {
-                cameraIds.push_back(id0);
-            }
+std::optional<std::string> CameraService::resolveCameraId(
+        const std::string& inputCameraId,
+        int32_t deviceId,
+        int32_t devicePolicy) {
+    if ((deviceId == kDefaultDeviceId)
+            || (devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
+        auto [storedDeviceId, _] =
+                mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(inputCameraId);
+        if (storedDeviceId != kDefaultDeviceId) {
+            // Accessing a virtual camera from a default-policy device context must fail.
+            std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                    inputCameraId.c_str(), deviceId);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return std::nullopt;
         }
-    }
-    return cameraIds;
-}
-
-std::string CameraService::resolveCameraId(
-    const std::string& inputCameraId,
-    int clientUid,
-    const std::string& packageName) {
-    std::string packageNameVal = packageName;
-    if (packageName.empty()) {
-        packageNameVal = getPackageNameFromUid(clientUid);
-    }
-    if (clientUid < AID_APP_START || packageNameVal.empty()) {
-        // We shouldn't remap cameras for processes with system/vendor UIDs.
         return inputCameraId;
     }
-    Mutex::Autolock lock(mCameraIdRemappingLock);
-    if (auto packageMapIter = mCameraIdRemapping.find(packageNameVal);
-        packageMapIter != mCameraIdRemapping.end()) {
-        auto packageMap = packageMapIter->second;
-        if (auto replacementIdIter = packageMap.find(inputCameraId);
-            replacementIdIter != packageMap.end()) {
-            ALOGI("%s: resolveCameraId: remapping cameraId %s for %s to %s",
-                    __FUNCTION__, inputCameraId.c_str(),
-                    packageNameVal.c_str(),
-                    replacementIdIter->second.c_str());
-            return replacementIdIter->second;
-        }
-    }
-    return inputCameraId;
+
+    return mVirtualDeviceCameraIdMapper.getActualCameraId(deviceId, inputCameraId);
 }
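// Illustrative sketch, not part of this change: the caller pattern expected for
// the device-aware resolveCameraId() overload (it mirrors the call sites in this
// patch; the exact error message is an assumption):
//
//     std::optional<std::string> resolved = resolveCameraId(
//             unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
//     if (!resolved.has_value()) {
//         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
//                 "Invalid camera id for the given device id");
//     }
//     const std::string cameraId = resolved.value();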
 
-Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait,
+Status CameraService::getCameraInfo(int cameraId, int rotationOverride,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         CameraInfo* cameraInfo) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
-    std::string unresolvedCameraId = cameraIdIntToStrLocked(cameraId);
-    std::string cameraIdStr = resolveCameraId(
-            unresolvedCameraId, CameraThreadState::getCallingUid());
+    std::string cameraIdStr =
+            cameraIdIntToStrLocked(cameraId, clientAttribution.deviceId, devicePolicy);
+    if (cameraIdStr.empty()) {
+        std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
+                cameraId, clientAttribution.deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
 
     if (shouldRejectSystemCameraConnection(cameraIdStr)) {
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera"
@@ -1192,7 +1233,7 @@
                 "Camera subsystem is not available");
     }
     bool hasSystemCameraPermissions = hasPermissionsForSystemCamera(std::to_string(cameraId),
-            CameraThreadState::getCallingPid(), CameraThreadState::getCallingUid());
+            getCallingPid(), getCallingUid());
     int cameraIdBound = mNumberOfCamerasWithoutSystemCamera;
     if (hasSystemCameraPermissions) {
         cameraIdBound = mNumberOfCameras;
@@ -1205,7 +1246,7 @@
     Status ret = Status::ok();
     int portraitRotation;
     status_t err = mCameraProviderManager->getCameraInfo(
-            cameraIdStr, overrideToPortrait, &portraitRotation, cameraInfo);
+            cameraIdStr, rotationOverride, &portraitRotation, cameraInfo);
     if (err != OK) {
         ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                 "Error retrieving camera info from device %d: %s (%d)", cameraId,
@@ -1217,40 +1258,43 @@
     return ret;
 }
 
-std::string CameraService::cameraIdIntToStrLocked(int cameraIdInt) {
-    const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
-    auto callingPid = CameraThreadState::getCallingPid();
-    auto callingUid = CameraThreadState::getCallingUid();
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = checkPermission(std::to_string(cameraIdInt),
-                sSystemCameraPermission, attributionSource, std::string(),
-                AppOpsManager::OP_NONE);
-    if (checkPermissionForSystemCamera || getpid() == callingPid) {
-        deviceIds = &mNormalDeviceIds;
+std::string CameraService::cameraIdIntToStrLocked(int cameraIdInt,
+        int32_t deviceId, int32_t devicePolicy) {
+    if (vd_flags::camera_device_awareness() && (deviceId != kDefaultDeviceId)
+            && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
+        std::optional<std::string> cameraIdOptional =
+                mVirtualDeviceCameraIdMapper.getActualCameraId(cameraIdInt, deviceId);
+        return cameraIdOptional.has_value() ? cameraIdOptional.value() : std::string{};
     }
-    if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(deviceIds->size())) {
-        ALOGE("%s: input id %d invalid: valid range  (0, %zu)",
-                __FUNCTION__, cameraIdInt, deviceIds->size());
+
+    const std::vector<std::string> *cameraIds = &mNormalDeviceIdsWithoutSystemCamera;
+    auto callingPid = getCallingPid();
+    auto callingUid = getCallingUid();
+    bool systemCameraPermissions = hasPermissionsForSystemCamera(std::to_string(cameraIdInt),
+            callingPid, callingUid, /* checkCameraPermissions= */ false);
+    if (systemCameraPermissions || getpid() == callingPid) {
+        cameraIds = &mNormalDeviceIds;
+    }
+    if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(cameraIds->size())) {
+        ALOGE("%s: input id %d invalid: valid range (0, %zu)",
+                __FUNCTION__, cameraIdInt, cameraIds->size());
         return std::string{};
     }
 
-    return (*deviceIds)[cameraIdInt];
+    return (*cameraIds)[cameraIdInt];
 }
 
-std::string CameraService::cameraIdIntToStr(int cameraIdInt) {
+std::string CameraService::cameraIdIntToStr(int cameraIdInt, int32_t deviceId,
+        int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
-    return cameraIdIntToStrLocked(cameraIdInt);
+    return cameraIdIntToStrLocked(cameraIdInt, deviceId, devicePolicy);
 }
 
 Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
-        int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
+        int targetSdkVersion, int rotationOverride, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
 
-    const std::string cameraId = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
-
     if (!cameraInfo) {
         ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "cameraInfo is NULL");
@@ -1263,18 +1307,26 @@
                 "Camera subsystem is not available");;
     }
 
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
     if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera"
                 "characteristics for system only device %s: ", cameraId.c_str());
     }
 
-    Status ret{};
-
     bool overrideForPerfClass =
             SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
                     cameraId, targetSdkVersion);
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            cameraId, overrideForPerfClass, cameraInfo, overrideToPortrait);
+            cameraId, overrideForPerfClass, cameraInfo, rotationOverride);
     if (res != OK) {
         if (res == NAME_NOT_FOUND) {
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
@@ -1288,63 +1340,32 @@
                     strerror(-res), res);
         }
     }
-    SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
-    if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
-        ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
-        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera kind "
-                "for device %s", cameraId.c_str());
-    }
-    int callingPid = CameraThreadState::getCallingPid();
-    int callingUid = CameraThreadState::getCallingUid();
-    std::vector<int32_t> tagsRemoved;
-    // If it's not calling from cameraserver, check the permission only if
-    // android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
-    // it would've already been checked in shouldRejectSystemCameraConnection.
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission,
-            attributionSource, std::string(), AppOpsManager::OP_NONE);
-    if ((callingPid != getpid()) &&
-            (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
-            !checkPermissionForCamera) {
-        res = cameraInfo->removePermissionEntries(
-                mCameraProviderManager->getProviderTagIdLocked(cameraId),
-                &tagsRemoved);
-        if (res != OK) {
-            cameraInfo->clear();
-            return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Failed to remove camera"
-                    " characteristics needing camera permission for device %s: %s (%d)",
-                    cameraId.c_str(), strerror(-res), res);
-        }
-    }
 
-    if (!tagsRemoved.empty()) {
-        res = cameraInfo->update(ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION,
-                tagsRemoved.data(), tagsRemoved.size());
-        if (res != OK) {
-            cameraInfo->clear();
-            return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Failed to insert camera "
-                    "keys needing permission for device %s: %s (%d)", cameraId.c_str(),
-                    strerror(-res), res);
-        }
-    }
-
-    return ret;
+    return filterSensitiveMetadataIfNeeded(cameraId, cameraInfo);
 }
 
 Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId,
-        int32_t* torchStrength) {
+        const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy, int32_t* torchStrength) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
-    const std::string cameraId = resolveCameraId(
-        unresolvedCameraId, CameraThreadState::getCallingUid());
+
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
     if (!mInitialized) {
         ALOGE("%s: Camera HAL couldn't be initialized.", __FUNCTION__);
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera HAL couldn't be initialized.");
     }
 
-    if(torchStrength == NULL) {
+    if (torchStrength == NULL) {
         ALOGE("%s: strength level must not be null.", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Strength level should not be null.");
     }
@@ -1402,7 +1423,8 @@
 }
 
 std::pair<int, IPCTransport> CameraService::getDeviceVersion(const std::string& cameraId,
-        bool overrideToPortrait, int* portraitRotation, int* facing, int* orientation) {
+        int rotationOverride, int* portraitRotation, int* facing,
+        int* orientation) {
     ATRACE_CALL();
 
     int deviceVersion = 0;
@@ -1420,7 +1442,7 @@
 
     hardware::CameraInfo info;
     if (facing) {
-        res = mCameraProviderManager->getCameraInfo(cameraId, overrideToPortrait,
+        res = mCameraProviderManager->getCameraInfo(cameraId, rotationOverride,
                 portraitRotation, &info);
         if (res != OK) {
             return std::make_pair(-1, IPCTransport::INVALID);
@@ -1456,7 +1478,7 @@
         const std::optional<std::string>& featureId,  const std::string& cameraId,
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
-        apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
+        apiLevel effectiveApiLevel, bool overrideForPerfClass, int rotationOverride,
         bool forceSlowJpegMode, const std::string& originalCameraId,
         /*out*/sp<BasicClient>* client) {
     // For HIDL devices
@@ -1490,20 +1512,21 @@
     if (effectiveApiLevel == API_1) { // Camera1 API route
         sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
         *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
-                packageName, featureId, cameraId,
+                cameraService->mAttributionAndPermissionUtils, packageName, featureId, cameraId,
                 api1CameraId, facing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                clientPid, clientUid, servicePid, overrideForPerfClass, rotationOverride,
                 forceSlowJpegMode);
-        ALOGI("%s: Camera1 API (legacy), override to portrait %d, forceSlowJpegMode %d",
-                __FUNCTION__, overrideToPortrait, forceSlowJpegMode);
+        ALOGI("%s: Camera1 API (legacy), rotationOverride %d, forceSlowJpegMode %d",
+                __FUNCTION__, rotationOverride, forceSlowJpegMode);
     } else { // Camera2 API route
         sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                 static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
         *client = new CameraDeviceClient(cameraService, tmp,
-                cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
+                cameraService->mCameraServiceProxyWrapper,
+                cameraService->mAttributionAndPermissionUtils, packageName, systemNativeClient,
                 featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
-                overrideForPerfClass, overrideToPortrait, originalCameraId);
-        ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
+                overrideForPerfClass, rotationOverride, originalCameraId);
+        ALOGI("%s: Camera2 API, rotationOverride %d", __FUNCTION__, rotationOverride);
     }
     return Status::ok();
 }
@@ -1519,7 +1542,7 @@
             s << ", " << std::to_string(i);
         }
     }
-    return std::move(s.str());
+    return s.str();
 }
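// Illustrative note, not part of this change: s.str() already yields a prvalue,
// so wrapping it in std::move() suppresses copy elision and triggers
// -Wpessimizing-move; returning the temporary directly lets the compiler elide
// the copy (or, at worst, move it).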
 
 int32_t CameraService::mapToInterface(TorchModeStatus status) {
@@ -1535,7 +1558,7 @@
             serviceStatus = ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON;
             break;
         default:
-            ALOGW("Unknown new flash status: %d", status);
+            ALOGW("Unknown new flash status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -1553,7 +1576,7 @@
             serviceStatus = StatusInternal::ENUMERATING;
             break;
         default:
-            ALOGW("Unknown new HAL device status: %d", status);
+            ALOGW("Unknown new HAL device status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -1577,23 +1600,27 @@
             serviceStatus = ICameraServiceListener::STATUS_UNKNOWN;
             break;
         default:
-            ALOGW("Unknown new internal device status: %d", status);
+            ALOGW("Unknown new internal device status: %d", eToI(status));
     }
     return serviceStatus;
 }
 
 Status CameraService::initializeShimMetadata(int cameraId) {
-    int uid = CameraThreadState::getCallingUid();
+    int uid = getCallingUid();
 
     std::string cameraIdStr = std::to_string(cameraId);
     Status ret = Status::ok();
     sp<Client> tmp = nullptr;
+
+    logConnectionAttempt(getCallingPid(), kServiceName, cameraIdStr, API_1);
+
     if (!(ret = connectHelper<ICameraClient,Client>(
             sp<ICameraClient>{nullptr}, cameraIdStr, cameraId,
             kServiceName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
-            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
-            /*forceSlowJpegMode*/false, cameraIdStr, /*out*/ tmp)
+            /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+            /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+            /*forceSlowJpegMode*/false, cameraIdStr, /*isNonSystemNdk*/ false, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
     }
@@ -1613,9 +1640,7 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Parameters must not be null");
     }
 
-    std::string unresolvedCameraId = std::to_string(cameraId);
-    std::string cameraIdStr = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
+    std::string cameraIdStr = std::to_string(cameraId);
 
     // Check if we already have parameters
     {
@@ -1634,9 +1659,9 @@
         }
     }
 
-    int64_t token = CameraThreadState::clearCallingIdentity();
+    int64_t token = clearCallingIdentity();
     ret = initializeShimMetadata(cameraId);
-    CameraThreadState::restoreCallingIdentity(token);
+    restoreCallingIdentity(token);
     if (!ret.isOk()) {
         // Error already logged by callee
         return ret;
@@ -1664,56 +1689,21 @@
     return STATUS_ERROR(ERROR_INVALID_OPERATION, "Unable to initialize legacy parameters");
 }
 
-// Can camera service trust the caller based on the calling UID?
-static bool isTrustedCallingUid(uid_t uid) {
-    switch (uid) {
-        case AID_MEDIA:        // mediaserver
-        case AID_CAMERASERVER: // cameraserver
-        case AID_RADIO:        // telephony
-            return true;
-        default:
-            return false;
-    }
-}
-
-static status_t getUidForPackage(const std::string &packageName, int userId, /*inout*/uid_t& uid,
-        int err) {
-    PermissionController pc;
-    uid = pc.getPackageUid(toString16(packageName), 0);
-    if (uid <= 0) {
-        ALOGE("Unknown package: '%s'", packageName.c_str());
-        dprintf(err, "Unknown package: '%s'\n", packageName.c_str());
-        return BAD_VALUE;
-    }
-
-    if (userId < 0) {
-        ALOGE("Invalid user: %d", userId);
-        dprintf(err, "Invalid user: %d\n", userId);
-        return BAD_VALUE;
-    }
-
-    uid = multiuser_get_uid(userId, uid);
-    return NO_ERROR;
-}
-
 Status CameraService::validateConnectLocked(const std::string& cameraId,
-        const std::string& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid,
-        /*out*/int& originalClientPid) const {
+        const std::string& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid) const {
 
 #ifdef __BRILLO__
     UNUSED(clientName8);
     UNUSED(clientUid);
     UNUSED(clientPid);
-    UNUSED(originalClientPid);
 #else
-    Status allowed = validateClientPermissionsLocked(cameraId, clientName8, clientUid, clientPid,
-            originalClientPid);
+    Status allowed = validateClientPermissionsLocked(cameraId, clientName8, clientUid, clientPid);
     if (!allowed.isOk()) {
         return allowed;
     }
 #endif  // __BRILLO__
 
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
 
     if (!mInitialized) {
         ALOGE("CameraService::connect X (PID %d) rejected (camera HAL module not loaded)",
@@ -1745,12 +1735,9 @@
 }
 
 Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
-        const std::string& clientName, int& clientUid, int& clientPid,
-        /*out*/int& originalClientPid) const {
-    AttributionSourceState attributionSource{};
-
-    int callingPid = CameraThreadState::getCallingPid();
-    int callingUid = CameraThreadState::getCallingUid();
+        const std::string& clientName, int& clientUid, int& clientPid) const {
+    int callingPid = getCallingPid();
+    int callingUid = getCallingUid();
 
     // Check if we can trust clientUid
     if (clientUid == USE_CALLING_UID) {
@@ -1762,7 +1749,7 @@
                 "Untrusted caller (calling PID %d, UID %d) trying to "
                 "forward camera access to camera %s for client %s (PID %d, UID %d)",
                 callingPid, callingUid, cameraId.c_str(),
-                clientName.c_str(), clientUid, clientPid);
+                clientName.c_str(), clientPid, clientUid);
     }
 
     // Check if we can trust clientPid
@@ -1775,7 +1762,7 @@
                 "Untrusted caller (calling PID %d, UID %d) trying to "
                 "forward camera access to camera %s for client %s (PID %d, UID %d)",
                 callingPid, callingUid, cameraId.c_str(),
-                clientName.c_str(), clientUid, clientPid);
+                clientName.c_str(), clientPid, clientUid);
     }
 
     if (shouldRejectSystemCameraConnection(cameraId)) {
@@ -1789,23 +1776,22 @@
         ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "No camera device with ID \"%s\""
                 "found while trying to query device kind", cameraId.c_str());
-
     }
 
+    // Get the device id that owns this camera.
+    auto [deviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
     // If it's not calling from cameraserver, check the permission if the
     // device isn't a system only camera (shouldRejectSystemCameraConnection already checks for
     // android.permission.SYSTEM_CAMERA for system only camera devices).
-    attributionSource.pid = clientPid;
-    attributionSource.uid = clientUid;
-    attributionSource.packageName = clientName;
-    bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission, attributionSource,
-            std::string(), AppOpsManager::OP_NONE);
+    bool checkPermissionForCamera =
+            hasPermissionsForCamera(cameraId, clientPid, clientUid, clientName, deviceId);
     if (callingPid != getpid() &&
                 (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
         return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" without camera permission",
-                clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+                clientName.c_str(), clientPid, clientUid, cameraId.c_str());
     }
 
     // Make sure the UID is in an active state to use the camera
@@ -1816,7 +1802,7 @@
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" from background ("
                 "calling UID %d proc state %" PRId32 ")",
-                clientName.c_str(), clientUid, clientPid, cameraId.c_str(),
+                clientName.c_str(), clientPid, clientUid, cameraId.c_str(),
                 callingUid, procState);
     }
 
@@ -1829,19 +1815,17 @@
         ALOGE("Access Denial: cannot use the camera when sensor privacy is enabled");
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" when sensor privacy "
-                "is enabled", clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+                "is enabled", clientName.c_str(), clientPid, clientUid, cameraId.c_str());
     }
 
-    // Only use passed in clientPid to check permission. Use calling PID as the client PID that's
-    // connected to camera service directly.
-    originalClientPid = clientPid;
-    clientPid = callingPid;
-
     userid_t clientUserId = multiuser_get_user_id(clientUid);
 
+    // Only use passed in clientPid to check permission. Use calling PID as the client PID that's
+    // connected to camera service directly.
+
     // For non-system clients : Only allow clients who are being used by the current foreground
     // device user, unless calling from our own process.
-    if (!doesClientHaveSystemUid() && callingPid != getpid() &&
+    if (!callerHasSystemUid() && callingPid != getpid() &&
             (mAllowedUsers.find(clientUserId) == mAllowedUsers.end())) {
         ALOGE("CameraService::connect X (PID %d) rejected (cannot connect from "
                 "device user %d, currently allowed device users: %s)", callingPid, clientUserId,
@@ -1855,13 +1839,14 @@
         // If the System User tries to access the camera when the device is running in
         // headless system user mode, ensure that client has the required permission
         // CAMERA_HEADLESS_SYSTEM_USER.
-        if (isHeadlessSystemUserMode() && (clientUserId == USER_SYSTEM) &&
-                !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
-            ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
+        if (isHeadlessSystemUserMode()
+                && (clientUserId == USER_SYSTEM)
+                && !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
+            ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", callingPid, clientUid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                     "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" as Headless System \
                     User without camera headless system user permission",
-                    clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+                    clientName.c_str(), callingPid, clientUid, cameraId.c_str());
         }
     }
 
@@ -1870,7 +1855,7 @@
 
 status_t CameraService::checkIfDeviceIsUsable(const std::string& cameraId) const {
     auto cameraState = getCameraState(cameraId);
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
     if (cameraState == nullptr) {
         ALOGE("CameraService::connect X (PID %d) rejected (invalid camera ID %s)", callingPid,
                 cameraId.c_str());
@@ -1968,7 +1953,7 @@
 
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder = sm->checkService(String16(kProcessInfoServiceName));
-        if (!binder && isAutomotivePrivilegedClient(CameraThreadState::getCallingUid())) {
+        if (!binder && isAutomotivePrivilegedClient(getCallingUid())) {
             // If processinfo service is not available and the client is automotive privileged
             // client used for safety critical uses cases such as rear-view and surround-view which
             // needs to be available before android boot completes, then use the hardcoded values
@@ -2101,7 +2086,7 @@
     mServiceLock.unlock();
 
     // Clear caller identity temporarily so client disconnect PID checks work correctly
-    int64_t token = CameraThreadState::clearCallingIdentity();
+    int64_t token = clearCallingIdentity();
 
     // Destroy evicted clients
     for (auto& i : evictedClients) {
@@ -2109,7 +2094,7 @@
         i->getValue()->disconnect(); // Clients will remove themselves from the active client list
     }
 
-    CameraThreadState::restoreCallingIdentity(token);
+    restoreCallingIdentity(token);
 
     for (const auto& i : evictedClients) {
         ALOGV("%s: Waiting for disconnect to complete for client for device %s (PID %" PRId32 ")",
@@ -2146,30 +2131,40 @@
 Status CameraService::connect(
         const sp<ICameraClient>& cameraClient,
         int api1CameraId,
-        const std::string& clientPackageName,
-        int clientUid,
-        int clientPid,
         int targetSdkVersion,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool forceSlowJpegMode,
+        const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy,
         /*out*/
         sp<ICamera>* device) {
-
     ATRACE_CALL();
     Status ret = Status::ok();
 
-    std::string unresolvedCameraId = cameraIdIntToStr(api1CameraId);
-    std::string cameraIdStr = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
+    std::string cameraIdStr =
+            cameraIdIntToStr(api1CameraId, clientAttribution.deviceId, devicePolicy);
+    if (cameraIdStr.empty()) {
+        std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
+                api1CameraId, clientAttribution.deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+
+    std::string clientPackageNameMaybe = clientAttribution.packageName.value_or("");
+    bool isNonSystemNdk = clientPackageNameMaybe.size() == 0;
+    std::string clientPackageName = resolvePackageName(clientAttribution.uid,
+            clientPackageNameMaybe);
+    logConnectionAttempt(clientAttribution.pid, clientPackageName, cameraIdStr, API_1);
 
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
-            clientPackageName, /*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
+            clientPackageName, /*systemNativeClient*/ false, {},
+            clientAttribution.uid, clientAttribution.pid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
-            overrideToPortrait, forceSlowJpegMode, cameraIdStr, /*out*/client);
+            rotationOverride, forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*out*/client);
 
-    if(!ret.isOk()) {
-        logRejected(cameraIdStr, CameraThreadState::getCallingPid(), clientPackageName,
+    if (!ret.isOk()) {
+        logRejected(cameraIdStr, getCallingPid(), clientAttribution.packageName.value_or(""),
                 toStdString(ret.toString8()));
         return ret;
     }
@@ -2180,8 +2175,8 @@
     const auto& mActivityManager = getActivityManager();
     if (mActivityManager) {
         mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
-            CameraThreadState::getCallingUid(),
-            CameraThreadState::getCallingPid());
+            getCallingUid(),
+            getCallingPid());
     }
 
     return ret;
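// Illustrative sketch, not part of this change: connect() now receives an
// AttributionSourceState instead of separate package/uid/pid arguments. A
// hypothetical caller-side construction (field values are assumptions):
//
//     AttributionSourceState clientAttribution;
//     clientAttribution.uid = callingUid;              // or USE_CALLING_UID
//     clientAttribution.pid = callingPid;              // or USE_CALLING_PID
//     clientAttribution.packageName = "com.example.camera.app";
//     clientAttribution.deviceId = kDefaultDeviceId;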
@@ -2211,9 +2206,9 @@
     //    and the serving thread is a non hwbinder thread, the client must have
     //    android.permission.SYSTEM_CAMERA permissions to connect.
 
-    int cPid = CameraThreadState::getCallingPid();
-    int cUid = CameraThreadState::getCallingUid();
-    bool systemClient = doesClientHaveSystemUid();
+    int cPid = getCallingPid();
+    int cUid = getCallingUid();
+    bool systemClient = callerHasSystemUid();
     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(cameraId, &systemCameraKind) != OK) {
         // This isn't a known camera ID, so it's not a system camera
@@ -2222,7 +2217,7 @@
     }
 
     // (1) Cameraserver trying to connect, accept.
-    if (CameraThreadState::getCallingPid() == getpid()) {
+    if (isCallerCameraServerNotDelegating()) {
         return false;
     }
     // (2)
@@ -2247,42 +2242,50 @@
 Status CameraService::connectDevice(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
         const std::string& unresolvedCameraId,
-        const std::string& clientPackageName,
-        const std::optional<std::string>& clientFeatureId,
-        int clientUid, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait,
+        int oomScoreOffset, int targetSdkVersion,
+        int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
-
     ATRACE_CALL();
+    RunThreadWithRealtimePriority priorityBump;
     Status ret = Status::ok();
     sp<CameraDeviceClient> client = nullptr;
-    std::string clientPackageNameAdj = clientPackageName;
-    int callingPid = CameraThreadState::getCallingPid();
+    std::string clientPackageNameMaybe = clientAttribution.packageName.value_or("");
+    int callingPid = getCallingPid();
+    int callingUid = getCallingUid();
     bool systemNativeClient = false;
-    if (doesClientHaveSystemUid() && (clientPackageNameAdj.size() == 0)) {
-        std::string systemClient =
-                fmt::sprintf("client.pid<%d>", CameraThreadState::getCallingPid());
-        clientPackageNameAdj = systemClient;
+    if (callerHasSystemUid() && (clientPackageNameMaybe.size() == 0)) {
+        std::string systemClient = fmt::sprintf("client.pid<%d>", callingPid);
+        clientPackageNameMaybe = systemClient;
         systemNativeClient = true;
     }
-    const std::string cameraId = resolveCameraId(
-            unresolvedCameraId,
-            CameraThreadState::getCallingUid(),
-            clientPackageNameAdj);
+
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
+    bool isNonSystemNdk = clientPackageNameMaybe.size() == 0;
+    std::string clientPackageName = resolvePackageName(clientAttribution.uid,
+            clientPackageNameMaybe);
+    logConnectionAttempt(clientAttribution.pid, clientPackageName, cameraId, API_2);
 
     if (oomScoreOffset < 0) {
         std::string msg =
                 fmt::sprintf("Cannot increase the priority of a client %s pid %d for "
-                        "camera id %s", clientPackageNameAdj.c_str(), callingPid,
+                        "camera id %s", clientPackageName.c_str(), callingPid,
                         cameraId.c_str());
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
-    userid_t clientUserId = multiuser_get_user_id(clientUid);
-    int callingUid = CameraThreadState::getCallingUid();
-    if (clientUid == USE_CALLING_UID) {
+    userid_t clientUserId = multiuser_get_user_id(clientAttribution.uid);
+    if (clientAttribution.uid == USE_CALLING_UID) {
         clientUserId = multiuser_get_user_id(callingUid);
     }
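For context: resolveCameraId() is now keyed by the caller's deviceId and devicePolicy and returns std::optional<std::string>, so every entry point has to reject an unresolvable id. The repeated pattern could be read as a single helper; a sketch under that assumption (validateAndResolveCameraId is hypothetical, not part of this change):

    // Hypothetical member helper restating the pattern above.
    binder::Status CameraService::validateAndResolveCameraId(const std::string& unresolvedCameraId,
            int32_t deviceId, int32_t devicePolicy, /*out*/ std::string* cameraId) {
        std::optional<std::string> resolved =
                resolveCameraId(unresolvedCameraId, deviceId, devicePolicy);
        if (!resolved.has_value()) {
            std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
                    unresolvedCameraId.c_str(), deviceId);
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }
        *cameraId = resolved.value();
        return binder::Status::ok();
    }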
 
@@ -2298,23 +2301,23 @@
     // enforce system camera permissions
     if (oomScoreOffset > 0
             && !hasPermissionsForSystemCamera(cameraId, callingPid,
-                    CameraThreadState::getCallingUid())
-            && !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
+                    callingUid)
+            && !isTrustedCallingUid(callingUid)) {
         std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
                         "camera id %s without SYSTEM_CAMERA permissions",
-                        clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
+                        clientPackageName.c_str(), callingPid, cameraId.c_str());
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(ERROR_PERMISSION_DENIED, msg.c_str());
     }
 
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
-            cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
-            clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false, unresolvedCameraId,
-            /*out*/client);
+            cameraId, /*api1CameraId*/-1, clientPackageName, systemNativeClient,
+            clientAttribution.attributionTag, clientAttribution.uid, USE_CALLING_PID, API_2,
+            /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
+            /*forceSlowJpegMode*/false, unresolvedCameraId, isNonSystemNdk, /*out*/client);
 
-    if(!ret.isOk()) {
-        logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
+    if (!ret.isOk()) {
+        logRejected(cameraId, callingPid, clientPackageName, toStdString(ret.toString8()));
         return ret;
     }
 
@@ -2339,25 +2342,69 @@
     const auto& mActivityManager = getActivityManager();
     if (mActivityManager) {
         mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
-            CameraThreadState::getCallingUid(),
-            CameraThreadState::getCallingPid());
+            callingUid,
+            callingPid);
     }
     return ret;
 }
 
-std::string CameraService::getPackageNameFromUid(int clientUid) {
+bool CameraService::isCameraPrivacyEnabled(const String16& packageName, const std::string& cam_id,
+        int callingPid, int callingUid) {
+    if (!isAutomotiveDevice()) {
+        return mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+    }
+
+    // Automotive privileged client AID_AUTOMOTIVE_EVS using an exterior system camera for
+    // safety-critical use cases cannot be disabled and is exempt from the camera privacy policy.
+    if ((isAutomotivePrivilegedClient(callingUid) && isAutomotiveExteriorSystemCamera(cam_id))) {
+        ALOGI("Camera privacy cannot be enabled for automotive privileged client %d "
+                "using camera %s", callingUid, cam_id.c_str());
+        return false;
+    }
+
+    if (mSensorPrivacyPolicy->isCameraPrivacyEnabled(packageName)) {
+        return true;
+    } else if (mSensorPrivacyPolicy->getCameraPrivacyState() == SensorPrivacyManager::DISABLED) {
+        return false;
+    } else if (mSensorPrivacyPolicy->getCameraPrivacyState()
+            == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
+        if (hasPermissionsForCameraPrivacyAllowlist(callingPid, callingUid)) {
+            return false;
+        } else {
+            return true;
+        }
+    }
+    return false;
+}
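For context: the per-package isCameraPrivacyEnabled() overload layers the new allowlist states on top of the global toggle. The non-automotive branches above reduce to the following decision, restated as a small standalone function (illustrative only, not a behavior change):

    // Restatement of the allowlist branches above.
    bool isMutedByPrivacyPolicy(bool perPackagePrivacyEnabled, int globalPrivacyState,
            bool callerOnAllowlist) {
        if (perPackagePrivacyEnabled) return true;                        // package-level mute wins
        if (globalPrivacyState == SensorPrivacyManager::DISABLED) return false;
        if (globalPrivacyState == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
            return !callerOnAllowlist;                                    // allowlisted callers stay unmuted
        }
        return false;
    }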
+
+std::string CameraService::getPackageNameFromUid(int clientUid) const {
     std::string packageName("");
 
-    sp<IServiceManager> sm = defaultServiceManager();
-    sp<IBinder> binder = sm->getService(toString16(kPermissionServiceName));
-    if (binder == 0) {
-        ALOGE("Cannot get permission service");
+    sp<IPermissionController> permCtrl;
+    if (flags::cache_permission_services()) {
+        permCtrl = getPermissionController();
+    } else {
+        sp<IServiceManager> sm = defaultServiceManager();
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+        // Using deprecated function to preserve functionality until the
+        // cache_permission_services flag is removed.
+        sp<IBinder> binder = sm->getService(toString16(kPermissionServiceName));
+#pragma clang diagnostic pop
+        if (binder == 0) {
+            ALOGE("Cannot get permission service");
+            permCtrl = nullptr;
+        } else {
+            permCtrl = interface_cast<IPermissionController>(binder);
+        }
+    }
+
+    if (permCtrl == nullptr) {
         // Return empty package name and the further interaction
         // with camera will likely fail
         return packageName;
     }
 
-    sp<IPermissionController> permCtrl = interface_cast<IPermissionController>(binder);
     Vector<String16> packages;
 
     permCtrl->getPackagesForUid(clientUid, packages);
@@ -2375,38 +2422,44 @@
     return packageName;
 }
 
-template<class CALLBACK, class CLIENT>
-Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
-        int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
-        const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
-        apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
-        /*out*/sp<CLIENT>& device) {
-    binder::Status ret = binder::Status::ok();
+void CameraService::logConnectionAttempt(int clientPid, const std::string& clientPackageName,
+        const std::string& cameraId, apiLevel effectiveApiLevel) const {
+    int packagePid = (clientPid == USE_CALLING_PID) ?
+        getCallingPid() : clientPid;
+    ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
+            "Camera API version %d", packagePid, clientPackageName.c_str(), cameraId.c_str(),
+            static_cast<int>(effectiveApiLevel));
+}
 
-    bool isNonSystemNdk = false;
-    std::string clientPackageName;
-    int packageUid = (clientUid == USE_CALLING_UID) ?
-            CameraThreadState::getCallingUid() : clientUid;
+std::string CameraService::resolvePackageName(int clientUid,
+        const std::string& clientPackageNameMaybe) const {
     if (clientPackageNameMaybe.size() <= 0) {
+        int packageUid = (clientUid == USE_CALLING_UID) ?
+                getCallingUid() : clientUid;
         // NDK calls don't come with package names, but we need one for various cases.
         // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
         // do exist. For all authentication cases, all packages under the same UID get the
         // same permissions, so picking any associated package name is sufficient. For some
         // other cases, this may give inaccurate names for clients in logs.
-        isNonSystemNdk = true;
-        clientPackageName = getPackageNameFromUid(packageUid);
+        return getPackageNameFromUid(packageUid);
     } else {
-        clientPackageName = clientPackageNameMaybe;
+        return clientPackageNameMaybe;
     }
+}
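For context: the package-name fallback and the connection log line were factored out of connectHelper() into resolvePackageName() and logConnectionAttempt(), so connect() and connectDevice() can share them before calling the template. How the pieces compose at an entry point (mirrors the calls above, nothing new):

    std::string clientPackageNameMaybe = clientAttribution.packageName.value_or("");
    bool isNonSystemNdk = clientPackageNameMaybe.empty();   // NDK callers carry no package name
    std::string clientPackageName =
            resolvePackageName(clientAttribution.uid, clientPackageNameMaybe);
    logConnectionAttempt(clientAttribution.pid, clientPackageName, cameraIdStr, API_1);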
 
-    int originalClientPid = 0;
+template<class CALLBACK, class CLIENT>
+Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
+        int api1CameraId, const std::string& clientPackageName, bool systemNativeClient,
+        const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
+        apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
+        int rotationOverride, bool forceSlowJpegMode,
+        const std::string& originalCameraId, bool isNonSystemNdk, /*out*/sp<CLIENT>& device) {
+    binder::Status ret = binder::Status::ok();
 
+    int packageUid = (clientUid == USE_CALLING_UID) ?
+            getCallingUid() : clientUid;
     int packagePid = (clientPid == USE_CALLING_PID) ?
-        CameraThreadState::getCallingPid() : clientPid;
-    ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
-            "Camera API version %d", packagePid, clientPackageName.c_str(), cameraId.c_str(),
-            static_cast<int>(effectiveApiLevel));
+            getCallingPid() : clientPid;
 
     nsecs_t openTimeNs = systemTime();
 
@@ -2428,8 +2481,8 @@
         }
 
         // Enforce client permissions and do basic validity checks
-        if(!(ret = validateConnectLocked(cameraId, clientPackageName,
-                /*inout*/clientUid, /*inout*/clientPid, /*out*/originalClientPid)).isOk()) {
+        if (!(ret = validateConnectLocked(cameraId, clientPackageName,
+                /*inout*/clientUid, /*inout*/clientPid)).isOk()) {
             return ret;
         }
 
@@ -2446,7 +2499,7 @@
 
         sp<BasicClient> clientTmp = nullptr;
         std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>> partial;
-        if ((err = handleEvictionsLocked(cameraId, originalClientPid, effectiveApiLevel,
+        if ((err = handleEvictionsLocked(cameraId, clientPid, effectiveApiLevel,
                 IInterface::asBinder(cameraCb), clientPackageName, oomScoreOffset,
                 systemNativeClient, /*out*/&clientTmp, /*out*/&partial)) != NO_ERROR) {
             switch (err) {
@@ -2480,7 +2533,7 @@
 
         int portraitRotation;
         auto deviceVersionAndTransport =
-                getDeviceVersion(cameraId, overrideToPortrait, /*out*/&portraitRotation,
+                getDeviceVersion(cameraId, rotationOverride, /*out*/&portraitRotation,
                         /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.c_str());
@@ -2491,11 +2544,14 @@
         sp<BasicClient> tmp = nullptr;
         bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                 mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+
+        // Only use the passed-in clientPid for the permission check. Use the calling PID as the
+        // client PID that is connected to the camera service directly.
         if(!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
                 clientFeatureId, cameraId, api1CameraId, facing,
-                orientation, clientPid, clientUid, getpid(),
+                orientation, getCallingPid(), clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
-                overrideToPortrait, forceSlowJpegMode, originalCameraId,
+                rotationOverride, forceSlowJpegMode, originalCameraId,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -2556,73 +2612,103 @@
         // Enable/disable camera service watchdog
         client->setCameraServiceWatchdog(mCameraServiceWatchdogEnabled);
 
-        // Set rotate-and-crop override behavior
-        if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
-            client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
-        } else if (overrideToPortrait && portraitRotation != 0) {
-            uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
-            switch (portraitRotation) {
-                case 90:
-                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
-                    break;
-                case 180:
-                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_180;
-                    break;
-                case 270:
-                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_270;
-                    break;
-                default:
-                    ALOGE("Unexpected portrait rotation: %d", portraitRotation);
-                    break;
+        CameraMetadata chars;
+        bool rotateAndCropSupported = true;
+        err = mCameraProviderManager->getCameraCharacteristics(cameraId, overrideForPerfClass,
+                &chars, rotationOverride);
+        if (err == OK) {
+            auto availableRotateCropEntry = chars.find(
+                    ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
+            if (availableRotateCropEntry.count <= 1) {
+                rotateAndCropSupported = false;
             }
-            client->setRotateAndCropOverride(rotateAndCropMode);
         } else {
-            client->setRotateAndCropOverride(
-                mCameraServiceProxyWrapper->getRotateAndCropOverride(
-                    clientPackageName, facing, multiuser_get_user_id(clientUid)));
+            ALOGE("%s: Unable to query static metadata for camera %s: %s (%d)", __FUNCTION__,
+                    cameraId.c_str(), strerror(-err), err);
         }
 
-        // Set autoframing override behaviour
-        if (mOverrideAutoframingMode != ANDROID_CONTROL_AUTOFRAMING_AUTO) {
-            client->setAutoframingOverride(mOverrideAutoframingMode);
-        } else {
-            client->setAutoframingOverride(
-                mCameraServiceProxyWrapper->getAutoframingOverride(
-                    clientPackageName));
+        if (rotateAndCropSupported) {
+            // Set rotate-and-crop override behavior
+            if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+                client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
+            } else if (rotationOverride != hardware::ICameraService::ROTATION_OVERRIDE_NONE &&
+                    portraitRotation != 0) {
+                uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+                switch (portraitRotation) {
+                    case 90:
+                        rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
+                        break;
+                    case 180:
+                        rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_180;
+                        break;
+                    case 270:
+                        rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_270;
+                        break;
+                    default:
+                        ALOGE("Unexpected portrait rotation: %d", portraitRotation);
+                        break;
+                }
+                // Here we're communicating to the client the chosen rotate
+                // and crop mode to send to the HAL
+                client->setRotateAndCropOverride(rotateAndCropMode);
+            } else {
+                client->setRotateAndCropOverride(
+                    mCameraServiceProxyWrapper->getRotateAndCropOverride(
+                        clientPackageName, facing, multiuser_get_user_id(clientUid)));
+            }
         }
 
-        // Automotive privileged client AID_AUTOMOTIVE_EVS using exterior system camera for use
-        // cases such as rear view and surround view cannot be disabled and are exempt from camera
-        // privacy policy.
-        if ((!isAutomotivePrivilegedClient(packageUid) ||
-                !isAutomotiveExteriorSystemCamera(cameraId))) {
+        bool autoframingSupported = true;
+        auto availableAutoframingEntry = chars.find(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE);
+        if ((availableAutoframingEntry.count == 1) && (availableAutoframingEntry.data.u8[0] ==
+                    ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE)) {
+            autoframingSupported = false;
+        }
+
+        if (autoframingSupported) {
+            // Set autoframing override behaviour
+            if (mOverrideAutoframingMode != ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+                client->setAutoframingOverride(mOverrideAutoframingMode);
+            } else {
+                client->setAutoframingOverride(
+                    mCameraServiceProxyWrapper->getAutoframingOverride(
+                        clientPackageName));
+            }
+        }
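For context: rotate-and-crop and autoframing overrides are now gated on what the static metadata actually advertises, instead of being applied unconditionally. The two checks above can be summarized as follows (illustrative sketch; the helper names are not part of the change):

    // One or zero entries in the rotate-and-crop list means there is no real mode to switch to.
    bool supportsRotateAndCrop(const CameraMetadata& chars) {
        auto entry = chars.find(ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
        return entry.count > 1;
    }

    bool supportsAutoframing(const CameraMetadata& chars) {
        auto entry = chars.find(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE);
        return !(entry.count == 1 &&
                entry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE);
    }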
+
+        bool isCameraPrivacyEnabled;
+        if (flags::camera_privacy_allowlist()) {
             // Set camera muting behavior.
-            bool isCameraPrivacyEnabled =
+            isCameraPrivacyEnabled = this->isCameraPrivacyEnabled(
+                    toString16(client->getPackageName()), cameraId, packagePid, packageUid);
+        } else {
+            isCameraPrivacyEnabled =
                     mSensorPrivacyPolicy->isCameraPrivacyEnabled();
-            if (client->supportsCameraMute()) {
-                client->setCameraMute(
-                        mOverrideCameraMuteMode || isCameraPrivacyEnabled);
-            } else if (isCameraPrivacyEnabled) {
-                // no camera mute supported, but privacy is on! => disconnect
-                ALOGI("Camera mute not supported for package: %s, camera id: %s",
-                        client->getPackageName().c_str(), cameraId.c_str());
-                // Do not hold mServiceLock while disconnecting clients, but
-                // retain the condition blocking other clients from connecting
-                // in mServiceLockWrapper if held.
-                mServiceLock.unlock();
-                // Clear caller identity temporarily so client disconnect PID
-                // checks work correctly
-                int64_t token = CameraThreadState::clearCallingIdentity();
-                // Note AppOp to trigger the "Unblock" dialog
-                client->noteAppOp();
-                client->disconnect();
-                CameraThreadState::restoreCallingIdentity(token);
-                // Reacquire mServiceLock
-                mServiceLock.lock();
+        }
 
-                return STATUS_ERROR_FMT(ERROR_DISABLED,
-                        "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
-            }
+        if (client->supportsCameraMute()) {
+            client->setCameraMute(
+                    mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+        } else if (isCameraPrivacyEnabled) {
+            // no camera mute supported, but privacy is on! => disconnect
+            ALOGI("Camera mute not supported for package: %s, camera id: %s",
+                    client->getPackageName().c_str(), cameraId.c_str());
+            // Do not hold mServiceLock while disconnecting clients, but
+            // retain the condition blocking other clients from connecting
+            // in mServiceLockWrapper if held.
+            mServiceLock.unlock();
+            // Clear caller identity temporarily so client disconnect PID
+            // checks work correctly
+            int64_t token = clearCallingIdentity();
+            // Note AppOp to trigger the "Unblock" dialog
+            client->noteAppOp();
+            client->disconnect();
+            restoreCallingIdentity(token);
+            // Reacquire mServiceLock
+            mServiceLock.lock();
+
+            return STATUS_ERROR_FMT(ERROR_DISABLED,
+                    "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
         }
 
         if (shimUpdateOnly) {
@@ -2759,7 +2845,8 @@
 }
 
 Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
-        int32_t torchStrength, const sp<IBinder>& clientBinder) {
+        int32_t torchStrength, const sp<IBinder>& clientBinder,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2769,8 +2856,17 @@
                 "Torch client binder in null.");
     }
 
-    int uid = CameraThreadState::getCallingUid();
-    const std::string cameraId = resolveCameraId(unresolvedCameraId, uid);
+    int uid = getCallingUid();
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
     if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to change the strength level"
                 "for system only device %s: ", cameraId.c_str());
@@ -2877,7 +2973,7 @@
         clientBinder->linkToDeath(this);
     }
 
-    int clientPid = CameraThreadState::getCallingPid();
+    int clientPid = getCallingPid();
     ALOGI("%s: Torch strength for camera id %s changed to %d for client PID %d",
             __FUNCTION__, cameraId.c_str(), torchStrength, clientPid);
     if (!shouldSkipTorchStrengthUpdates) {
@@ -2887,7 +2983,8 @@
 }
 
 Status CameraService::setTorchMode(const std::string& unresolvedCameraId, bool enabled,
-        const sp<IBinder>& clientBinder) {
+        const sp<IBinder>& clientBinder, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2897,8 +2994,16 @@
                 "Torch client Binder is null");
     }
 
-    int uid = CameraThreadState::getCallingUid();
-    const std::string cameraId = resolveCameraId(unresolvedCameraId, uid);
+    int uid = getCallingUid();
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
 
     if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to set torch mode"
@@ -3005,7 +3110,7 @@
         }
     }
 
-    int clientPid = CameraThreadState::getCallingPid();
+    int clientPid = getCallingPid();
     std::string torchState = enabled ? "on" : "off";
     ALOGI("Torch for camera id %s turned %s for client PID %d", cameraId.c_str(),
             torchState.c_str(), clientPid);
@@ -3025,7 +3130,7 @@
 
 Status CameraService::notifySystemEvent(int32_t eventId,
         const std::vector<int32_t>& args) {
-    const int pid = CameraThreadState::getCallingPid();
+    const int pid = getCallingPid();
     const int selfPid = getpid();
 
     // Permission checks
@@ -3033,7 +3138,7 @@
         // Ensure we're being called by system_server, or similar process with
         // permissions to notify the camera service about system events
         if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
-            const int uid = CameraThreadState::getCallingUid();
+            const int uid = getCallingUid();
             ALOGE("Permission Denial: cannot send updates to camera service about system"
                     " events from pid=%d, uid=%d", pid, uid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
@@ -3099,7 +3204,7 @@
 }
 
 Status CameraService::notifyDeviceStateChange(int64_t newState) {
-    const int pid = CameraThreadState::getCallingPid();
+    const int pid = getCallingPid();
     const int selfPid = getpid();
 
     // Permission checks
@@ -3107,7 +3212,7 @@
         // Ensure we're being called by system_server, or similar process with
         // permissions to notify the camera service about system events
         if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
-            const int uid = CameraThreadState::getCallingUid();
+            const int uid = getCallingUid();
             ALOGE("Permission Denial: cannot send updates to camera service about device"
                     " state changes from pid=%d, uid=%d", pid, uid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
@@ -3130,7 +3235,7 @@
 
 Status CameraService::notifyDisplayConfigurationChange() {
     ATRACE_CALL();
-    const int callingPid = CameraThreadState::getCallingPid();
+    const int callingPid = getCallingPid();
     const int selfPid = getpid();
 
     // Permission checks
@@ -3138,7 +3243,7 @@
         // Ensure we're being called by system_server, or similar process with
         // permissions to notify the camera service about system events
         if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
-            const int uid = CameraThreadState::getCallingUid();
+            const int uid = getCallingUid();
             ALOGE("Permission Denial: cannot send updates to camera service about orientation"
                     " changes from pid=%d, uid=%d", callingPid, uid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
@@ -3188,7 +3293,8 @@
     std::vector<std::unordered_set<std::string>> concurrentCameraCombinations =
             mCameraProviderManager->getConcurrentCameraIds();
     for (auto &combination : concurrentCameraCombinations) {
-        std::vector<std::string> validCombination;
+        std::vector<std::pair<std::string, int32_t>> validCombination;
+        int32_t firstDeviceId = kInvalidDeviceId;
         for (auto &cameraId : combination) {
             // if the camera state is not present, skip
             auto state = getCameraState(cameraId);
@@ -3203,7 +3309,17 @@
             if (shouldRejectSystemCameraConnection(cameraId)) {
                 continue;
             }
-            validCombination.push_back(cameraId);
+            auto [cameraOwnerDeviceId, mappedCameraId] =
+                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+            if (firstDeviceId == kInvalidDeviceId) {
+                firstDeviceId = cameraOwnerDeviceId;
+            } else if (firstDeviceId != cameraOwnerDeviceId) {
+                // Found an invalid combination that contains cameras with different device ids,
+                // so discard it.
+                validCombination.clear();
+                break;
+            }
+            validCombination.push_back({mappedCameraId, cameraOwnerDeviceId});
         }
         if (validCombination.size() != 0) {
             concurrentCameraIds->push_back(std::move(validCombination));
@@ -3212,25 +3328,10 @@
     return Status::ok();
 }
 
-bool CameraService::hasCameraPermissions() const {
-    int callingPid = CameraThreadState::getCallingPid();
-    int callingUid = CameraThreadState::getCallingUid();
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    bool res = checkPermission(std::string(), sCameraPermission,
-            attributionSource, std::string(), AppOpsManager::OP_NONE);
-
-    bool hasPermission = ((callingPid == getpid()) || res);
-    if (!hasPermission) {
-        ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
-    }
-    return hasPermission;
-}
-
 Status CameraService::isConcurrentSessionConfigurationSupported(
         const std::vector<CameraIdAndSessionConfiguration>& cameraIdsAndSessionConfigurations,
-        int targetSdkVersion, /*out*/bool* isSupported) {
+        int targetSdkVersion, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        /*out*/bool* isSupported) {
     if (!isSupported) {
         ALOGE("%s: isSupported is NULL", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "isSupported is NULL");
@@ -3242,8 +3343,27 @@
                 "Camera subsystem is not available");
     }
 
+    for (auto cameraIdAndSessionConfiguration : cameraIdsAndSessionConfigurations) {
+        std::optional<std::string> cameraIdOptional =
+                resolveCameraId(cameraIdAndSessionConfiguration.mCameraId,
+                        clientAttribution.deviceId, devicePolicy);
+        if (!cameraIdOptional.has_value()) {
+            std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                    cameraIdAndSessionConfiguration.mCameraId.c_str(), clientAttribution.deviceId);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+        cameraIdAndSessionConfiguration.mCameraId = cameraIdOptional.value();
+    }
+
     // Check for camera permissions
-    if (!hasCameraPermissions()) {
+    int callingPid = getCallingPid();
+    int callingUid = getCallingUid();
+    bool hasCameraPermission = ((callingPid == getpid()) ||
+            hasPermissionsForCamera(callingPid, callingUid,
+                    devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT
+                        ? kDefaultDeviceId : clientAttribution.deviceId));
+    if (!hasCameraPermission) {
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
                 "android.permission.CAMERA needed to call"
                 "isConcurrentSessionConfigurationSupported");
@@ -3277,7 +3397,6 @@
         /*out*/
         std::vector<hardware::CameraStatus> *cameraStatuses,
         bool isVendorListener, bool isProcessLocalTest) {
-
     ATRACE_CALL();
 
     ALOGV("%s: Add listener %p", __FUNCTION__, listener.get());
@@ -3287,15 +3406,9 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Null listener given to addListener");
     }
 
-    auto clientUid = CameraThreadState::getCallingUid();
-    auto clientPid = CameraThreadState::getCallingPid();
-    AttributionSourceState attributionSource{};
-    attributionSource.uid = clientUid;
-    attributionSource.pid = clientPid;
-
-   bool openCloseCallbackAllowed = checkPermission(std::string(),
-            sCameraOpenCloseListenerPermission, attributionSource, std::string(),
-            AppOpsManager::OP_NONE);
+    auto clientPid = getCallingPid();
+    auto clientUid = getCallingUid();
+    bool openCloseCallbackAllowed = hasPermissionsForOpenCloseListener(clientPid, clientUid);
 
     Mutex::Autolock lock(mServiceLock);
 
@@ -3331,9 +3444,14 @@
     {
         Mutex::Autolock lock(mCameraStatesLock);
         for (auto& i : mCameraStates) {
-            cameraStatuses->emplace_back(i.first,
+            // Get the device id and app-visible camera id for the given HAL-visible camera id.
+            auto [deviceId, mappedCameraId] =
+                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(i.first);
+
+            cameraStatuses->emplace_back(mappedCameraId,
                     mapToInterface(i.second->getStatus()), i.second->getUnavailablePhysicalIds(),
-                    openCloseCallbackAllowed ? i.second->getClientPackage() : std::string());
+                    openCloseCallbackAllowed ? i.second->getClientPackage() : std::string(),
+                    deviceId);
         }
     }
     // Remove the camera statuses that should be hidden from the client, we do
@@ -3342,19 +3460,37 @@
     // the same time.
     cameraStatuses->erase(std::remove_if(cameraStatuses->begin(), cameraStatuses->end(),
                 [this, &isVendorListener, &clientPid, &clientUid](const hardware::CameraStatus& s) {
-                    SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
-                    if (getSystemCameraKind(s.cameraId, &deviceKind) != OK) {
-                        ALOGE("%s: Invalid camera id %s, skipping status update",
-                                __FUNCTION__, s.cameraId.c_str());
-                        return true;
-                    }
-                    return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
-                            clientUid);}), cameraStatuses->end());
+                        std::string cameraId = s.cameraId;
+                        std::optional<std::string> cameraIdOptional = resolveCameraId(s.cameraId,
+                                s.deviceId, IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM);
+                        if (!cameraIdOptional.has_value()) {
+                            std::string msg =
+                                    fmt::sprintf(
+                                            "Camera %s: Invalid camera id for device id %d",
+                                            s.cameraId.c_str(), s.deviceId);
+                            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+                            return true;
+                        }
+                        cameraId = cameraIdOptional.value();
+                        SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
+                        if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
+                            ALOGE("%s: Invalid camera id %s, skipping status update",
+                                    __FUNCTION__, s.cameraId.c_str());
+                            return true;
+                        }
+                        return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
+                                clientUid);
+                     }), cameraStatuses->end());
 
-    //cameraStatuses will have non-eligible camera ids removed.
+    // cameraStatuses will have non-eligible camera ids removed.
     std::set<std::string> idsChosenForCallback;
     for (const auto &s : *cameraStatuses) {
-        idsChosenForCallback.insert(s.cameraId);
+        // Add only default device cameras here, as virtual cameras currently don't support torch
+        // anyway. Note that this is a simplification of the implementation here, and we should
+        // change this when virtual cameras support torch.
+        if (s.deviceId == kDefaultDeviceId) {
+            idsChosenForCallback.insert(s.cameraId);
+        }
     }
 
     /*
@@ -3368,7 +3504,8 @@
             // The camera id is visible to the client. Fine to send torch
             // callback.
             if (idsChosenForCallback.find(id) != idsChosenForCallback.end()) {
-                listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+                listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id,
+                        kDefaultDeviceId);
             }
         }
     }
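For context: the status-filtering lambda in addListener() now resolves each (cameraId, deviceId) pair before applying the existing system-camera visibility rules. Its logic, restated as a standalone predicate (illustrative only; the real code is the lambda above):

    // A status entry is dropped when its id cannot be resolved for its device, when the id is
    // unknown to the provider, or when this listener should not see that kind of system camera.
    bool shouldDropStatus(const hardware::CameraStatus& s, bool isVendorListener,
            int clientPid, int clientUid) {
        std::optional<std::string> resolved = resolveCameraId(s.cameraId, s.deviceId,
                IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM);
        if (!resolved.has_value()) return true;
        SystemCameraKind kind = SystemCameraKind::PUBLIC;
        if (getSystemCameraKind(resolved.value(), &kind) != OK) return true;
        return shouldSkipStatusUpdates(kind, isVendorListener, clientPid, clientUid);
    }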
@@ -3431,13 +3568,10 @@
     return ret;
 }
 
-Status CameraService::supportsCameraApi(const std::string& unresolvedCameraId, int apiVersion,
+Status CameraService::supportsCameraApi(const std::string& cameraId, int apiVersion,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
-    const std::string cameraId = resolveCameraId(
-            unresolvedCameraId, CameraThreadState::getCallingUid());
-
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
 
     switch (apiVersion) {
@@ -3451,7 +3585,10 @@
     }
 
     int portraitRotation;
-    auto deviceVersionAndTransport = getDeviceVersion(cameraId, false, &portraitRotation);
+    auto deviceVersionAndTransport =
+            getDeviceVersion(cameraId,
+                    /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                    &portraitRotation);
     if (deviceVersionAndTransport.first == -1) {
         std::string msg = fmt::sprintf("Unknown camera ID %s", cameraId.c_str());
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -3496,13 +3633,10 @@
     return Status::ok();
 }
 
-Status CameraService::isHiddenPhysicalCamera(const std::string& unresolvedCameraId,
+Status CameraService::isHiddenPhysicalCamera(const std::string& cameraId,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
-    const std::string cameraId = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
-
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
     *isSupported = mCameraProviderManager->isHiddenPhysicalCamera(cameraId);
 
@@ -3518,11 +3652,25 @@
     ATRACE_CALL();
 
     if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
-        const int pid = CameraThreadState::getCallingPid();
-        const int uid = CameraThreadState::getCallingUid();
+        const int pid = getCallingPid();
+        const int uid = getCallingUid();
         ALOGE("Permission Denial: can't inject camera pid=%d, uid=%d", pid, uid);
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                        "Permission Denial: no permission to inject camera");
+                "Permission Denial: no permission to inject camera");
+    }
+
+    // Do not allow any camera injection that injects or replaces a virtual camera.
+    auto [deviceIdForInternalCamera, _] =
+            mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(internalCamId);
+    if (deviceIdForInternalCamera != kDefaultDeviceId) {
+        return STATUS_ERROR(ICameraInjectionCallback::ERROR_INJECTION_UNSUPPORTED,
+                "Cannot replace a virtual camera");
+    }
+    [[maybe_unused]] auto [deviceIdForExternalCamera, unusedMappedCameraId] =
+            mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(externalCamId);
+    if (deviceIdForExternalCamera != kDefaultDeviceId) {
+        return STATUS_ERROR(ICameraInjectionCallback::ERROR_INJECTION_UNSUPPORTED,
+                "Cannot inject a virtual camera to replace an internal camera");
     }
 
     ALOGV(
@@ -3552,7 +3700,7 @@
                         mInjectionExternalCamId.c_str());
             }
             res = clientSp->injectCamera(mInjectionExternalCamId, mCameraProviderManager);
-            if(res != OK) {
+            if (res != OK) {
                 mInjectionStatusListener->notifyInjectionError(mInjectionExternalCamId, res);
             }
         } else {
@@ -3589,7 +3737,6 @@
         std::unique_ptr<AutoConditionLock> lock =
                 AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
 
-
         std::vector<sp<BasicClient>> evicted;
         for (auto& i : mActiveClientManager.getAll()) {
             auto clientSp = i->getValue();
@@ -3721,13 +3868,13 @@
     mServiceLock.unlock();
 
     // Clear caller identity temporarily so client disconnect PID checks work correctly
-    int64_t token = CameraThreadState::clearCallingIdentity();
+    int64_t token = clearCallingIdentity();
 
     for (auto& i : evicted) {
         i->disconnect();
     }
 
-    CameraThreadState::restoreCallingIdentity(token);
+    restoreCallingIdentity(token);
 
     // Reacquire mServiceLock
     mServiceLock.lock();
@@ -3858,7 +4005,6 @@
 // We share the media players for shutter and recording sound for all clients.
 // A reference count is kept to determine when we will actually release the
 // media players.
-
 sp<MediaPlayer> CameraService::newMediaPlayer(const char *file) {
     sp<MediaPlayer> mp = new MediaPlayer();
     status_t error;
@@ -3938,21 +4084,23 @@
 
 CameraService::Client::Client(const sp<CameraService>& cameraService,
         const sp<ICameraClient>& cameraClient,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName, bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId,
         const std::string& cameraIdStr,
         int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
-        int servicePid, bool overrideToPortrait) :
+        int servicePid, int rotationOverride) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
+                attributionAndPermissionUtils,
                 clientPackageName, systemNativeClient, clientFeatureId,
                 cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
-                servicePid, overrideToPortrait),
+                servicePid, rotationOverride),
         mCameraId(api1CameraId)
 {
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
     LOG1("Client::Client E (pid %d, id %d)", callingPid, mCameraId);
 
     mRemoteCallback = cameraClient;
@@ -3976,10 +4124,12 @@
 
 CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
         const sp<IBinder>& remoteCallback,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName, bool nativeClient,
         const std::optional<std::string>& clientFeatureId, const std::string& cameraIdStr,
         int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
-        int servicePid, bool overrideToPortrait):
+        int servicePid, int rotationOverride):
+        AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
         mDestructionStarted(false),
         mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
         mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
@@ -3987,7 +4137,7 @@
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
         mDisconnected(false), mUidIsTrusted(false),
-        mOverrideToPortrait(overrideToPortrait),
+        mRotationOverride(rotationOverride),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
         mRemoteBinder(remoteCallback),
         mOpsActive(false),
@@ -4049,8 +4199,8 @@
     const auto& mActivityManager = getActivityManager();
     if (mActivityManager) {
         mActivityManager->logFgsApiEnd(LOG_FGS_CAMERA_API,
-            CameraThreadState::getCallingUid(),
-            CameraThreadState::getCallingPid());
+            getCallingUid(),
+            getCallingPid());
     }
 
     return res;
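For context: the pervasive change from CameraThreadState::getCallingPid()/getCallingUid()/clearCallingIdentity() to plain member calls works because these classes now derive from AttributionAndPermissionUtilsEncapsulator, which forwards to an injected AttributionAndPermissionUtils instance (presumably so caller-identity lookups can be faked in tests). A minimal sketch of the pattern (the component class is hypothetical):

    class SomeCameraComponent : public AttributionAndPermissionUtilsEncapsulator {
      public:
        explicit SomeCameraComponent(std::shared_ptr<AttributionAndPermissionUtils> utils)
            : AttributionAndPermissionUtilsEncapsulator(utils) {}

        void logCaller() {
            // Resolved through the injected utils rather than a static class.
            ALOGV("caller pid %d uid %d", getCallingPid(), getCallingUid());
        }
    };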
@@ -4060,7 +4210,7 @@
     // No dumping of clients directly over Binder,
     // must go through CameraService::dump
     android_errorWriteWithInfoLog(SN_EVENT_LOG_ID, "26265403",
-            CameraThreadState::getCallingUid(), NULL, 0);
+            getCallingUid(), NULL, 0);
     return OK;
 }
 
@@ -4143,16 +4293,25 @@
         // return MODE_IGNORED. Do not treat such case as error.
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
                 mClientPackageName);
-        bool isCameraPrivacyEnabled =
+
+        bool isCameraPrivacyEnabled;
+        if (flags::camera_privacy_allowlist()) {
+            isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
+                    toString16(mClientPackageName), std::string(), mClientPid, mClientUid);
+        } else {
+            isCameraPrivacyEnabled =
                 sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+        }
         // We don't want to return EACCESS if the CameraPrivacy is enabled.
         // We prefer to successfully open the camera and perform camera muting
         // or blocking in connectHelper as handleAppOpMode can be called before the
         // connection has been fully established and at that time camera muting
         // capabilities are unknown.
         if (!isUidActive || !isCameraPrivacyEnabled) {
-            ALOGI("Camera %s: Access for \"%s\" has been restricted",
-                    mCameraIdStr.c_str(), mClientPackageName.c_str());
+            ALOGI("Camera %s: Access for \"%s\" has been restricted."
+                    "uid active: %s, privacy enabled: %s", mCameraIdStr.c_str(),
+                    mClientPackageName.c_str(), isUidActive ? "true" : "false",
+                    isCameraPrivacyEnabled ? "true" : "false");
             // Return the same error as for device policy manager rejection
             return -EACCES;
         }
@@ -4170,8 +4329,15 @@
     if (mAppOpsManager != nullptr) {
         // Notify app ops that the camera is not available
         mOpsCallback = new OpsCallback(this);
-        mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+
+        if (flags::watch_foreground_changes()) {
+            mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+                toString16(mClientPackageName),
+                AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
+        } else {
+            mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
                 toString16(mClientPackageName), mOpsCallback);
+        }
 
         // Just check for camera access here on open - delay startOp until
         // camera frames start streaming in startCameraStreamingOps
@@ -4331,20 +4497,42 @@
         block();
     } else if (res == AppOpsManager::MODE_IGNORED) {
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
-        bool isCameraPrivacyEnabled =
+
+        // Uid may be active, but not visible to the user (e.g. PROCESS_STATE_FOREGROUND_SERVICE).
+        // If not visible, but still active, then we want to block instead of muting the camera.
+        int32_t procState = sCameraService->mUidPolicy->getProcState(mClientUid);
+        bool isUidVisible = (procState <= ActivityManager::PROCESS_STATE_BOUND_TOP);
+
+        bool isCameraPrivacyEnabled;
+        if (flags::camera_privacy_allowlist()) {
+            isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
+                    toString16(mClientPackageName), std::string(), mClientPid, mClientUid);
+        } else {
+            isCameraPrivacyEnabled =
                 sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
-        ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d",
-                mCameraIdStr.c_str(), mClientPackageName.c_str(),
-                mUidIsTrusted, isUidActive);
-        // If the calling Uid is trusted (a native service), or the client Uid is active (WAR for
-        // b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such cases as
-        // error.
+        }
+
+        ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d"
+                " isUidVisible %d, isCameraPrivacyEnabled %d", mCameraIdStr.c_str(),
+                mClientPackageName.c_str(), mUidIsTrusted, isUidActive, isUidVisible,
+                isCameraPrivacyEnabled);
+        // If the calling Uid is trusted (a native service), or the client Uid is active / visible
+        // (WAR for b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such
+        // cases as an error.
         if (!mUidIsTrusted) {
-            if (isUidActive && isCameraPrivacyEnabled && supportsCameraMute()) {
-                setCameraMute(true);
-            } else if (!isUidActive
-                || (isCameraPrivacyEnabled && !supportsCameraMute())) {
-                block();
+            if (flags::watch_foreground_changes()) {
+                if (isUidVisible && isCameraPrivacyEnabled && supportsCameraMute()) {
+                    setCameraMute(true);
+                } else {
+                    block();
+                }
+            } else {
+                if (isUidActive && isCameraPrivacyEnabled && supportsCameraMute()) {
+                    setCameraMute(true);
+                } else if (!isUidActive
+                    || (isCameraPrivacyEnabled && !supportsCameraMute())) {
+                    block();
+                }
             }
         }
     } else if (res == AppOpsManager::MODE_ALLOWED) {
@@ -4357,7 +4545,7 @@
 
     // Reset the client PID to allow server-initiated disconnect,
     // and to prevent further calls by client.
-    mClientPid = CameraThreadState::getCallingPid();
+    mClientPid = getCallingPid();
     CaptureResultExtras resultExtras; // a dummy result (invalid)
     notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED, resultExtras);
     disconnect();
@@ -4715,7 +4903,15 @@
     }
     hasCameraPrivacyFeature(); // Called so the result is cached
     mSpm.addSensorPrivacyListener(this);
+    if (isAutomotiveDevice()) {
+        mSpm.addToggleSensorPrivacyListener(this);
+    }
     mSensorPrivacyEnabled = mSpm.isSensorPrivacyEnabled();
+    if (flags::camera_privacy_allowlist()) {
+        mCameraPrivacyState = mSpm.getToggleSensorPrivacyState(
+                SensorPrivacyManager::TOGGLE_TYPE_SOFTWARE,
+                SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
+    }
     status_t res = mSpm.linkToDeath(this);
     if (res == OK) {
         mRegistered = true;
@@ -4747,6 +4943,9 @@
 void CameraService::SensorPrivacyPolicy::unregisterSelf() {
     Mutex::Autolock _l(mSensorPrivacyLock);
     mSpm.removeSensorPrivacyListener(this);
+    if (isAutomotiveDevice()) {
+        mSpm.removeToggleSensorPrivacyListener(this);
+    }
     mSpm.unlinkToDeath(this);
     mRegistered = false;
     ALOGV("SensorPrivacyPolicy: Unregistered with SensorPrivacyManager");
@@ -4761,6 +4960,15 @@
     return mSensorPrivacyEnabled;
 }
 
+int CameraService::SensorPrivacyPolicy::getCameraPrivacyState() {
+    if (!mRegistered) {
+        registerWithSensorPrivacyManager();
+    }
+
+    Mutex::Autolock _l(mSensorPrivacyLock);
+    return mCameraPrivacyState;
+}
+
 bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled() {
     if (!hasCameraPrivacyFeature()) {
         return false;
@@ -4768,18 +4976,51 @@
     return mSpm.isToggleSensorPrivacyEnabled(SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
 }
 
+bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(const String16& packageName) {
+    if (!hasCameraPrivacyFeature()) {
+        return false;
+    }
+    return mSpm.isCameraPrivacyEnabled(packageName);
+}
+
 binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyChanged(
-    int toggleType __unused, int sensor __unused, bool enabled) {
+    int toggleType, int sensor, bool enabled) {
+    if ((toggleType == SensorPrivacyManager::TOGGLE_TYPE_UNKNOWN)
+            && (sensor == SensorPrivacyManager::TOGGLE_SENSOR_UNKNOWN)) {
+        {
+            Mutex::Autolock _l(mSensorPrivacyLock);
+            mSensorPrivacyEnabled = enabled;
+        }
+        // if sensor privacy is enabled then block all clients from accessing the camera
+        if (enabled) {
+            sp<CameraService> service = mService.promote();
+            if (service != nullptr) {
+                service->blockAllClients();
+            }
+        }
+    }
+    return binder::Status::ok();
+}
+
+binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyStateChanged(
+    int, int sensor, int state) {
+    if (!flags::camera_privacy_allowlist()
+            || (sensor != SensorPrivacyManager::TOGGLE_SENSOR_CAMERA)) {
+        return binder::Status::ok();
+    }
     {
         Mutex::Autolock _l(mSensorPrivacyLock);
-        mSensorPrivacyEnabled = enabled;
+        mCameraPrivacyState = state;
+    }
+    sp<CameraService> service = mService.promote();
+    if (!service) {
+        return binder::Status::ok();
     }
     // if sensor privacy is enabled then block all clients from accessing the camera
-    if (enabled) {
-        sp<CameraService> service = mService.promote();
-        if (service != nullptr) {
-            service->blockAllClients();
-        }
+    if (state == SensorPrivacyManager::ENABLED) {
+        service->blockAllClients();
+    } else if (state == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
+        service->blockPrivacyEnabledClients();
     }
     return binder::Status::ok();
 }
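For context: onSensorPrivacyStateChanged() maps the tri-state camera privacy toggle onto two different blocking actions. The dispatch above, restated (illustrative helper; it assumes CameraService internals are reachable, as they are from the inner SensorPrivacyPolicy class):

    void applyCameraPrivacyState(const sp<CameraService>& service, int state) {
        if (service == nullptr) return;
        if (state == SensorPrivacyManager::ENABLED) {
            service->blockAllClients();                // hard toggle: evict every client
        } else if (state == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
            service->blockPrivacyEnabledClients();     // only clients subject to the policy
        }
        // DISABLED (or any other state) leaves clients untouched.
    }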
@@ -4894,7 +5135,6 @@
     }
 }
 
-
 // ----------------------------------------------------------------------------
 //                  CameraClientManager
 // ----------------------------------------------------------------------------
@@ -4953,7 +5193,7 @@
     }
     if (hasAny) ret << "\n";
     ret << "]\n";
-    return std::move(ret.str());
+    return ret.str();
 }
 
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
@@ -5136,8 +5376,8 @@
 
     if (checkCallingPermission(toString16(sDumpPermission)) == false) {
         dprintf(fd, "Permission Denial: can't dump CameraService from pid=%d, uid=%d\n",
-                CameraThreadState::getCallingPid(),
-                CameraThreadState::getCallingUid());
+                getCallingPid(),
+                getCallingUid());
         return NO_ERROR;
     }
     bool locked = tryLock(mServiceLock);
@@ -5386,7 +5626,7 @@
       * binder driver
       */
     // PID here is approximate and can be wrong.
-    logClientDied(CameraThreadState::getCallingPid(), "Binder died unexpectedly");
+    logClientDied(getCallingPid(), "Binder died unexpectedly");
 
     // check torch client
     handleTorchClientBinderDied(who);
@@ -5425,6 +5665,36 @@
         return;
     }
 
+    if (vd_flags::camera_device_awareness() && status == StatusInternal::PRESENT) {
+        CameraMetadata cameraInfo;
+        status_t res = mCameraProviderManager->getCameraCharacteristics(
+                cameraId, false, &cameraInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
+        if (res != OK) {
+            ALOGW("%s: Not able to get camera characteristics for camera id %s",
+                  __FUNCTION__, cameraId.c_str());
+        } else {
+            int32_t deviceId = getDeviceId(cameraInfo);
+            if (deviceId != kDefaultDeviceId) {
+                const auto &lensFacingEntry = cameraInfo.find(ANDROID_LENS_FACING);
+                camera_metadata_enum_android_lens_facing_t androidLensFacing =
+                        static_cast<camera_metadata_enum_android_lens_facing_t>(
+                                lensFacingEntry.data.u8[0]);
+                std::string mappedCameraId;
+                if (androidLensFacing == ANDROID_LENS_FACING_BACK) {
+                    mappedCameraId = kVirtualDeviceBackCameraId;
+                } else if (androidLensFacing == ANDROID_LENS_FACING_FRONT) {
+                    mappedCameraId = kVirtualDeviceFrontCameraId;
+                } else {
+                    ALOGD("%s: Not adding entry for an external camera of a virtual device",
+                          __func__);
+                }
+                if (!mappedCameraId.empty()) {
+                    mVirtualDeviceCameraIdMapper.addCamera(cameraId, deviceId, mappedCameraId);
+                }
+            }
+        }
+    }
+
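The block above registers virtual-device cameras with mVirtualDeviceCameraIdMapper keyed by device id and mapped camera id, with back/front lens facing mapped to kVirtualDeviceBackCameraId/kVirtualDeviceFrontCameraId. A self-contained sketch of that mapping idea (the class and names here are illustrative stand-ins, not the real VirtualDeviceCameraIdMapper API):

    #include <cstdint>
    #include <map>
    #include <optional>
    #include <string>
    #include <utility>

    // Illustrative stand-in: HAL camera ids are stored per (deviceId, mappedCameraId)
    // so that lookups from a virtual-device context can be resolved back to the
    // HAL-visible id.
    class TinyCameraIdMapper {
      public:
        void addCamera(const std::string& halCameraId, int32_t deviceId,
                       const std::string& mappedCameraId) {
            mHalIdByKey[{deviceId, mappedCameraId}] = halCameraId;
        }

        std::optional<std::string> resolve(int32_t deviceId,
                                           const std::string& mappedCameraId) const {
            auto it = mHalIdByKey.find({deviceId, mappedCameraId});
            if (it == mHalIdByKey.end()) return std::nullopt;
            return it->second;
        }

      private:
        std::map<std::pair<int32_t, std::string>, std::string> mHalIdByKey;
    };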
     // Collect the logical cameras without holding mStatusLock in updateStatus
     // as that can lead to a deadlock(b/162192331).
     auto logicalCameraIds = getLogicalCameras(cameraId);
@@ -5433,56 +5703,47 @@
     state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
                         &logicalCameraIds]
             (const std::string& cameraId, StatusInternal status) {
+                // Get the device id and app-visible camera id for the given HAL-visible camera id.
+                auto [deviceId, mappedCameraId] =
+                        mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
 
-            if (status != StatusInternal::ENUMERATING) {
-                // Update torch status if it has a flash unit.
-                Mutex::Autolock al(mTorchStatusMutex);
-                TorchModeStatus torchStatus;
-                if (getTorchStatusLocked(cameraId, &torchStatus) !=
-                        NAME_NOT_FOUND) {
-                    TorchModeStatus newTorchStatus =
-                            status == StatusInternal::PRESENT ?
-                            TorchModeStatus::AVAILABLE_OFF :
-                            TorchModeStatus::NOT_AVAILABLE;
-                    if (torchStatus != newTorchStatus) {
-                        onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
+                if (status != StatusInternal::ENUMERATING) {
+                    // Update torch status if it has a flash unit.
+                    Mutex::Autolock al(mTorchStatusMutex);
+                    TorchModeStatus torchStatus;
+                    if (getTorchStatusLocked(cameraId, &torchStatus) !=
+                            NAME_NOT_FOUND) {
+                        TorchModeStatus newTorchStatus =
+                                status == StatusInternal::PRESENT ?
+                                TorchModeStatus::AVAILABLE_OFF :
+                                TorchModeStatus::NOT_AVAILABLE;
+                        if (torchStatus != newTorchStatus) {
+                            onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
+                        }
                     }
                 }
-            }
 
-            Mutex::Autolock lock(mStatusListenerLock);
-            notifyPhysicalCameraStatusLocked(mapToInterface(status), cameraId,
-                    logicalCameraIds, deviceKind);
+                Mutex::Autolock lock(mStatusListenerLock);
+                notifyPhysicalCameraStatusLocked(mapToInterface(status), mappedCameraId,
+                        logicalCameraIds, deviceKind, deviceId);
 
-            for (auto& listener : mListenerList) {
-                bool isVendorListener = listener->isVendorListener();
-                if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
-                        listener->getListenerPid(), listener->getListenerUid()) ||
-                        isVendorListener) {
-                    ALOGV("Skipping discovery callback for system-only camera device %s",
-                            cameraId.c_str());
-                    continue;
-                }
-                auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
-                        cameraId);
-                listener->handleBinderStatus(ret,
-                         "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
-                        __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
-                        ret.exceptionCode());
-                // Also trigger the callbacks for cameras that were remapped to the current
-                // cameraId for the specific package that this listener belongs to.
-                std::vector<std::string> remappedCameraIds =
-                        findOriginalIdsForRemappedCameraId(cameraId, listener->getListenerUid());
-                for (auto& remappedCameraId : remappedCameraIds) {
-                    ret = listener->getListener()->onStatusChanged(
-                            mapToInterface(status), remappedCameraId);
+                for (auto& listener : mListenerList) {
+                    bool isVendorListener = listener->isVendorListener();
+                    if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
+                            listener->getListenerPid(), listener->getListenerUid())) {
+                        ALOGV("Skipping discovery callback for system-only camera device %s",
+                              cameraId.c_str());
+                        continue;
+                    }
+
+                    auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
+                            mappedCameraId, deviceId);
                     listener->handleBinderStatus(ret,
-                             "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                            "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
                             __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
                             ret.exceptionCode());
                 }
-            }
-        });
+            });
 }
 
 void CameraService::updateOpenCloseStatus(const std::string& cameraId, bool open,
@@ -5499,6 +5760,10 @@
         state->setClientPackage(std::string());
     }
 
+    // Get the device id and app-visible camera id for the given HAL-visible camera id.
+    auto [deviceId, mappedCameraId] =
+            mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
     Mutex::Autolock lock(mStatusListenerLock);
 
     for (const auto& it : mListenerList) {
@@ -5508,9 +5773,10 @@
 
         binder::Status ret;
         if (open) {
-            ret = it->getListener()->onCameraOpened(cameraId, clientPackageName);
+            ret = it->getListener()->onCameraOpened(mappedCameraId, clientPackageName,
+                    deviceId);
         } else {
-            ret = it->getListener()->onCameraClosed(cameraId);
+            ret = it->getListener()->onCameraClosed(mappedCameraId, deviceId);
         }
 
         it->handleBinderStatus(ret,
@@ -5533,7 +5799,7 @@
     }
 
     ALOGV("%s: Status has changed for camera ID %s from %#x to %#x", __FUNCTION__,
-            cameraId.c_str(), oldStatus, status);
+            cameraId.c_str(), eToI(oldStatus), eToI(status));
 
     if (oldStatus == StatusInternal::NOT_PRESENT &&
             (status != StatusInternal::PRESENT &&
@@ -5603,7 +5869,7 @@
 
 void CameraService::notifyPhysicalCameraStatusLocked(int32_t status,
         const std::string& physicalCameraId, const std::list<std::string>& logicalCameraIds,
-        SystemCameraKind deviceKind) {
+        SystemCameraKind deviceKind, int32_t deviceId) {
     // mStatusListenerLock is expected to be locked
     for (const auto& logicalCameraId : logicalCameraIds) {
         for (auto& listener : mListenerList) {
@@ -5617,7 +5883,7 @@
                 continue;
             }
             auto ret = listener->getListener()->onPhysicalCameraStatusChanged(status,
-                    logicalCameraId, physicalCameraId);
+                    logicalCameraId, physicalCameraId, deviceId);
             listener->handleBinderStatus(ret,
                     "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
                     __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
@@ -5626,7 +5892,6 @@
     }
 }
 
-
 void CameraService::blockClientsForUid(uid_t uid) {
     const auto clients = mActiveClientManager.getAll();
     for (auto& current : clients) {
@@ -5651,6 +5916,23 @@
     }
 }
 
+void CameraService::blockPrivacyEnabledClients() {
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                std::string pkgName = basicClient->getPackageName();
+                bool cameraPrivacyEnabled =
+                        mSensorPrivacyPolicy->isCameraPrivacyEnabled(toString16(pkgName));
+                if (cameraPrivacyEnabled) {
+                    basicClient->block();
+                }
+            }
+        }
+    }
+}
+
 // NOTE: This is a remote API - make sure all args are validated
 status_t CameraService::shellCommand(int in, int out, int err, const Vector<String16>& args) {
     if (!checkCallingPermission(toString16(sManageCameraPermission), nullptr, nullptr)) {
@@ -5690,8 +5972,6 @@
         return handleWatchCommand(args, in, out);
     } else if (args.size() >= 2 && args[0] == toString16("set-watchdog")) {
         return handleSetCameraServiceWatchdog(args);
-    } else if (args.size() >= 4 && args[0] == toString16("remap-camera-id")) {
-        return handleCameraIdRemapping(args, err);
     } else if (args.size() == 1 && args[0] == toString16("help")) {
         printHelp(out);
         return OK;
@@ -5700,23 +5980,6 @@
     return BAD_VALUE;
 }
 
-status_t CameraService::handleCameraIdRemapping(const Vector<String16>& args, int err) {
-    uid_t uid = IPCThreadState::self()->getCallingUid();
-    if (uid != AID_ROOT) {
-        dprintf(err, "Must be adb root\n");
-        return PERMISSION_DENIED;
-    }
-    if (args.size() != 4) {
-        dprintf(err, "Expected format: remap-camera-id <PACKAGE> <Id0> <Id1>\n");
-        return BAD_VALUE;
-    }
-    std::string packageName = toStdString(args[1]);
-    std::string cameraIdToReplace = toStdString(args[2]);
-    std::string cameraIdNew = toStdString(args[3]);
-    remapCameraIds({{packageName, {{cameraIdToReplace, cameraIdNew}}}});
-    return OK;
-}
-
 status_t CameraService::handleSetUidState(const Vector<String16>& args, int err) {
     std::string packageName = toStdString(args[1]);
 
@@ -5999,7 +6262,7 @@
                  "        prints the monitored information in real time\n"
                  "        Hit return to exit\n"
                  "  clear clears all buffers storing information for watch command");
-  return BAD_VALUE;
+    return BAD_VALUE;
 }
 
 status_t CameraService::startWatchingTags(const Vector<String16> &args, int outFd) {
@@ -6333,7 +6596,6 @@
         "  set-watchdog <VALUE> enables or disables the camera service watchdog\n"
         "      Valid values 0=disable, 1=enable\n"
         "  watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
-        "  remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
         "  help print this message\n");
 }
 
@@ -6386,9 +6648,9 @@
         mServiceLock.unlock();
 
         // Clear caller identity temporarily so client disconnect PID checks work correctly
-        int64_t token = CameraThreadState::clearCallingIdentity();
+        int64_t token = clearCallingIdentity();
         clientSp->disconnect();
-        CameraThreadState::restoreCallingIdentity(token);
+        restoreCallingIdentity(token);
 
         // Reacquire mServiceLock
         mServiceLock.lock();
@@ -6407,4 +6669,4 @@
     mInjectionStatusListener->removeListener();
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 1487013..d5c57cb 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -21,7 +21,6 @@
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
-#include <android/hardware/CameraIdRemapping.h>
 #include <android/hardware/camera2/BnCameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraInjectionCallback.h>
 
@@ -34,6 +33,7 @@
 #include <binder/IServiceManager.h>
 #include <binder/IActivityManager.h>
 #include <binder/IAppOpsCallback.h>
+#include <binder/IPermissionController.h>
 #include <binder/IUidObserver.h>
 #include <hardware/camera.h>
 #include <sensorprivacy/SensorPrivacyManager.h>
@@ -53,12 +53,15 @@
 #include "utils/ClientManager.h"
 #include "utils/IPCTransport.h"
 #include "utils/CameraServiceProxyWrapper.h"
+#include "utils/AttributionAndPermissionUtils.h"
+#include "utils/VirtualDeviceCameraIdMapper.h"
 
 #include <set>
 #include <string>
 #include <list>
 #include <map>
 #include <memory>
+#include <mutex>
 #include <optional>
 #include <utility>
 #include <unordered_map>
@@ -77,7 +80,8 @@
     public virtual ::android::hardware::BnCameraService,
     public virtual IBinder::DeathRecipient,
     public virtual CameraProviderManager::StatusListener,
-    public virtual IServiceManager::LocalRegistrationCallback
+    public virtual IServiceManager::LocalRegistrationCallback,
+    public AttributionAndPermissionUtilsEncapsulator
 {
     friend class BinderService<CameraService>;
     friend class CameraOfflineSessionClient;
@@ -119,7 +123,9 @@
                         // Non-null arguments for cameraServiceProxyWrapper should be provided for
                         // testing purposes only.
                         CameraService(std::shared_ptr<CameraServiceProxyWrapper>
-                                cameraServiceProxyWrapper = nullptr);
+                                cameraServiceProxyWrapper = nullptr,
+                                std::shared_ptr<AttributionAndPermissionUtils>
+                                attributionAndPermissionUtils = nullptr);
     virtual             ~CameraService();
 
     /////////////////////////////////////////////////////////////////////
@@ -145,14 +151,20 @@
     /////////////////////////////////////////////////////////////////////
     // ICameraService
     // IMPORTANT: All binder calls that deal with logicalCameraId should use
-    // resolveCameraId(logicalCameraId) to arrive at the correct cameraId to
-    // perform the operation on (in case of Id Remapping).
-    virtual binder::Status     getNumberOfCameras(int32_t type, int32_t* numCameras);
+    // resolveCameraId(logicalCameraId, deviceId, devicePolicy) to arrive at the correct
+    // cameraId to perform the operation on (in case of contexts
+    // associated with virtual devices).
+    virtual binder::Status     getNumberOfCameras(int32_t type,
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, int32_t* numCameras);
 
-    virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
-            hardware::CameraInfo* cameraInfo) override;
+    virtual binder::Status     getCameraInfo(int cameraId, int rotationOverride,
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
     virtual binder::Status     getCameraCharacteristics(const std::string& cameraId,
-            int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) override;
+            int targetSdkVersion, int rotationOverride,
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, CameraMetadata* cameraInfo) override;
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
             hardware::camera2::params::VendorTagDescriptor* desc);
@@ -161,17 +173,15 @@
             hardware::camera2::params::VendorTagDescriptorCache* cache);
 
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
-            int32_t cameraId, const std::string& clientPackageName,
-            int32_t clientUid, int clientPid, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode,
-            /*out*/
-            sp<hardware::ICamera>* device) override;
+            int32_t cameraId, int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, /*out*/ sp<hardware::ICamera>* device) override;
 
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
-            const std::string& cameraId,
-            const std::string& clientPackageName, const std::optional<std::string>& clientFeatureId,
-            int32_t clientUid, int scoreOffset, int targetSdkVersion, bool overrideToPortrait,
+            const std::string& cameraId, int scoreOffset, int targetSdkVersion,
+            int rotationOverride, const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
 
@@ -187,7 +197,8 @@
 
     virtual binder::Status isConcurrentSessionConfigurationSupported(
         const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>& sessions,
-        int targetSdkVersion, /*out*/bool* supported);
+        int targetSdkVersion, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        /*out*/bool* supported);
 
     virtual binder::Status    getLegacyParameters(
             int32_t cameraId,
@@ -195,13 +206,17 @@
             std::string* parameters);
 
     virtual binder::Status    setTorchMode(const std::string& cameraId, bool enabled,
-            const sp<IBinder>& clientBinder);
+            const sp<IBinder>& clientBinder, const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy);
 
     virtual binder::Status    turnOnTorchWithStrengthLevel(const std::string& cameraId,
-            int32_t torchStrength, const sp<IBinder>& clientBinder);
+            int32_t torchStrength, const sp<IBinder>& clientBinder,
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy);
 
     virtual binder::Status    getTorchStrengthLevel(const std::string& cameraId,
-            int32_t* torchStrength);
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, int32_t* torchStrength);
 
     virtual binder::Status    notifySystemEvent(int32_t eventId,
             const std::vector<int32_t>& args);
@@ -231,22 +246,26 @@
     virtual binder::Status reportExtensionSessionStats(
             const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/);
 
-    virtual binder::Status remapCameraIds(const hardware::CameraIdRemapping&
-            cameraIdRemapping);
-
     virtual binder::Status injectSessionParams(
             const std::string& cameraId,
             const hardware::camera2::impl::CameraMetadataNative& sessionParams);
 
     virtual binder::Status createDefaultRequest(const std::string& cameraId, int templateId,
+            const AttributionSourceState& clientAttribution, int32_t devicePolicy,
             /*out*/
             hardware::camera2::impl::CameraMetadataNative* request);
 
     virtual binder::Status isSessionConfigurationWithParametersSupported(
-            const std::string& cameraId,
+            const std::string& cameraId, int targetSdkVersion,
             const SessionConfiguration& sessionConfiguration,
-            /*out*/
-            bool* supported);
+            const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+            /*out*/ bool* supported);
+
+    virtual binder::Status getSessionCharacteristics(
+            const std::string& cameraId, int targetSdkVersion, int rotationOverride,
+            const SessionConfiguration& sessionConfiguration,
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, /*out*/ CameraMetadata* outMetadata);
 
     // Extra permissions checks
     virtual status_t    onTransact(uint32_t code, const Parcel& data,
@@ -289,7 +308,8 @@
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
     std::pair<int, IPCTransport>    getDeviceVersion(const std::string& cameraId,
-            bool overrideToPortrait, int* portraitRotation,
+            int rotationOverride,
+            int* portraitRotation,
             int* facing = nullptr, int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
@@ -309,10 +329,20 @@
     // Shared utilities
     static binder::Status filterGetInfoErrorCode(status_t err);
 
+    /**
+     * Returns true if the device is an automotive device and cameraId is a system-only
+     * camera whose AUTOMOTIVE_LOCATION characteristic is one of
+     * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT, AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
+     * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
+     */
+    bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
+
     /////////////////////////////////////////////////////////////////////
     // CameraClient functionality
 
-    class BasicClient : public virtual RefBase {
+    class BasicClient :
+        public virtual RefBase,
+        public AttributionAndPermissionUtilsEncapsulator {
     friend class CameraService;
     public:
         virtual status_t       initialize(sp<CameraProviderManager> manager,
@@ -329,7 +359,7 @@
         }
 
         bool getOverrideToPortrait() const {
-            return mOverrideToPortrait;
+            return mRotationOverride == ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
         }
 
         // Disallows dumping over binder interface
@@ -423,6 +453,7 @@
     protected:
         BasicClient(const sp<CameraService>& cameraService,
                 const sp<IBinder>& remoteCallback,
+                std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
                 const std::string& clientPackageName,
                 bool nativeClient,
                 const std::optional<std::string>& clientFeatureId,
@@ -432,7 +463,7 @@
                 int clientPid,
                 uid_t clientUid,
                 int servicePid,
-                bool overrideToPortrait);
+                int rotationOverride);
 
         virtual ~BasicClient();
 
@@ -455,7 +486,7 @@
         const pid_t                     mServicePid;
         bool                            mDisconnected;
         bool                            mUidIsTrusted;
-        bool                            mOverrideToPortrait;
+        int                             mRotationOverride;
 
         mutable Mutex                   mAudioRestrictionLock;
         int32_t                         mAudioRestriction;
@@ -536,6 +567,7 @@
         // Interface used by CameraService
         Client(const sp<CameraService>& cameraService,
                 const sp<hardware::ICameraClient>& cameraClient,
+                std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
                 const std::string& clientPackageName,
                 bool systemNativeClient,
                 const std::optional<std::string>& clientFeatureId,
@@ -546,7 +578,7 @@
                 int clientPid,
                 uid_t clientUid,
                 int servicePid,
-                bool overrideToPortrait);
+                int rotationOverride);
         ~Client();
 
         // return our camera client
@@ -639,13 +671,6 @@
     int32_t updateAudioRestrictionLocked();
 
 private:
-    /**
-     * Returns true if the device is an automotive device and cameraId is system
-     * only camera which has characteristic AUTOMOTIVE_LOCATION value as either
-     * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT,AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
-     * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
-     */
-    bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
 
     // TODO: b/263304156 update this to make use of a death callback for more
     // robust/fault tolerant logging
@@ -661,27 +686,26 @@
         return activityManager;
     }
 
+    static const sp<IPermissionController>& getPermissionController() {
+        static const char* kPermissionControllerService = "permission";
+        static thread_local sp<IPermissionController> sPermissionController = nullptr;
+
+        if (sPermissionController == nullptr ||
+                !IInterface::asBinder(sPermissionController)->isBinderAlive()) {
+            sp<IServiceManager> sm = defaultServiceManager();
+            sp<IBinder> binder = sm->checkService(toString16(kPermissionControllerService));
+            if (binder == nullptr) {
+                ALOGE("%s: Could not get permission service", __FUNCTION__);
+                sPermissionController = nullptr;
+            } else {
+                sPermissionController = interface_cast<IPermissionController>(binder);
+            }
+        }
+
+        return sPermissionController;
+    }
+
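getPermissionController() caches the binder handle per thread and re-resolves it only when the cached proxy's binder is no longer alive, so repeated lookups avoid a service-manager round trip. A hedged usage sketch, written as if it sat next to the helper above and assuming IPermissionController exposes getPackagesForUid as in the AOSP binder headers (error handling trimmed):

    // Illustrative only: resolve the package names for a calling uid via the
    // cached controller. Returns an empty vector if the service is unavailable.
    static Vector<String16> packagesForUid(uid_t uid) {
        Vector<String16> packages;
        const sp<IPermissionController>& controller = getPermissionController();
        if (controller != nullptr) {
            controller->getPackagesForUid(uid, packages);
        }
        return packages;
    }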
     /**
-     * Pre-grants the permission if the attribution source uid is for an automotive
-     * privileged client. Otherwise uses system service permission checker to check
-     * for the appropriate permission. If this function is called for accessing a specific
-     * camera,then the cameraID must not be empty. CameraId is used only in case of automotive
-     * privileged client so that permission is pre-granted only to access system camera device
-     * which is located outside of the vehicle body frame because camera located inside the vehicle
-     * cabin would need user permission.
-     */
-    bool checkPermission(const std::string& cameraId, const std::string& permission,
-            const content::AttributionSourceState& attributionSource, const std::string& message,
-            int32_t attributedOpCode) const;
-
-    bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid)
-            const;
-
-    bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
-            int callingUid) const;
-
-    bool hasCameraPermissions() const;
-   /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
      */
@@ -865,19 +889,27 @@
     // prevented from accessing the camera.
     class SensorPrivacyPolicy : public hardware::BnSensorPrivacyListener,
             public virtual IBinder::DeathRecipient,
-            public virtual IServiceManager::LocalRegistrationCallback {
+            public virtual IServiceManager::LocalRegistrationCallback,
+            public AttributionAndPermissionUtilsEncapsulator {
         public:
-            explicit SensorPrivacyPolicy(wp<CameraService> service)
-                    : mService(service), mSensorPrivacyEnabled(false), mRegistered(false) {}
+            explicit SensorPrivacyPolicy(wp<CameraService> service,
+                    std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
+                    : AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
+                      mService(service),
+                      mSensorPrivacyEnabled(false),
+                      mCameraPrivacyState(SensorPrivacyManager::DISABLED),
+                      mRegistered(false) {}
 
             void registerSelf();
             void unregisterSelf();
 
             bool isSensorPrivacyEnabled();
             bool isCameraPrivacyEnabled();
+            int getCameraPrivacyState();
+            bool isCameraPrivacyEnabled(const String16& packageName);
 
             binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
                                                   bool enabled);
+            binder::Status onSensorPrivacyStateChanged(int toggleType, int sensor, int state);
 
             // Implementation of IServiceManager::LocalRegistrationCallback
             virtual void onServiceRegistration(const String16& name,
@@ -890,6 +922,7 @@
             wp<CameraService> mService;
             Mutex mSensorPrivacyLock;
             bool mSensorPrivacyEnabled;
+            int mCameraPrivacyState;
             bool mRegistered;
 
             bool hasCameraPrivacyFeature();
@@ -914,17 +947,20 @@
     void removeStates(const std::string& id);
 
     // Check if we can connect, before we acquire the service lock.
-    // The returned originalClientPid is the PID of the original process that wants to connect to
-    // camera.
-    // The returned clientPid is the PID of the client that directly connects to camera.
-    // originalClientPid and clientPid are usually the same except when the application uses
-    // mediaserver to connect to camera (using MediaRecorder to connect to camera). In that case,
-    // clientPid is the PID of mediaserver and originalClientPid is the PID of the application.
+    // If clientPid/clientUid are USE_CALLING_PID/USE_CALLING_UID, they will be overwritten with
+    // the calling pid/uid.
     binder::Status validateConnectLocked(const std::string& cameraId, const std::string& clientName,
-            /*inout*/int& clientUid, /*inout*/int& clientPid, /*out*/int& originalClientPid) const;
+            /*inout*/int& clientUid, /*inout*/int& clientPid) const;
     binder::Status validateClientPermissionsLocked(const std::string& cameraId,
-            const std::string& clientName, /*inout*/int& clientUid, /*inout*/int& clientPid,
-            /*out*/int& originalClientPid) const;
+            const std::string& clientName, /*inout*/int& clientUid, /*inout*/int& clientPid) const;
+
+    // If clientPackageNameMaybe is empty, attempts to resolve the package name.
+    std::string resolvePackageName(int clientUid, const std::string& clientPackageNameMaybe) const;
+    void logConnectionAttempt(int clientPid, const std::string& clientPackageName,
+        const std::string& cameraId, apiLevel effectiveApiLevel) const;
+
+    bool isCameraPrivacyEnabled(const String16& packageName, const std::string& cameraId,
+            int clientPid, int clientUid);
 
     // Handle active client evictions, and update service state.
     // Only call with with mServiceLock held.
@@ -966,15 +1002,16 @@
     // as for legacy apps we will toggle the app op for all packages in the UID.
     // The caveat is that the operation may be attributed to the wrong package and
     // stats based on app ops may be slightly off.
-    std::string getPackageNameFromUid(int clientUid);
+    std::string getPackageNameFromUid(int clientUid) const;
 
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
     binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
-            int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
+            int api1CameraId, const std::string& clientPackageName, bool systemNativeClient,
             const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
+            int rotationOverride, bool forceSlowJpegMode,
+            const std::string& originalCameraId, bool isNonSystemNdk,
             /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
@@ -996,6 +1033,10 @@
     // Adds client logs during closed session to the file pointed by fd.
     void dumpClosedSessionClientLogs(int fd, const std::string& cameraId);
 
+    binder::Status isSessionConfigurationWithParametersSupportedUnsafe(
+            const std::string& cameraId, const SessionConfiguration& sessionConfiguration,
+            bool overrideForPerfClass, /*out*/ bool* supported);
+
     // Mapping from camera ID -> state for each device, map is protected by mCameraStatesLock
     std::map<std::string, std::shared_ptr<CameraState>> mCameraStates;
 
@@ -1003,44 +1044,18 @@
     mutable Mutex mCameraStatesLock;
 
     /**
-     * Mapping from packageName -> {cameraIdToReplace -> newCameraIdtoUse}.
+     * Resolve the (potentially remapped) camera id for the given input camera id and the given
+     * device id and device policy (for the device associated with the context of the caller).
      *
-     * This specifies that for packageName, for every binder operation targeting
-     * cameraIdToReplace, use newCameraIdToUse instead.
+     * For any context associated with a virtual device with custom camera policy, this will return
+     * the actual camera id if inputCameraId corresponds to the mapped id of a virtual camera
+     * (for virtual devices with custom camera policy, the back and front virtual cameras of that
+     * device would have 0 and 1 respectively as their mapped camera id).
      */
-    typedef std::map<std::string, std::map<std::string, std::string>> TCameraIdRemapping;
-    TCameraIdRemapping mCameraIdRemapping{};
-    /** Mutex guarding mCameraIdRemapping. */
-    Mutex mCameraIdRemappingLock;
-
-    /** Parses cameraIdRemapping parcelable into the native cameraIdRemappingMap. */
-    binder::Status parseCameraIdRemapping(
-            const hardware::CameraIdRemapping& cameraIdRemapping,
-            /* out */ TCameraIdRemapping* cameraIdRemappingMap);
-
-    /**
-     * Resolve the (potentially remapped) camera Id to use for packageName.
-     *
-     * This returns the Camera Id to use in case inputCameraId was remapped to a
-     * different Id for the given packageName. Otherwise, it returns the inputCameraId.
-     *
-     * If the packageName is not provided, it will be inferred from the clientUid.
-     */
-    std::string resolveCameraId(
+    std::optional<std::string> resolveCameraId(
             const std::string& inputCameraId,
-            int clientUid,
-            const std::string& packageName = "");
-
-    /**
-     * Updates the state of mCameraIdRemapping, while disconnecting active clients as necessary.
-     */
-    void remapCameraIds(const TCameraIdRemapping& cameraIdRemapping);
-
-    /**
-     * Finds the Camera Ids that were remapped to the inputCameraId for the given client.
-     */
-    std::vector<std::string> findOriginalIdsForRemappedCameraId(
-        const std::string& inputCameraId, int clientUid);
+            int32_t deviceId,
+            int32_t devicePolicy);
 
     // Circular buffer for storing event logging for dumps
     RingBuffer<std::string> mEventLog;
@@ -1096,13 +1111,13 @@
      * Returns the underlying camera Id string mapped to a camera id int
      * Empty string is returned when the cameraIdInt is invalid.
      */
-    std::string cameraIdIntToStr(int cameraIdInt);
+    std::string cameraIdIntToStr(int cameraIdInt, int32_t deviceId, int32_t devicePolicy);
 
     /**
      * Returns the underlying camera Id string mapped to a camera id int
      * Empty string is returned when the cameraIdInt is invalid.
      */
-    std::string cameraIdIntToStrLocked(int cameraIdInt);
+    std::string cameraIdIntToStrLocked(int cameraIdInt, int32_t deviceId, int32_t devicePolicy);
 
     /**
      * Remove a single client corresponding to the given camera id from the list of active clients.
@@ -1302,6 +1317,8 @@
      *
      * This method must be idempotent.
      * This method acquires mStatusLock and mStatusListenerLock.
+     * For any virtual camera, this method must pass its mapped camera id and device id to
+     * ICameraServiceListeners (using mVirtualDeviceCameraIdMapper).
      */
     void updateStatus(StatusInternal status,
             const std::string& cameraId,
@@ -1355,7 +1372,8 @@
     // notify physical camera status when the physical camera is public.
     // Expects mStatusListenerLock to be locked.
     void notifyPhysicalCameraStatusLocked(int32_t status, const std::string& physicalCameraId,
-            const std::list<std::string>& logicalCameraIds, SystemCameraKind deviceKind);
+            const std::list<std::string>& logicalCameraIds, SystemCameraKind deviceKind,
+            int32_t virtualDeviceId);
 
     // get list of logical cameras which are backed by physicalCameraId
     std::list<std::string> getLogicalCameras(const std::string& physicalCameraId);
@@ -1385,6 +1403,9 @@
     // Blocks all active clients.
     void blockAllClients();
 
+    // Blocks clients whose privacy is enabled.
+    void blockPrivacyEnabledClients();
+
     // Overrides the UID state as if it is idle
     status_t handleSetUidState(const Vector<String16>& args, int err);
 
@@ -1467,6 +1488,12 @@
     // responsibility to acquire mLogLock before calling this functions.
     bool isClientWatchedLocked(const BasicClient *client);
 
+    // Filters out fingerprintable keys if the calling process does not have CAMERA permission.
+    // Note: function caller should ensure that shouldRejectSystemCameraConnection is checked
+    // for the calling process before calling this function.
+    binder::Status filterSensitiveMetadataIfNeeded(const std::string& cameraId,
+                                                   CameraMetadata* metadata);
+
     /**
      * Get the current system time as a formatted string.
      */
@@ -1478,7 +1505,7 @@
             const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
-            bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
+            bool overrideForPerfClass, int rotationOverride, bool forceSlowJpegMode,
             const std::string& originalCameraId,
             /*out*/ sp<BasicClient>* client);
 
@@ -1588,6 +1615,8 @@
     int64_t mDeviceState;
 
     void updateTorchUidMapLocked(const std::string& cameraId, int uid);
+
+    VirtualDeviceCameraIdMapper mVirtualDeviceCameraIdMapper;
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index afc432d..165dece 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -26,7 +26,7 @@
  *   and single call monitoring differently. See function documentation for
  *   more details.
  * To disable/enable:
- *   - adb shell cmd media.camera set-cameraservice-watchdog [0/1]
+ *   - adb shell cmd media.camera set-watchdog [0/1]
  */
 #pragma once
 #include <chrono>
diff --git a/services/camera/libcameraservice/TEST_MAPPING b/services/camera/libcameraservice/TEST_MAPPING
index ca6cc58..6257aee 100644
--- a/services/camera/libcameraservice/TEST_MAPPING
+++ b/services/camera/libcameraservice/TEST_MAPPING
@@ -4,6 +4,17 @@
       "name": "cameraservice_test"
     }
   ],
+  "postsubmit": [
+    {
+      "name": "CtsVirtualDevicesCameraTestCases",
+      "options": [
+        {
+          "exclude-annotation": "androidx.test.filters.FlakyTest"
+        }
+      ],
+      "keywords": ["primary-device"]
+    }
+  ],
   "imports": [
     {
       "path": "frameworks/av/camera"
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 79dbfed..7f674bd 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -26,7 +26,9 @@
 #include <android/binder_ibinder.h>
 #include <android/binder_manager.h>
 #include <binder/Status.h>
+#include <camera/CameraUtils.h>
 #include <hidl/HidlTransportSupport.h>
+#include <utils/AttributionAndPermissionUtils.h>
 #include <utils/Utils.h>
 
 namespace android::frameworks::cameraservice::service::implementation {
@@ -37,6 +39,7 @@
 using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
 using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
 using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+using hardware::BnCameraService::ROTATION_OVERRIDE_NONE;
 using ::ndk::ScopedAStatus;
 
 // VNDK classes
@@ -87,9 +90,16 @@
     if (_aidl_return == nullptr) { return fromSStatus(SStatus::ILLEGAL_ARGUMENT); }
 
     ::android::CameraMetadata cameraMetadata;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
     UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
                                                            mVndkVersion,
-                                                           /* overrideToPortrait= */ false,
+                                                           ROTATION_OVERRIDE_NONE,
+                                                           clientAttribution,
+                                                           /* devicePolicy= */ 0,
                                                            &cameraMetadata);
     if (!ret.isOk()) {
         if (ret.exceptionCode() != EX_SERVICE_SPECIFIC) {
@@ -139,15 +149,21 @@
         return fromSStatus(SStatus::UNKNOWN_ERROR);
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
     binder::Status serviceRet = mCameraService->connectDevice(
             callbacks,
             in_cameraId,
-            std::string(),
-            /* clientFeatureId= */{},
-            hardware::ICameraService::USE_CALLING_UID,
             /* scoreOffset= */ 0,
             /* targetSdkVersion= */ __ANDROID_API_FUTURE__,
-            /* overrideToPortrait= */ false,
+            ROTATION_OVERRIDE_NONE,
+            clientAttribution,
+            /* devicePolicy= */ 0,
             &unstableDevice);
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device: %s", __FUNCTION__,
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
index d7ab0d9..dc5c7f5 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
@@ -18,6 +18,7 @@
 #include <aidl/AidlUtils.h>
 #include <aidl/android/frameworks/cameraservice/common/Status.h>
 #include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 
 namespace android::frameworks::cameraservice::service::implementation {
@@ -28,7 +29,10 @@
 using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
 
 binder::Status AidlCameraServiceListener::onStatusChanged(
-        int32_t status, const std::string& cameraId) {
+        int32_t status, const std::string& cameraId, int32_t deviceId) {
+    if (deviceId != kDefaultDeviceId) {
+        return binder::Status::ok();
+    }
     SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
     auto ret = mBase->onStatusChanged(sStatus, cameraId);
     LOG_STATUS_ERROR_IF_NOT_OK(ret, "onStatusChanged")
@@ -37,7 +41,10 @@
 
 binder::Status AidlCameraServiceListener::onPhysicalCameraStatusChanged(
         int32_t status, const std::string& cameraId,
-        const std::string& physicalCameraId) {
+        const std::string& physicalCameraId, int32_t deviceId) {
+    if (deviceId != kDefaultDeviceId) {
+        return binder::Status::ok();
+    }
     SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
 
     auto ret = mBase->onPhysicalCameraStatusChanged(sStatus, cameraId, physicalCameraId);
@@ -46,20 +53,22 @@
 }
 
 ::android::binder::Status AidlCameraServiceListener::onTorchStatusChanged(
-    int32_t, const std::string&) {
+    [[maybe_unused]] int32_t, [[maybe_unused]] const std::string&, int32_t) {
   // We don't implement onTorchStatusChanged
   return binder::Status::ok();
 }
 
 ::android::binder::Status AidlCameraServiceListener::onTorchStrengthLevelChanged(
-    const std::string&, int32_t) {
+    [[maybe_unused]] const std::string&, [[maybe_unused]] int32_t, [[maybe_unused]] int32_t) {
     // We don't implement onTorchStrengthLevelChanged
     return binder::Status::ok();
 }
+
 status_t AidlCameraServiceListener::linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
                                                 uint32_t flags) {
     return mDeathPipe.linkToDeath(recipient, cookie, flags);
 }
+
 status_t AidlCameraServiceListener::unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie,
                                                   uint32_t flags,
                                                   wp<DeathRecipient>* outRecipient) {
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
index 6483fe1..a7c32e3 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
@@ -45,25 +45,28 @@
     ~AidlCameraServiceListener() = default;
 
     ::android::binder::Status onStatusChanged(int32_t status,
-            const std::string& cameraId) override;
+            const std::string& cameraId, int32_t deviceId) override;
     ::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
             const std::string& cameraId,
-            const std::string& physicalCameraId) override;
+            const std::string& physicalCameraId,
+            int32_t deviceId) override;
 
     ::android::binder::Status onTorchStatusChanged(
-            int32_t status, const std::string& cameraId) override;
+            int32_t status, const std::string& cameraId, int32_t deviceId) override;
     ::android::binder::Status onTorchStrengthLevelChanged(
-            const std::string& cameraId, int32_t newStrengthLevel) override;
+            const std::string& cameraId, int32_t newStrengthLevel, int32_t deviceId) override;
     binder::Status onCameraAccessPrioritiesChanged() override {
         // TODO: no implementation yet.
         return binder::Status::ok();
     }
-    binder::Status onCameraOpened(const std::string& /*cameraId*/,
-            const std::string& /*clientPackageId*/) override {
+    binder::Status onCameraOpened([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] const std::string& /*clientPackageId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         // empty implementation
         return binder::Status::ok();
     }
-    binder::Status onCameraClosed(const std::string& /*cameraId*/) override {
+    binder::Status onCameraClosed([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         // empty implementation
         return binder::Status::ok();
     }
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index f2d1414..1ec5072 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -15,14 +15,18 @@
  */
 
 #define LOG_TAG "AidlUtils"
+//#define LOG_NDEBUG 0
 
 #include <aidl/AidlUtils.h>
+#include <aidl/ExtensionMetadataTags.h>
+#include <aidl/SessionCharacteristicsTags.h>
 #include <aidl/VndkVersionMetadataTags.h>
 #include <aidlcommonsupport/NativeHandle.h>
+#include <camera/StringUtils.h>
 #include <device3/Camera3StreamInterface.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <mediautils/AImageReaderUtils.h>
-#include <camera/StringUtils.h>
+#include "utils/Utils.h"
 
 namespace android::hardware::cameraservice::utils::conversion::aidl {
 
@@ -312,6 +316,7 @@
     if (vndkVersion == __ANDROID_API_FUTURE__) {
         // VNDK version derived from ro.board.api_level is a version code-name that
         // corresponds to the current SDK version.
+        ALOGV("%s: VNDK version is API FUTURE, not filtering any keys", __FUNCTION__);
         return OK;
     }
     const auto &apiLevelToKeys =
@@ -320,9 +325,14 @@
     // versions above the given one, need to have their keys filtered from the
     // metadata in order to avoid metadata invalidation.
     auto it = apiLevelToKeys.upper_bound(vndkVersion);
+    ALOGV("%s: VNDK version for filtering is %d", __FUNCTION__ , vndkVersion);
     while (it != apiLevelToKeys.end()) {
         for (const auto &key : it->second) {
             status_t res = metadata.erase(key);
+            // Should be okay to not use get_local_camera_metadata_tag_name
+            // since we're not filtering vendor tags
+            ALOGV("%s: Metadata key being filtered is %s", __FUNCTION__ ,
+                    get_camera_metadata_tag_name(key));
             if (res != OK) {
                 ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
                 return res;
@@ -333,4 +343,90 @@
     return OK;
 }
 
+status_t copySessionCharacteristics(const CameraMetadata& from, CameraMetadata* to,
+                                    int queryVersion) {
+    // Ensure the vendor IDs are the same before attempting
+    // anything else. If vendor IDs differ we cannot safely copy the characteristics.
+    if (from.getVendorId() != to->getVendorId()) {
+        ALOGE("%s: Incompatible CameraMetadata objects. Vendor IDs differ. From: %" PRIu64
+              "; To: %" PRIu64, __FUNCTION__, from.getVendorId(), to->getVendorId());
+        return BAD_VALUE;
+    }
+
+    // Allow public tags according to the queryVersion
+    std::unordered_set<uint32_t> validPublicTags;
+    auto last = api_level_to_session_characteristic_keys.upper_bound(queryVersion);
+    for (auto it = api_level_to_session_characteristic_keys.begin(); it != last; it++) {
+        validPublicTags.insert(it->second.cbegin(), it->second.cend());
+    }
+
+    const camera_metadata_t* src = from.getAndLock();
+    camera_metadata_ro_entry_t entry{};
+    for (size_t i = 0; i < get_camera_metadata_entry_count(src); i++) {
+        int ret = get_camera_metadata_ro_entry(src, i, &entry);
+        if (ret != OK) {
+            ALOGE("%s: Could not fetch entry at index %zu. Error: %d", __FUNCTION__, i, ret);
+            from.unlock(src);
+            return BAD_VALUE;
+        }
+
+        if (entry.tag < (uint32_t)VENDOR_SECTION_START &&
+                validPublicTags.find(entry.tag) == validPublicTags.end()) {
+            ALOGI("%s: Session Characteristics contains tag %s but not supported by query version "
+                  "(%d)",
+                  __FUNCTION__, get_camera_metadata_tag_name(entry.tag), queryVersion);
+            continue;
+        }
+
+        // The entry is either a vendor tag, or a valid session characteristic key.
+        // Copy over the value
+        to->update(entry);
+    }
+    from.unlock(src);
+    return OK;
+}
+
+bool areExtensionKeysSupported(const CameraMetadata& metadata) {
+    auto requestKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+    if (requestKeys.count == 0) {
+        ALOGE("%s: No ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS entries!", __FUNCTION__);
+        return false;
+    }
+
+    auto resultKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
+    if (resultKeys.count == 0) {
+        ALOGE("%s: No ANDROID_REQUEST_AVAILABLE_RESULT_KEYS entries!", __FUNCTION__);
+        return false;
+    }
+
+    for (const auto& extensionKey : extension_metadata_keys) {
+        if (std::find(requestKeys.data.i32, requestKeys.data.i32 + requestKeys.count, extensionKey)
+                != requestKeys.data.i32 + requestKeys.count) {
+            return true;
+        }
+
+        if (std::find(resultKeys.data.i32, resultKeys.data.i32 + resultKeys.count, extensionKey)
+                != resultKeys.data.i32 + resultKeys.count) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+status_t filterExtensionKeys(CameraMetadata* metadata /*out*/) {
+    if (metadata == nullptr) {
+        return BAD_VALUE;
+    }
+
+    for (const auto& key : extension_metadata_keys) {
+        status_t res = metadata->erase(key);
+        if (res != OK) {
+            ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
+            return res;
+        }
+    }
+    return OK;
+}
+
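Taken together, areExtensionKeysSupported() and filterExtensionKeys() let a caller strip extension tags before handing characteristics to a client that cannot interpret them. A minimal caller-side sketch, assuming the helpers above; the clientSupportsExtensions flag is hypothetical and would be derived elsewhere:

    // Illustrative only: remove extension tags unless the client understands them.
    static status_t sanitizeForClient(CameraMetadata* characteristics,
                                      bool clientSupportsExtensions) {
        if (characteristics == nullptr) return BAD_VALUE;
        if (!clientSupportsExtensions && areExtensionKeysSupported(*characteristics)) {
            // The camera advertises extension keys but this client cannot use them.
            return filterExtensionKeys(characteristics);
        }
        return OK;
    }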
 } // namespace android::hardware::cameraservice::utils::conversion::aidl
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.h b/services/camera/libcameraservice/aidl/AidlUtils.h
index c89d7ff..92e878e 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.h
+++ b/services/camera/libcameraservice/aidl/AidlUtils.h
@@ -122,6 +122,12 @@
 
 status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic = true);
 
+status_t copySessionCharacteristics(const CameraMetadata& from, CameraMetadata* to,
+                                    int queryVersion);
+
+bool areExtensionKeysSupported(const CameraMetadata& metadata);
+
+status_t filterExtensionKeys(CameraMetadata* metadata /*out*/);
 } // namespace android::hardware::cameraservice::utils::conversion::aidl
 
 #endif  // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
diff --git a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
new file mode 100644
index 0000000..86af36c
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <vector>
+#pragma once
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from extensions_camera_metadata_tags.mako. To be included in libcameraservice
+ * only by aidl/AidlUtils.cpp.
+ */
+
+/**
+ * Extension-specific metadata keys. Used by aidl/AidlUtils.cpp to check whether a camera
+ * advertises extension keys and to filter those keys out of metadata when needed.
+ */
+std::vector<camera_metadata_tag> extension_metadata_keys{
+            ANDROID_EXTENSION_STRENGTH,
+            ANDROID_EXTENSION_CURRENT_TYPE,
+            ANDROID_EFV_PADDING_ZOOM_FACTOR,
+            ANDROID_EFV_AUTO_ZOOM,
+            ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
+            ANDROID_EFV_STABILIZATION_MODE,
+            ANDROID_EFV_TRANSLATE_VIEWPORT,
+            ANDROID_EFV_ROTATE_VIEWPORT,
+            ANDROID_EFV_PADDING_REGION,
+            ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
+            ANDROID_EFV_TARGET_COORDINATES,
+};
diff --git a/services/camera/libcameraservice/aidl/SessionCharacteristicsTags.h b/services/camera/libcameraservice/aidl/SessionCharacteristicsTags.h
new file mode 100644
index 0000000..cefb8a6
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/SessionCharacteristicsTags.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <map>
+#include <vector>
+#pragma once
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from session_characteristics_tags.mako. To be included in
+ * libcameraservice only by aidl/AidlUtils.cpp.
+ */
+
+/**
+ * Mapping of session characteristics to the INFO_SESSION_CONFIGURATION_QUERY_VERSION value
+ * at which they were introduced.
+ */
+std::map<int, std::vector<camera_metadata_tag>> api_level_to_session_characteristic_keys {
+        {35,
+         {
+                 ANDROID_CONTROL_ZOOM_RATIO_RANGE,
+                 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+         }},
+};
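
A minimal sketch of how copySessionCharacteristics() (declared in AidlUtils.h above) could
consult this map, assuming the CameraMetadata find()/update() entry APIs; the actual
implementation lives in AidlUtils.cpp and may differ.

    // Sketch only: copy every session characteristic whose introduction version is at or
    // below the client's queryVersion.
    status_t copySessionCharacteristicsSketch(const CameraMetadata& from, CameraMetadata* to,
                                              int queryVersion) {
        if (to == nullptr) return BAD_VALUE;
        for (const auto& [introducedIn, tags] : api_level_to_session_characteristic_keys) {
            if (introducedIn > queryVersion) continue;  // not visible at this query version
            for (camera_metadata_tag tag : tags) {
                camera_metadata_ro_entry entry = from.find(tag);
                if (entry.count == 0) continue;         // source doesn't report this tag
                status_t res = to->update(entry);
                if (res != OK) return res;
            }
        }
        return OK;
    }
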
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index e403b97..0e1db5c 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -77,11 +77,6 @@
       {34, {
           ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
           ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
-          ANDROID_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE,
-          ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
-          ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
-          ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
-          ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
           ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS,
           ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS,
@@ -90,6 +85,15 @@
           ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
         } },
+      {35, {
+          ANDROID_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE,
+          ANDROID_EFV_PADDING_ZOOM_FACTOR_RANGE,
+          ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
+          ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
+          ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
+          ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
+          ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION,
+        } },
 };
 
 /**
@@ -109,14 +113,25 @@
       {34, {
           ANDROID_CONTROL_AUTOFRAMING,
           ANDROID_CONTROL_AUTOFRAMING_STATE,
-          ANDROID_CONTROL_LOW_LIGHT_BOOST_STATE,
           ANDROID_CONTROL_SETTINGS_OVERRIDE,
           ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
           ANDROID_EXTENSION_CURRENT_TYPE,
           ANDROID_EXTENSION_STRENGTH,
+          ANDROID_SCALER_RAW_CROP_REGION,
+        }  },
+      {35, {
+          ANDROID_CONTROL_LOW_LIGHT_BOOST_STATE,
+          ANDROID_EFV_AUTO_ZOOM,
+          ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
+          ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
+          ANDROID_EFV_PADDING_REGION,
+          ANDROID_EFV_PADDING_ZOOM_FACTOR,
+          ANDROID_EFV_ROTATE_VIEWPORT,
+          ANDROID_EFV_STABILIZATION_MODE,
+          ANDROID_EFV_TARGET_COORDINATES,
+          ANDROID_EFV_TRANSLATE_VIEWPORT,
           ANDROID_FLASH_STRENGTH_LEVEL,
           ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION,
-          ANDROID_SCALER_RAW_CROP_REGION,
           ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES,
           ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS,
         }  },
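
A minimal sketch of how filterVndkKeys() could apply these per-API-level tables, assuming
tags introduced after the client's VNDK version are simply erased; the apiLevelToKeys
parameter stands in for either the static or the dynamic map above, and the helper name is
illustrative.

    // Sketch only: erase every tag introduced after the client's VNDK version.
    status_t filterVndkKeysSketch(int vndkVersion, CameraMetadata& metadata,
            const std::map<int, std::vector<camera_metadata_tag>>& apiLevelToKeys) {
        for (const auto& [introducedIn, keys] : apiLevelToKeys) {
            if (introducedIn <= vndkVersion) continue;  // client is new enough to see these
            for (camera_metadata_tag key : keys) {
                status_t res = metadata.erase(key);
                if (res != OK) return res;
            }
        }
        return OK;
    }
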
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index caa6424..861414f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -24,11 +24,12 @@
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
+#include <com_android_internal_camera_flags.h>
 #include <cutils/properties.h>
 #include <gui/Surface.h>
-#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 
 #include "api1/Camera2Client.h"
 
@@ -38,7 +39,6 @@
 #include "api1/client2/CallbackProcessor.h"
 #include "api1/client2/ZslProcessor.h"
 #include "device3/RotateAndCropMapper.h"
-#include "utils/CameraThreadState.h"
 #include "utils/CameraServiceProxyWrapper.h"
 
 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
@@ -51,11 +51,14 @@
 namespace android {
 using namespace camera2;
 
+namespace flags = com::android::internal::camera::flags;
+
 // Interface used by CameraService
 
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         const sp<hardware::ICameraClient>& cameraClient,
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName,
         const std::optional<std::string>& clientFeatureId,
         const std::string& cameraDeviceId,
@@ -66,12 +69,13 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool forceSlowJpegMode):
-        Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
+        Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
+                attributionAndPermissionUtils, clientPackageName,
                 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                clientUid, servicePid, overrideForPerfClass, rotationOverride,
                 /*legacyClient*/ true),
         mParameters(api1CameraId, cameraFacing),
         mLatestRequestIds(kMaxRequestIds),
@@ -419,7 +423,7 @@
         result << "    none\n";
     }
 
-    std::string resultStr = std::move(result.str());
+    std::string resultStr = result.str();
 
     write(fd, resultStr.c_str(), resultStr.size());
 
@@ -444,7 +448,7 @@
 
     binder::Status res = binder::Status::ok();
     // Allow both client and the cameraserver to disconnect at all times
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
     if (callingPid != mClientPid && callingPid != mServicePid) return res;
 
     if (mDevice == 0) return res;
@@ -500,7 +504,16 @@
     bool hasDeviceError = mDevice->hasDeviceError();
     mDevice->disconnect();
 
-    CameraService::Client::disconnect();
+    if (flags::api1_release_binderlock_before_cameraservice_disconnect()) {
+        // CameraService::Client::disconnect() calls into CameraService, which attempts to
+        // lock CameraService's mServiceLock. This can deadlock if another thread already
+        // holds mServiceLock and is waiting to acquire this client's mBinderSerializationLock.
+        mBinderSerializationLock.unlock();
+        CameraService::Client::disconnect();
+        mBinderSerializationLock.lock();
+    } else {
+        CameraService::Client::disconnect();
+    }
 
     int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
     mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
@@ -513,14 +526,14 @@
     ALOGV("%s: E", __FUNCTION__);
     Mutex::Autolock icl(mBinderSerializationLock);
 
-    if (mClientPid != 0 && CameraThreadState::getCallingPid() != mClientPid) {
+    if (mClientPid != 0 && getCallingPid() != mClientPid) {
         ALOGE("%s: Camera %d: Connection attempt from pid %d; "
                 "current locked to pid %d", __FUNCTION__,
-                mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+                mCameraId, getCallingPid(), mClientPid);
         return BAD_VALUE;
     }
 
-    mClientPid = CameraThreadState::getCallingPid();
+    mClientPid = getCallingPid();
 
     mRemoteCallback = client;
     mSharedCameraCallbacks = client;
@@ -533,16 +546,16 @@
     ALOGV("%s: E", __FUNCTION__);
     Mutex::Autolock icl(mBinderSerializationLock);
     ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
-            __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+            __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
 
     if (mClientPid == 0) {
-        mClientPid = CameraThreadState::getCallingPid();
+        mClientPid = getCallingPid();
         return OK;
     }
 
-    if (mClientPid != CameraThreadState::getCallingPid()) {
+    if (mClientPid != getCallingPid()) {
         ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
-                __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+                __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
         return EBUSY;
     }
 
@@ -554,9 +567,9 @@
     ALOGV("%s: E", __FUNCTION__);
     Mutex::Autolock icl(mBinderSerializationLock);
     ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
-            __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+            __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
 
-    if (mClientPid == CameraThreadState::getCallingPid()) {
+    if (mClientPid == getCallingPid()) {
         SharedParameters::Lock l(mParameters);
         if (l.mParameters.state == Parameters::RECORD ||
                 l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
@@ -570,7 +583,7 @@
     }
 
     ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
-            __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+            __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
     return EBUSY;
 }
 
@@ -1460,6 +1473,11 @@
     int triggerId;
     {
         SharedParameters::Lock l(mParameters);
+        if (l.mParameters.state == Parameters::DISCONNECTED) {
+            ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
+            return INVALID_OPERATION;
+        }
+
         // Canceling does nothing in FIXED or INFINITY modes
         if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
                 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
@@ -1644,7 +1662,7 @@
     ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
     Mutex::Autolock icl(mBinderSerializationLock);
     // The camera service can unconditionally get the parameters at all times
-    if (CameraThreadState::getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
+    if (getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
 
     SharedParameters::ReadLock l(mParameters);
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 2cb7af0..a0c9f2d 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -103,6 +103,7 @@
     Camera2Client(const sp<CameraService>& cameraService,
             const sp<hardware::ICameraClient>& cameraClient,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& clientPackageName,
             const std::optional<std::string>& clientFeatureId,
             const std::string& cameraDeviceId,
@@ -113,7 +114,7 @@
             uid_t clientUid,
             int servicePid,
             bool overrideForPerfClass,
-            bool overrideToPortrait,
+            int rotationOverride,
             bool forceSlowJpegMode);
 
     virtual ~Camera2Client();
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 17db20b..2fbf49e 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -18,9 +18,10 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <gui/Surface.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -113,6 +114,12 @@
     if (!mCallbackToApp && mCallbackConsumer == 0) {
         // Create CPU buffer queue endpoint, since app hasn't given us one
         // Make it async to avoid disconnect deadlocks
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mCallbackConsumer = new CpuConsumer(kCallbackHeapCount);
+        mCallbackConsumer->setFrameAvailableListener(this);
+        mCallbackConsumer->setName(String8("Camera2-CallbackConsumer"));
+        mCallbackWindow = mCallbackConsumer->getSurface();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
@@ -120,6 +127,7 @@
         mCallbackConsumer->setFrameAvailableListener(this);
         mCallbackConsumer->setName(String8("Camera2-CallbackConsumer"));
         mCallbackWindow = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
     if (mCallbackStreamId != NO_STREAM) {
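
The same WB_CONSUMER_BASE_OWNS_BQ split recurs in the JPEG, ZSL, and composite-stream hunks
below. A distilled sketch of the two paths, assuming the CpuConsumer constructors used in
these hunks; kBufferCount stands in for each call site's buffer depth.

    #if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
        // Flag on: the consumer creates and owns its BufferQueue internally.
        sp<CpuConsumer> consumer = new CpuConsumer(kBufferCount);
        sp<Surface> window = consumer->getSurface();   // producer end for the camera device
    #else
        // Flag off: create the BufferQueue explicitly and wire both ends by hand.
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> bqConsumer;
        BufferQueue::createBufferQueue(&producer, &bqConsumer);
        sp<CpuConsumer> consumer = new CpuConsumer(bqConsumer, kBufferCount);
        sp<Surface> window = new Surface(producer);
    #endif
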
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index eb00bf8..3a0489c 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -25,9 +25,10 @@
 
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <gui/Surface.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -93,6 +94,12 @@
 
     if (mCaptureConsumer == 0) {
         // Create CPU buffer queue endpoint
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mCaptureConsumer = new CpuConsumer(1);
+        mCaptureConsumer->setFrameAvailableListener(this);
+        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
+        mCaptureWindow = mCaptureConsumer->getSurface();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
@@ -100,6 +107,7 @@
         mCaptureConsumer->setFrameAvailableListener(this);
         mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
         mCaptureWindow = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
     // Since ashmem heaps are rounded up to page size, don't reallocate if
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index d6c2415..d4953c1 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -27,10 +27,11 @@
 
 #include <inttypes.h>
 
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <gui/Surface.h>
-#include <camera/StringUtils.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -250,7 +251,11 @@
     if (mZslStreamId == NO_STREAM) {
         // Create stream for HAL production
         // TODO: Sort out better way to select resolution for ZSL
-
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mProducer = new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL, mBufferQueueDepth);
+        mProducer->setName("Camera2-ZslRingBufferConsumer");
+        sp<Surface> outSurface = mProducer->getSurface();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
@@ -258,6 +263,7 @@
             mBufferQueueDepth);
         mProducer->setName("Camera2-ZslRingBufferConsumer");
         sp<Surface> outSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
             params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 3488629..fcba9bc 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -20,7 +20,6 @@
 
 #include <com_android_internal_camera_flags.h>
 #include <cutils/properties.h>
-#include <utils/CameraThreadState.h>
 #include <utils/Log.h>
 #include <utils/SessionConfigurationUtils.h>
 #include <utils/Trace.h>
@@ -61,6 +60,7 @@
 CameraDeviceClientBase::CameraDeviceClientBase(
         const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName,
         bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId,
@@ -71,9 +71,10 @@
         int clientPid,
         uid_t clientUid,
         int servicePid,
-        bool overrideToPortrait) :
+        int rotationOverride) :
     BasicClient(cameraService,
             IInterface::asBinder(remoteCallback),
+            attributionAndPermissionUtils,
             clientPackageName,
             systemNativeClient,
             clientFeatureId,
@@ -83,7 +84,7 @@
             clientPid,
             clientUid,
             servicePid,
-            overrideToPortrait),
+            rotationOverride),
     mRemoteCallback(remoteCallback) {
 }
 
@@ -92,6 +93,7 @@
 CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName,
         bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId,
@@ -102,12 +104,13 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait,
+        int rotationOverride,
         const std::string& originalCameraId) :
-    Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
+    Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
+            attributionAndPermissionUtils, clientPackageName,
             systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
             sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
-            overrideToPortrait),
+            rotationOverride),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
@@ -292,7 +295,7 @@
     }
 
     List<const CameraDeviceBase::PhysicalCameraSettingsList> metadataRequestList;
-    std::list<const SurfaceMap> surfaceMapList;
+    std::list<SurfaceMap> surfaceMapList;
     submitInfo->mRequestId = mRequestIdCounter;
     uint32_t loopCounter = 0;
 
@@ -534,27 +537,28 @@
 
         // Save certain CaptureRequest settings
         if (!request.mUserTag.empty()) {
-            mUserTag = request.mUserTag;
+            mRunningSessionStats.mUserTag = request.mUserTag;
         }
         camera_metadata_entry entry =
                 physicalSettingsList.begin()->metadata.find(
                         ANDROID_CONTROL_VIDEO_STABILIZATION_MODE);
         if (entry.count == 1) {
-            mVideoStabilizationMode = entry.data.u8[0];
+            mRunningSessionStats.mVideoStabilizationMode = entry.data.u8[0];
         }
-        if (flags::log_ultrawide_usage()) {
+
+        if (!mRunningSessionStats.mUsedUltraWide && flags::log_ultrawide_usage()) {
             entry = physicalSettingsList.begin()->metadata.find(
                     ANDROID_CONTROL_ZOOM_RATIO);
             if (entry.count == 1 && entry.data.f[0] < 1.0f ) {
-                mUsedUltraWide = true;
+                mRunningSessionStats.mUsedUltraWide = true;
             }
         }
-        if (!mUsedSettingsOverrideZoom && flags::log_zoom_override_usage()) {
+        if (!mRunningSessionStats.mUsedSettingsOverrideZoom && flags::log_zoom_override_usage()) {
             entry = physicalSettingsList.begin()->metadata.find(
                     ANDROID_CONTROL_SETTINGS_OVERRIDE);
             if (entry.count == 1 && entry.data.i32[0] ==
                     ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
-                mUsedSettingsOverrideZoom = true;
+                mRunningSessionStats.mUsedSettingsOverrideZoom = true;
             }
         }
     }
@@ -777,60 +781,6 @@
     return res;
 }
 
-binder::Status CameraDeviceClient::getSessionCharacteristics(
-        const SessionConfiguration& sessionConfiguration,
-        /*out*/
-        hardware::camera2::impl::CameraMetadataNative* sessionCharacteristics) {
-    ATRACE_CALL();
-    binder::Status res;
-    status_t ret = OK;
-    if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
-
-    Mutex::Autolock icl(mBinderSerializationLock);
-
-    if (!mDevice.get()) {
-        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
-    }
-
-    auto operatingMode = sessionConfiguration.getOperatingMode();
-    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
-            mCameraIdStr);
-    if (!res.isOk()) {
-        return res;
-    }
-
-    camera3::metadataGetter getMetadata = [this](const std::string &id,
-            bool /*overrideForPerfClass*/) {
-          return mDevice->infoPhysical(id);};
-    ret = mProviderManager->getSessionCharacteristics(mCameraIdStr.c_str(),
-            sessionConfiguration, mOverrideForPerfClass, getMetadata,
-            sessionCharacteristics);
-
-    switch (ret) {
-        case OK:
-            // Expected, do nothing.
-            break;
-        case INVALID_OPERATION: {
-                std::string msg = fmt::sprintf(
-                        "Camera %s: Session characteristics query not supported!",
-                        mCameraIdStr.c_str());
-                ALOGD("%s: %s", __FUNCTION__, msg.c_str());
-                res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
-            }
-
-            break;
-        default: {
-                std::string msg = fmt::sprintf( "Camera %s: Error: %s (%d)", mCameraIdStr.c_str(),
-                        strerror(-ret), ret);
-                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                        msg.c_str());
-            }
-    }
-
-    return res;
-}
-
 binder::Status CameraDeviceClient::deleteStream(int streamId) {
     ATRACE_CALL();
     ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId);
@@ -942,6 +892,11 @@
 
     Mutex::Autolock icl(mBinderSerializationLock);
 
+    if (!outputConfiguration.isComplete()) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "OutputConfiguration isn't valid!");
+    }
+
     const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
             outputConfiguration.getGraphicBufferProducers();
     size_t numBufferProducers = bufferProducers.size();
@@ -958,7 +913,7 @@
     bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
 
     res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
-            outputConfiguration.getSurfaceType());
+            outputConfiguration.getSurfaceType(), /*isConfigurationComplete*/true);
     if (!res.isOk()) {
         return res;
     }
@@ -1001,7 +956,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
                 isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode, colorSpace);
+                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
 
         if (!res.isOk())
             return res;
@@ -1114,6 +1069,10 @@
     if (!mDevice.get()) {
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
+    if (!outputConfiguration.isComplete()) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "OutputConfiguration isn't valid!");
+    }
 
     // Infer the surface info for deferred surface stream creation.
     width = outputConfiguration.getWidth();
@@ -1306,6 +1265,10 @@
     if (!mDevice.get()) {
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
+    if (!outputConfiguration.isComplete()) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "OutputConfiguration isn't valid!");
+    }
 
     const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
             outputConfiguration.getGraphicBufferProducers();
@@ -1373,7 +1336,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
                 /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode, colorSpace);
+                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
         if (!res.isOk())
             return res;
 
@@ -1686,6 +1649,11 @@
 
     Mutex::Autolock icl(mBinderSerializationLock);
 
+    if (!outputConfiguration.isComplete()) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "OutputConfiguration isn't valid!");
+    }
+
     const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
             outputConfiguration.getGraphicBufferProducers();
     const std::string &physicalId = outputConfiguration.getPhysicalCameraId();
@@ -1751,7 +1719,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                 true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
                 mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode, colorSpace);
+                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
 
         if (!res.isOk())
             return res;
@@ -1954,9 +1922,9 @@
     sp<CameraOfflineSessionClient> offlineClient;
     if (offlineSession.get() != nullptr) {
         offlineClient = new CameraOfflineSessionClient(sCameraService,
-                offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
-                mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation, mClientPid, mClientUid,
-                mServicePid);
+                offlineSession, offlineCompositeStreamMap, cameraCb, mAttributionAndPermissionUtils,
+                mClientPackageName, mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation,
+                mClientPid, mClientUid, mServicePid);
         ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
     }
 
@@ -2097,6 +2065,7 @@
 
 void CameraDeviceClient::notifyIdle(
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        std::pair<int32_t, int32_t> mostRequestedFpsRange,
         const std::vector<hardware::CameraStreamStats>& streamStats) {
     // Thread safe. Don't bother locking.
     sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
@@ -2117,8 +2086,12 @@
         }
     }
     Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
-            fullStreamStats, mUserTag, mVideoStabilizationMode, mUsedUltraWide,
-            mUsedSettingsOverrideZoom);
+            mostRequestedFpsRange,
+            fullStreamStats,
+            mRunningSessionStats.mUserTag,
+            mRunningSessionStats.mVideoStabilizationMode,
+            mRunningSessionStats.mUsedUltraWide,
+            mRunningSessionStats.mUsedSettingsOverrideZoom);
 }
 
 void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -2236,7 +2209,7 @@
 // TODO: move to Camera2ClientBase
 bool CameraDeviceClient::enforceRequestPermissions(CameraMetadata& metadata) {
 
-    const int pid = CameraThreadState::getCallingPid();
+    const int pid = getCallingPid();
     const int selfPid = getpid();
     camera_metadata_entry_t entry;
 
@@ -2275,7 +2248,7 @@
         String16 permissionString =
             toString16("android.permission.CAMERA_DISABLE_TRANSMIT_LED");
         if (!checkCallingPermission(permissionString)) {
-            const int uid = CameraThreadState::getCallingUid();
+            const int uid = getCallingUid();
             ALOGE("Permission Denial: "
                   "can't disable transmit LED pid=%d, uid=%d", pid, uid);
             return false;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index c2f7f56..42f2752 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -50,6 +50,7 @@
 protected:
     CameraDeviceClientBase(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& clientPackageName,
             bool systemNativeClient,
             const std::optional<std::string>& clientFeatureId,
@@ -60,7 +61,7 @@
             int clientPid,
             uid_t clientUid,
             int servicePid,
-            bool overrideToPortrait);
+            int rotationOverride);
 
     sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
 };
@@ -109,11 +110,6 @@
             /*out*/
             bool* streamStatus) override;
 
-    virtual binder::Status getSessionCharacteristics(
-            const SessionConfiguration& sessionConfiguration,
-            /*out*/
-            hardware::camera2::impl::CameraMetadataNative* sessionCharacteristics) override;
-
     // Returns -EBUSY if device is not idle or in error state
     virtual binder::Status deleteStream(int streamId) override;
 
@@ -186,6 +182,7 @@
     CameraDeviceClient(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& clientPackageName,
             bool clientPackageOverride,
             const std::optional<std::string>& clientFeatureId,
@@ -196,7 +193,7 @@
             uid_t clientUid,
             int servicePid,
             bool overrideForPerfClass,
-            bool overrideToPortrait,
+            int rotationOverride,
             const std::string& originalCameraId);
     virtual ~CameraDeviceClient();
 
@@ -232,6 +229,7 @@
      */
 
     virtual void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+                            std::pair<int32_t, int32_t> mostRequestedFpsRange,
                             const std::vector<hardware::CameraStreamStats>& streamStats);
     virtual void notifyError(int32_t errorCode,
                              const CaptureResultExtras& resultExtras);
@@ -367,14 +365,17 @@
     // Override the camera characteristics for performance class primary cameras.
     bool mOverrideForPerfClass;
 
-    // The string representation of object passed into CaptureRequest.setTag.
-    std::string mUserTag;
-    // The last set video stabilization mode
-    int mVideoStabilizationMode = -1;
-    // Whether a zoom_ratio < 1.0 has been used during this session
-    bool mUsedUltraWide = false;
-    // Whether a zoom settings override has been used during this session
-    bool mUsedSettingsOverrideZoom = false;
+    // Various fields used to collect session statistics
+    struct RunningSessionStats {
+        // The string representation of object passed into CaptureRequest.setTag.
+        std::string mUserTag;
+        // The last set video stabilization mode
+        int mVideoStabilizationMode = -1;
+        // Whether a zoom_ratio < 1.0 has been used during this session
+        bool mUsedUltraWide = false;
+        // Whether a zoom settings override has been used during this session
+        bool mUsedSettingsOverrideZoom = false;
+    } mRunningSessionStats;
 
     // This only exists in case of camera ID Remapping.
     const std::string mOriginalCameraId;
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index dc9e0c1..9a1fdd6 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -19,7 +19,6 @@
 //#define LOG_NDEBUG 0
 
 #include "CameraOfflineSessionClient.h"
-#include "utils/CameraThreadState.h"
 #include <utils/Trace.h>
 #include <camera/StringUtils.h>
 
@@ -163,7 +162,7 @@
         return res;
     }
     // Allow both client and the media server to disconnect at all times
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
     if (callingPid != mClientPid &&
             callingPid != mServicePid) {
         return res;
@@ -326,6 +325,7 @@
 
 void CameraOfflineSessionClient::notifyIdle(
         int64_t /*requestCount*/, int64_t /*resultErrorCount*/, bool /*deviceError*/,
+        std::pair<int32_t, int32_t> /*mostRequestedFpsRange*/,
         const std::vector<hardware::CameraStreamStats>& /*streamStats*/) {
     if (mRemoteCallback.get() != nullptr) {
         mRemoteCallback->onDeviceIdle();
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 804498f..77de874 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERAOFFLINESESSIONCLIENT_H
 #define ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERAOFFLINESESSIONCLIENT_H
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/BnCameraOfflineSession.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include "common/FrameProcessorBase.h"
@@ -47,6 +48,7 @@
             sp<CameraOfflineSessionBase> session,
             const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
             const sp<ICameraDeviceCallbacks>& remoteCallback,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& clientPackageName,
             const std::optional<std::string>& clientFeatureId,
             const std::string& cameraIdStr, int cameraFacing, int sensorOrientation,
@@ -54,10 +56,11 @@
             CameraService::BasicClient(
                     cameraService,
                     IInterface::asBinder(remoteCallback),
+                    attributionAndPermissionUtils,
                     // (v)ndk doesn't have offline session support
                     clientPackageName, /*overridePackageName*/false, clientFeatureId,
                     cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
-                    /*overrideToPortrait*/false),
+                    hardware::ICameraService::ROTATION_OVERRIDE_NONE),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
 
@@ -110,6 +113,7 @@
     void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
     status_t notifyActive(float maxPreviewFps) override;
     void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+            std::pair<int32_t, int32_t> mostRequestedFpsRange,
             const std::vector<hardware::CameraStreamStats>& streamStats) override;
     void notifyAutoFocus(uint8_t newState, int triggerId) override;
     void notifyAutoExposure(uint8_t newState, int triggerId) override;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 1b7fc6e..fa569ce 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -23,7 +23,7 @@
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <camera/CameraMetadata.h>
 #include <camera/camera2/OutputConfiguration.h>
-#include <gui/IProducerListener.h>
+#include <gui/Surface.h>
 #include "common/CameraDeviceBase.h"
 #include "device3/Camera3StreamInterface.h"
 
@@ -96,9 +96,12 @@
             const CameraMetadata& settings) override;
 
 protected:
-    struct ProducerListener : public BnProducerListener {
-        // ProducerListener impementation
+    struct StreamSurfaceListener : public SurfaceListener {
+        // StreamSurfaceListener implementation
         void onBufferReleased() override { /*No impl. for now*/ };
+        bool needsReleaseNotify() override { return true; };
+        void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& /*buffers*/) override {};
+        void onBufferDetached(int /*slot*/) override {};
     };
 
     status_t registerCompositeStreamListener(int32_t streamId);
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 1bd0b85..244a1e5 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -22,13 +22,15 @@
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
 #include <camera/StringUtils.h>
 
-#include "api1/client2/JpegProcessor.h"
-#include "common/CameraProviderManager.h"
-#include "utils/SessionConfigurationUtils.h"
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
+#include "api1/client2/JpegProcessor.h"
+#include "common/CameraProviderManager.h"
+#include "utils/SessionConfigurationUtils.h"
+
 #include "DepthCompositeStream.h"
 
 namespace android {
@@ -48,7 +50,7 @@
         mBlobHeight(0),
         mDepthBufferAcquired(false),
         mBlobBufferAcquired(false),
-        mProducerListener(new ProducerListener()),
+        mStreamSurfaceListener(new StreamSurfaceListener()),
         mMaxJpegBufferSize(-1),
         mUHRMaxJpegBufferSize(-1),
         mIsLogicalCamera(false) {
@@ -517,6 +519,15 @@
     return false;
 }
 
+bool DepthCompositeStream::isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    if ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_DYNAMIC_DEPTH)) &&
+            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB)) {
+        return true;
+    }
+
+    return false;
+}
+
 static bool setContains(std::unordered_set<int32_t> containerSet, int32_t value) {
     return containerSet.find(value) != containerSet.end();
 }
@@ -605,6 +616,12 @@
         return NO_INIT;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mBlobConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    mBlobConsumer->setFrameAvailableListener(this);
+    mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
+    mBlobSurface = mBlobConsumer->getSurface();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
@@ -612,6 +629,7 @@
     mBlobConsumer->setFrameAvailableListener(this);
     mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
     mBlobSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
             id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
@@ -630,11 +648,18 @@
         return ret;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mDepthConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    mDepthConsumer->setFrameAvailableListener(this);
+    mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
+    mDepthSurface = mDepthConsumer->getSurface();
+#else
     BufferQueue::createBufferQueue(&producer, &consumer);
     mDepthConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
     mDepthConsumer->setFrameAvailableListener(this);
     mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
     mDepthSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     std::vector<int> depthSurfaceId;
     ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
             kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
@@ -681,7 +706,7 @@
         return NO_INIT;
     }
 
-    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mBlobStreamId);
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index f797f9c..75deef7 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -46,6 +46,7 @@
     ~DepthCompositeStream() override;
 
     static bool isDepthCompositeStream(const sp<Surface> &surface);
+    static bool isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
@@ -129,12 +130,12 @@
     static const auto kDepthMapDataSpace = HAL_DATASPACE_DEPTH;
     static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
 
-    int                  mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId;
-    size_t               mBlobWidth, mBlobHeight;
-    sp<CpuConsumer>      mBlobConsumer, mDepthConsumer;
-    bool                 mDepthBufferAcquired, mBlobBufferAcquired;
-    sp<Surface>          mDepthSurface, mBlobSurface, mOutputSurface;
-    sp<ProducerListener> mProducerListener;
+    int                         mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId;
+    size_t                      mBlobWidth, mBlobHeight;
+    sp<CpuConsumer>             mBlobConsumer, mDepthConsumer;
+    bool                        mDepthBufferAcquired, mBlobBufferAcquired;
+    sp<Surface>                 mDepthSurface, mBlobSurface, mOutputSurface;
+    sp<StreamSurfaceListener>   mStreamSurfaceListener;
 
     ssize_t              mMaxJpegBufferSize;
     ssize_t              mUHRMaxJpegBufferSize;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 68e9ad4..206c879 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -25,11 +25,12 @@
 
 #include <aidl/android/hardware/camera/device/CameraBlob.h>
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
-#include <libyuv.h>
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
+#include <libyuv.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <camera/StringUtils.h>
 
 #include <mediadrm/ICrypto.h>
 #include <media/MediaCodecBuffer.h>
@@ -40,6 +41,7 @@
 #include "common/CameraDeviceBase.h"
 #include "utils/ExifUtils.h"
 #include "utils/SessionConfigurationUtils.h"
+#include "utils/Utils.h"
 #include "HeicEncoderInfoManager.h"
 #include "HeicCompositeStream.h"
 
@@ -67,7 +69,7 @@
         mMainImageStreamId(-1),
         mMainImageSurfaceId(-1),
         mYuvBufferAcquired(false),
-        mProducerListener(new ProducerListener()),
+        mStreamSurfaceListener(new StreamSurfaceListener()),
         mDequeuedOutputBufferCnt(0),
         mCodecOutputCounter(0),
         mQuality(-1),
@@ -94,6 +96,11 @@
     mMainImageSurface.clear();
 }
 
+bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF)) &&
+            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
+}
+
 bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
     ANativeWindow *anw = surface.get();
     status_t err;
@@ -136,6 +143,13 @@
         return NO_INIT;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
+    mAppSegmentConsumer->setFrameAvailableListener(this);
+    mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+    mAppSegmentSurface = mAppSegmentConsumer->getSurface();
+    sp<IGraphicBufferProducer> producer = mAppSegmentSurface->getIGraphicBufferProducer();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
@@ -143,6 +157,7 @@
     mAppSegmentConsumer->setFrameAvailableListener(this);
     mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
     mAppSegmentSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     mStaticInfo = device->info();
 
@@ -172,8 +187,13 @@
             return res;
         }
     } else {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mMainImageConsumer = new CpuConsumer(1);
+        producer = mMainImageConsumer->getSurface()->getIGraphicBufferProducer();
+#else
         BufferQueue::createBufferQueue(&producer, &consumer);
         mMainImageConsumer = new CpuConsumer(consumer, 1);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         mMainImageConsumer->setFrameAvailableListener(this);
         mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
     }
@@ -507,7 +527,7 @@
         return NO_INIT;
     }
 
-    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mMainImageStreamId);
@@ -1459,7 +1479,7 @@
     const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(CameraBlob));
     const CameraBlob *blob = (const CameraBlob*)(header);
     if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
-        ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, blob->blobId);
+        ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, eToI(blob->blobId));
         return 0;
     }
 
@@ -1583,7 +1603,7 @@
         // The chrome plane could be either Cb first, or Cr first. Take the
         // smaller address.
         uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
-        MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
+        MediaImage2::PlaneIndex dstPlane = codecUPlaneFirst ? MediaImage2::U : MediaImage2::V;
         for (auto row = top/2; row < (top+height)/2; row++) {
             uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                     imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index b539cdd..ba10e05 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -19,7 +19,6 @@
 
 #include <queue>
 
-#include <gui/IProducerListener.h>
 #include <gui/CpuConsumer.h>
 
 #include <media/hardware/VideoAPI.h>
@@ -42,6 +41,7 @@
     ~HeicCompositeStream() override;
 
     static bool isHeicCompositeStream(const sp<Surface> &surface);
+    static bool isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -233,10 +233,10 @@
     bool              mYuvBufferAcquired; // Only applicable to HEVC codec
     std::queue<int64_t> mMainImageFrameNumbers;
 
-    static const int32_t kMaxOutputSurfaceProducerCount = 1;
-    sp<Surface>       mOutputSurface;
-    sp<ProducerListener> mProducerListener;
-    int32_t           mDequeuedOutputBufferCnt;
+    static const int32_t        kMaxOutputSurfaceProducerCount = 1;
+    sp<Surface>                 mOutputSurface;
+    sp<StreamSurfaceListener>   mStreamSurfaceListener;
+    int32_t                     mDequeuedOutputBufferCnt;
 
     // Map from frame number to JPEG setting of orientation+quality
     struct HeicSettings {
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index 988446b..c5bd7a9 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -14,9 +14,6 @@
  * limitations under the License.
  */
 
-#include "hardware/gralloc.h"
-#include "system/graphics-base-v1.0.h"
-#include "system/graphics-base-v1.1.h"
 #define LOG_TAG "Camera3-JpegRCompositeStream"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
@@ -25,11 +22,16 @@
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
 
 #include "common/CameraProviderManager.h"
+#include "utils/SessionConfigurationUtils.h"
+
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
+#include <hardware/gralloc.h>
+#include <system/graphics-base-v1.0.h>
+#include <system/graphics-base-v1.1.h>
 #include <ultrahdr/jpegr.h>
 #include <utils/ExifUtils.h>
 #include <utils/Log.h>
-#include "utils/SessionConfigurationUtils.h"
 #include <utils/Trace.h>
 
 #include "JpegRCompositeStream.h"
@@ -54,7 +56,7 @@
         mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
         mOutputStreamUseCase(0),
         mFirstRequestLatency(-1),
-        mProducerListener(new ProducerListener()),
+        mStreamSurfaceListener(new StreamSurfaceListener()),
         mMaxJpegBufferSize(-1),
         mUHRMaxJpegBufferSize(-1),
         mStaticInfo(device->info()) {
@@ -520,6 +522,15 @@
     return false;
 }
 
+bool JpegRCompositeStream::isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    if ((streamInfo.format == HAL_PIXEL_FORMAT_BLOB) &&
+            (streamInfo.dataSpace == static_cast<int>(kJpegRDataSpace))) {
+        return true;
+    }
+
+    return false;
+}
+
 void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
         int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
     if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
@@ -564,6 +575,12 @@
             mStaticInfo, mP010DynamicRange,
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mP010Consumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    mP010Consumer->setFrameAvailableListener(this);
+    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
+    mP010Surface = mP010Consumer->getSurface();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
@@ -571,6 +588,7 @@
     mP010Consumer->setFrameAvailableListener(this);
     mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
     mP010Surface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
             static_cast<android_dataspace>(mP010DataSpace), rotation,
@@ -588,11 +606,18 @@
     }
 
     if (mSupportInternalJpeg) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mBlobConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+        mBlobConsumer->setFrameAvailableListener(this);
+        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
+        mBlobSurface = mBlobConsumer->getSurface();
+#else
         BufferQueue::createBufferQueue(&producer, &consumer);
         mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
         mBlobConsumer->setFrameAvailableListener(this);
         mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
         mBlobSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         std::vector<int> blobSurfaceId;
         ret = device->createStream(mBlobSurface, width, height, format,
                 kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
@@ -644,7 +669,7 @@
         return NO_INIT;
     }
 
-    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mP010StreamId);
@@ -832,8 +857,8 @@
     (*compositeOutput)[0].colorSpace =
         ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
 
-    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
-                streamInfo.dynamicRangeProfile,
+    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+                staticInfo, dynamicRange,
                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
         compositeOutput->push_back({});
         (*compositeOutput)[1].width = streamInfo.width;
@@ -856,8 +881,9 @@
 
     bool deviceError;
     std::map<int, StreamStats> stats;
+    std::pair<int32_t, int32_t> mostRequestedFps;
     mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
-            &deviceError, &stats);
+            &deviceError, &mostRequestedFps, &stats);
     if (stats.find(mP010StreamId) != stats.end()) {
         streamStats->mWidth = mBlobWidth;
         streamStats->mHeight = mBlobHeight;
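
The new isJpegRCompositeStreamInfo() overload lets callers that only hold an OutputStreamInfo, rather than a Surface, recognize a Jpeg/R composite stream. A minimal standalone sketch of the predicate, using simplified stand-in types (the Jpeg/R dataspace value below is a placeholder, not the real constant):

    // Stand-ins for the camera-service types. HAL_PIXEL_FORMAT_BLOB is 0x21; the
    // Jpeg/R dataspace value here is illustrative only.
    constexpr int kHalPixelFormatBlob = 0x21;
    constexpr int kJpegRDataSpace = 0x1005;  // assumption for illustration

    struct OutputStreamInfo {
        int format;
        int dataSpace;
    };

    // Same check as the Surface-based isJpegRCompositeStream(): BLOB format plus
    // the Jpeg/R dataspace identifies the composite stream.
    bool isJpegRCompositeStreamInfo(const OutputStreamInfo& info) {
        return info.format == kHalPixelFormatBlob && info.dataSpace == kJpegRDataSpace;
    }
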
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index 016d57c..d3ab19c 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -43,6 +43,7 @@
     ~JpegRCompositeStream() override;
 
     static bool isJpegRCompositeStream(const sp<Surface> &surface);
+    static bool isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
@@ -127,7 +128,8 @@
     int32_t              mOutputColorSpace;
     int64_t              mOutputStreamUseCase;
     nsecs_t              mFirstRequestLatency;
-    sp<ProducerListener> mProducerListener;
+
+    sp<StreamSurfaceListener> mStreamSurfaceListener;
 
     ssize_t              mMaxJpegBufferSize;
     ssize_t              mUHRMaxJpegBufferSize;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index a126f61..352c6f8 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -27,8 +27,10 @@
 #include <gui/Surface.h>
 #include <gui/Surface.h>
 
+#include <android/hardware/ICameraService.h>
 #include <camera/CameraSessionStats.h>
 #include <camera/StringUtils.h>
+#include <com_android_window_flags.h>
 
 #include "common/Camera2ClientBase.h"
 
@@ -37,12 +39,13 @@
 #include "device3/Camera3Device.h"
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
-#include "utils/CameraThreadState.h"
 
 namespace android {
 
 using namespace camera2;
 
+namespace wm_flags = com::android::window::flags;
+
 // Interface used by CameraService
 
 template <typename TClientBase>
@@ -50,6 +53,7 @@
         const sp<CameraService>& cameraService,
         const sp<TCamCallbacks>& remoteCallback,
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName,
         bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId,
@@ -61,11 +65,11 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool legacyClient):
-        TClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
-                clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid, overrideToPortrait),
+        TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientPackageName,
+                systemNativeClient, clientFeatureId, cameraId, api1CameraId, cameraFacing,
+                sensorOrientation, clientPid, clientUid, servicePid, rotationOverride),
         mSharedCameraCallbacks(remoteCallback),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
@@ -82,7 +86,7 @@
 status_t Camera2ClientBase<TClientBase>::checkPid(const char* checkLocation)
         const {
 
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = TClientBase::getCallingPid();
     if (callingPid == TClientBase::mClientPid) return NO_ERROR;
 
     ALOGE("%s: attempt to use a locked camera from a different process"
@@ -115,14 +119,16 @@
         case IPCTransport::HIDL:
             mDevice =
                     new HidlCamera3Device(mCameraServiceProxyWrapper,
+                            TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            TClientBase::mOverrideToPortrait, mLegacyClient);
+                            TClientBase::mRotationOverride, mLegacyClient);
             break;
         case IPCTransport::AIDL:
             mDevice =
                     new AidlCamera3Device(mCameraServiceProxyWrapper,
+                            TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            TClientBase::mOverrideToPortrait, mLegacyClient);
+                            TClientBase::mRotationOverride, mLegacyClient);
              break;
         default:
             ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
@@ -135,16 +141,17 @@
         return NO_INIT;
     }
 
+    // Verify ops permissions
+    res = TClientBase::startCameraOps();
+    if (res != OK) {
+        TClientBase::finishCameraOps();
+        return res;
+    }
+
     res = mDevice->initialize(providerPtr, monitorTags);
     if (res != OK) {
         ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
                 __FUNCTION__, TClientBase::mCameraIdStr.c_str(), strerror(-res), res);
-        return res;
-    }
-
-    // Verify ops permissions
-    res = TClientBase::startCameraOps();
-    if (res != OK) {
         TClientBase::finishCameraOps();
         return res;
     }
@@ -266,7 +273,7 @@
     ALOGD("Camera %s: serializationLock acquired", TClientBase::mCameraIdStr.c_str());
     binder::Status res = binder::Status::ok();
     // Allow both client and the media server to disconnect at all times
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = TClientBase::getCallingPid();
     if (callingPid != TClientBase::mClientPid &&
         callingPid != TClientBase::mServicePid) return res;
 
@@ -305,18 +312,18 @@
     Mutex::Autolock icl(mBinderSerializationLock);
 
     if (TClientBase::mClientPid != 0 &&
-        CameraThreadState::getCallingPid() != TClientBase::mClientPid) {
+        TClientBase::getCallingPid() != TClientBase::mClientPid) {
 
         ALOGE("%s: Camera %s: Connection attempt from pid %d; "
                 "current locked to pid %d",
                 __FUNCTION__,
                 TClientBase::mCameraIdStr.c_str(),
-                CameraThreadState::getCallingPid(),
+                TClientBase::getCallingPid(),
                 TClientBase::mClientPid);
         return BAD_VALUE;
     }
 
-    TClientBase::mClientPid = CameraThreadState::getCallingPid();
+    TClientBase::mClientPid = TClientBase::getCallingPid();
 
     TClientBase::mRemoteCallback = client;
     mSharedCameraCallbacks = client;
@@ -336,8 +343,9 @@
 
 template <typename TClientBase>
 void Camera2ClientBase<TClientBase>::notifyPhysicalCameraChange(const std::string &physicalId) {
-    // We're only interested in this notification if overrideToPortrait is turned on.
-    if (!TClientBase::mOverrideToPortrait) {
+    using android::hardware::ICameraService;
+    // We're only interested in this notification if rotationOverride is turned on.
+    if (TClientBase::mRotationOverride == ICameraService::ROTATION_OVERRIDE_NONE) {
         return;
     }
 
@@ -347,8 +355,13 @@
     if (orientationEntry.count == 1) {
         int orientation = orientationEntry.data.i32[0];
         int rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
-
-        if (orientation == 0 || orientation == 180) {
+        bool landscapeSensor = (orientation == 0 || orientation == 180);
+        if (((TClientBase::mRotationOverride ==
+                ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) && landscapeSensor) ||
+                        ((wm_flags::camera_compat_for_freeform() &&
+                                TClientBase::mRotationOverride ==
+                                ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY)
+                                && !landscapeSensor)) {
             rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
         }
 
@@ -377,6 +390,7 @@
 template <typename TClientBase>
 void Camera2ClientBase<TClientBase>::notifyIdleWithUserTag(
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        std::pair<int32_t, int32_t> mostRequestedFpsRange,
         const std::vector<hardware::CameraStreamStats>& streamStats,
         const std::string& userTag, int videoStabilizationMode, bool usedUltraWide,
         bool usedZoomOverride) {
@@ -388,7 +402,7 @@
         }
         mCameraServiceProxyWrapper->logIdle(TClientBase::mCameraIdStr,
                 requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
-                usedUltraWide, usedZoomOverride, streamStats);
+                usedUltraWide, usedZoomOverride, mostRequestedFpsRange, streamStats);
     }
     mDeviceActive = false;
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 2bb90d9..c9d5735 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -21,6 +21,7 @@
 #include "camera/CameraMetadata.h"
 #include "camera/CaptureResult.h"
 #include "utils/CameraServiceProxyWrapper.h"
+#include "utils/AttributionAndPermissionUtils.h"
 #include "CameraServiceWatchdog.h"
 
 namespace android {
@@ -51,6 +52,7 @@
     Camera2ClientBase(const sp<CameraService>& cameraService,
                       const sp<TCamCallbacks>& remoteCallback,
                       std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+                      std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
                       const std::string& clientPackageName,
                       bool systemNativeClient,
                       const std::optional<std::string>& clientFeatureId,
@@ -62,7 +64,7 @@
                       uid_t clientUid,
                       int servicePid,
                       bool overrideForPerfClass,
-                      bool overrideToPortrait,
+                      int rotationOverride,
                       bool legacyClient = false);
     virtual ~Camera2ClientBase();
 
@@ -84,6 +86,7 @@
     virtual status_t      notifyActive(float maxPreviewFps);
     virtual void          notifyIdle(int64_t /*requestCount*/, int64_t /*resultErrorCount*/,
                                      bool /*deviceError*/,
+                                     std::pair<int32_t, int32_t> /*mostRequestedFpsRange*/,
                                      const std::vector<hardware::CameraStreamStats>&) {}
     virtual void          notifyShutter(const CaptureResultExtras& resultExtras,
                                         nsecs_t timestamp);
@@ -97,6 +100,7 @@
 
     void                  notifyIdleWithUserTag(int64_t requestCount, int64_t resultErrorCount,
                                      bool deviceError,
+                                     std::pair<int32_t, int32_t> mostRequestedFpsRange,
                                      const std::vector<hardware::CameraStreamStats>& streamStats,
                                      const std::string& userTag, int videoStabilizationMode,
                                      bool usedUltraWide, bool usedZoomOverride);
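
Across Camera2ClientBase the boolean overrideToPortrait parameter becomes an integer rotationOverride carrying one of the ICameraService::ROTATION_OVERRIDE_* values, which lets the freeform-compat ROTATION_ONLY case sit alongside the existing portrait override. A self-contained sketch of the rotate-and-crop decision from notifyPhysicalCameraChange() (the constant values are illustrative, not the real AIDL constants):

    #include <cstdint>

    // Illustrative values; the real constants come from the generated
    // android::hardware::ICameraService header.
    constexpr int32_t ROTATION_OVERRIDE_NONE = 0;
    constexpr int32_t ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT = 1;
    constexpr int32_t ROTATION_OVERRIDE_ROTATION_ONLY = 2;

    // True when ROTATE_AND_CROP_90 should be applied: a landscape sensor under the
    // portrait override, or (behind the freeform-compat flag) a portrait sensor
    // under the rotation-only override.
    bool needsRotate90(int32_t rotationOverride, int orientation, bool freeformCompatEnabled) {
        const bool landscapeSensor = (orientation == 0 || orientation == 180);
        if (rotationOverride == ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT && landscapeSensor) {
            return true;
        }
        return freeformCompatEnabled &&
                rotationOverride == ROTATION_OVERRIDE_ROTATION_ONLY && !landscapeSensor;
    }
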
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index cfc41c3..aceb5c0 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -136,7 +136,7 @@
      * Output lastFrameNumber is the expected last frame number of the list of requests.
      */
     virtual status_t captureList(const List<const PhysicalCameraSettingsList> &requests,
-                                 const std::list<const SurfaceMap> &surfaceMaps,
+                                 const std::list<SurfaceMap> &surfaceMaps,
                                  int64_t *lastFrameNumber = NULL) = 0;
 
     /**
@@ -152,7 +152,7 @@
      * Output lastFrameNumber is the last frame number of the previous streaming request.
      */
     virtual status_t setStreamingRequestList(const List<const PhysicalCameraSettingsList> &requests,
-                                             const std::list<const SurfaceMap> &surfaceMaps,
+                                             const std::list<SurfaceMap> &surfaceMaps,
                                              int64_t *lastFrameNumber = NULL) = 0;
 
     /**
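
Dropping the const from std::list<const SurfaceMap> in these signatures looks like a toolchain cleanup rather than a behavior change: standard allocators require a cv-unqualified value type, so containers of const elements are not supported and newer libc++ rejects them. A small standalone illustration, approximating SurfaceMap as a map from stream id to surface indices:

    #include <cstddef>
    #include <list>
    #include <unordered_map>
    #include <vector>

    // Rough stand-in for the camera service's SurfaceMap typedef.
    using SurfaceMap = std::unordered_map<int, std::vector<std::size_t>>;

    int main() {
        // std::list<const SurfaceMap> would need std::allocator<const SurfaceMap>,
        // which the allocator requirements do not allow; the elements stay mutable
        // and call sites pass const references instead.
        std::list<SurfaceMap> surfaceMaps;
        surfaceMaps.push_back({{0, {0}}});
        return surfaceMaps.size() == 1 ? 0 : 1;
    }
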
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 976c47c..b1ba761 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -46,6 +46,7 @@
     // May return an error since it checks appops
     virtual status_t notifyActive(float maxPreviewFps) = 0;
     virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
+            std::pair<int32_t, int32_t> mostRequestedFpsRange,
             const std::vector<hardware::CameraStreamStats>& streamStats) = 0;
 
     // Required only for API2
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 98c1a79..2440c37 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -33,10 +33,12 @@
 #include <future>
 #include <inttypes.h>
 #include <android_companion_virtualdevice_flags.h>
+#include <android_companion_virtualdevice_build_flags.h>
 #include <android/binder_manager.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
 #include <com_android_internal_camera_flags.h>
+#include <com_android_window_flags.h>
 #include <functional>
 #include <camera_metadata_hidden.h>
 #include <android-base/parseint.h>
@@ -49,6 +51,7 @@
 
 #include "api2/HeicCompositeStream.h"
 #include "device3/ZoomRatioMapper.h"
+#include "utils/Utils.h"
 
 namespace android {
 
@@ -61,6 +64,7 @@
 
 namespace flags = com::android::internal::camera::flags;
 namespace vd_flags = android::companion::virtualdevice::flags;
+namespace wm_flags = com::android::window::flags;
 
 namespace {
 const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
@@ -89,7 +93,7 @@
         case TorchModeStatus::AVAILABLE_ON:
             return "AVAILABLE_ON";
     }
-    ALOGW("Unexpected HAL torch mode status code %d", s);
+    ALOGW("Unexpected HAL torch mode status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
@@ -102,7 +106,7 @@
         case CameraDeviceStatus::ENUMERATING:
             return "ENUMERATING";
     }
-    ALOGW("Unexpected HAL device status code %d", s);
+    ALOGW("Unexpected HAL device status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
@@ -139,7 +143,7 @@
 }
 
 std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-CameraProviderManager::AidlServiceInteractionProxyImpl::getAidlService(
+CameraProviderManager::AidlServiceInteractionProxyImpl::getService(
         const std::string& serviceName) {
     using aidl::android::hardware::camera::provider::ICameraProvider;
 
@@ -151,15 +155,31 @@
     }
 
     if (binder == nullptr) {
-        ALOGD("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
-              serviceName.c_str());
+        ALOGE("%s: AIDL Camera provider HAL '%s' is not actually available, despite waiting "
+              "indefinitely?", __FUNCTION__, serviceName.c_str());
         return nullptr;
     }
     std::shared_ptr<ICameraProvider> interface =
             ICameraProvider::fromBinder(ndk::SpAIBinder(binder));
 
     return interface;
-};
+}
+
+std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+CameraProviderManager::AidlServiceInteractionProxyImpl::tryGetService(
+        const std::string& serviceName) {
+    using aidl::android::hardware::camera::provider::ICameraProvider;
+
+    std::shared_ptr<ICameraProvider> interface = ICameraProvider::fromBinder(
+                    ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
+    if (interface == nullptr) {
+        ALOGD("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
+              serviceName.c_str());
+        return nullptr;
+    }
+
+    return interface;
+}
 
 static std::string getFullAidlProviderName(const std::string instance) {
     std::string aidlHalServiceDescriptor =
@@ -386,13 +406,14 @@
 }
 
 status_t CameraProviderManager::getCameraInfo(const std::string &id,
-        bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const {
+         int rotationOverride, int *portraitRotation,
+         hardware::CameraInfo* info) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) return NAME_NOT_FOUND;
 
-    return deviceInfo->getCameraInfo(overrideToPortrait, portraitRotation, info);
+    return deviceInfo->getCameraInfo(rotationOverride, portraitRotation, info);
 }
 
 status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
@@ -404,8 +425,16 @@
         return NAME_NOT_FOUND;
     }
 
+    metadataGetter getMetadata = [this](const std::string &id,
+            bool overrideForPerfClass) {
+        CameraMetadata metadata;
+        this->getCameraCharacteristicsLocked(id, overrideForPerfClass,
+                                             &metadata,
+                                             hardware::ICameraService::ROTATION_OVERRIDE_NONE);
+        return metadata;
+    };
     return deviceInfo->isSessionConfigurationSupported(configuration,
-            overrideForPerfClass, checkSessionParams, status);
+            overrideForPerfClass, getMetadata, checkSessionParams, status);
 }
 
 status_t  CameraProviderManager::createDefaultRequest(const std::string& cameraId,
@@ -422,9 +451,8 @@
         return NAME_NOT_FOUND;
     }
 
-    camera_metadata_t *rawRequest;
     status_t res = deviceInfo->createDefaultRequest(templateId,
-            &rawRequest);
+            metadata);
 
     if (res == BAD_VALUE) {
         ALOGI("%s: template %d is not supported on this camera device",
@@ -436,25 +464,34 @@
         return res;
     }
 
-    set_camera_metadata_vendor_id(rawRequest, deviceInfo->mProviderTagid);
-    metadata->acquire(rawRequest);
-
     return OK;
 }
 
-status_t CameraProviderManager::getSessionCharacteristics(const std::string& id,
-        const SessionConfiguration &configuration, bool overrideForPerfClass,
-        metadataGetter getMetadata,
-        CameraMetadata* sessionCharacteristics /*out*/) const {
+status_t CameraProviderManager::getSessionCharacteristics(
+        const std::string& id, const SessionConfiguration& configuration, bool overrideForPerfClass,
+        int rotationOverride, CameraMetadata* sessionCharacteristics /*out*/) const {
     if (!flags::feature_combination_query()) {
         return INVALID_OPERATION;
     }
+
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) {
         return NAME_NOT_FOUND;
     }
 
+    metadataGetter getMetadata = [this, rotationOverride](const std::string& id,
+                                                            bool overrideForPerfClass) {
+        CameraMetadata metadata;
+        status_t ret = this->getCameraCharacteristicsLocked(id, overrideForPerfClass, &metadata,
+                                                            rotationOverride);
+        if (ret != OK) {
+            ALOGE("%s: Could not get CameraCharacteristics for device %s", __FUNCTION__,
+                  id.c_str());
+        }
+        return metadata;
+    };
+
     return deviceInfo->getSessionCharacteristics(configuration,
             overrideForPerfClass, getMetadata, sessionCharacteristics);
 }
@@ -476,10 +513,10 @@
 
 status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
         bool overrideForPerfClass, CameraMetadata* characteristics,
-        bool overrideToPortrait) const {
+        int rotationOverride) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics,
-            overrideToPortrait);
+            rotationOverride);
 }
 
 status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
@@ -1062,20 +1099,6 @@
     }
 }
 
-CameraMetadata CameraProviderManager::ProviderInfo::DeviceInfo3::deviceInfo(
-        const std::string &id) {
-    if (id.empty()) {
-        return mCameraCharacteristics;
-    } else {
-        if (mPhysicalCameraCharacteristics.find(id) != mPhysicalCameraCharacteristics.end()) {
-            return mPhysicalCameraCharacteristics.at(id);
-        } else {
-            ALOGE("%s: Invalid physical camera id %s", __FUNCTION__, id.c_str());
-            return mCameraCharacteristics;
-        }
-    }
-}
-
 SystemCameraKind CameraProviderManager::ProviderInfo::DeviceInfo3::getSystemCameraKind() {
     camera_metadata_entry_t entryCap;
     entryCap = mCameraCharacteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
@@ -1833,7 +1856,7 @@
 
     auto& c = mCameraCharacteristics;
     status_t res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
-
+    mSessionConfigQueryVersion = versionCode;
     return res;
 }
 
@@ -2096,8 +2119,14 @@
         const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
     using aidl::android::hardware::camera::provider::ICameraProvider;
 
-    std::shared_ptr<ICameraProvider> interface =
-            mAidlServiceProxy->getAidlService(providerName.c_str());
+    std::shared_ptr<ICameraProvider> interface;
+    if (flags::delay_lazy_hal_instantiation()) {
+        // Only get remote instance if already running. Lazy Providers will be
+        // woken up later.
+        interface = mAidlServiceProxy->tryGetService(providerName);
+    } else {
+        interface = mAidlServiceProxy->getService(providerName);
+    }
 
     if (interface == nullptr) {
         ALOGW("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
@@ -2251,7 +2280,7 @@
                         return tryToInitializeAidlProviderLocked(removedAidlProviderName,
                                 providerInfo);
                     default:
-                        ALOGE("%s Unsupported Transport %d", __FUNCTION__, providerTransport);
+                        ALOGE("%s Unsupported Transport %d", __FUNCTION__, eToI(providerTransport));
                 }
             }
         }
@@ -2338,7 +2367,7 @@
             }
             break;
         default:
-            ALOGE("%s Invalid transport %d", __FUNCTION__, transport);
+            ALOGE("%s Invalid transport %d", __FUNCTION__, eToI(transport));
             return BAD_VALUE;
     }
 
@@ -2447,8 +2476,9 @@
                 device->hasFlashUnit() ? "true" : "false");
         hardware::CameraInfo info;
         int portraitRotation;
-        status_t res = device->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
-                &info);
+        status_t res = device->getCameraInfo(
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                &portraitRotation, &info);
         if (res != OK) {
             dprintf(fd, "   <Error reading camera info: %s (%d)>\n",
                     strerror(-res), res);
@@ -2459,7 +2489,7 @@
         }
         CameraMetadata info2;
         res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2,
-                /*overrideToPortrait*/false);
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res == INVALID_OPERATION) {
             dprintf(fd, "  API2 not directly supported\n");
         } else if (res != OK) {
@@ -2683,7 +2713,7 @@
         }
         if (!known) {
             ALOGW("Camera provider %s says an unknown camera %s now has torch status %d. Curious.",
-                    mProviderName.c_str(), cameraDeviceName.c_str(), newStatus);
+                mProviderName.c_str(), cameraDeviceName.c_str(), eToI(newStatus));
             return;
         }
         // no lock needed since listener is set up only once during
@@ -2737,10 +2767,15 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraInfo(
-        bool overrideToPortrait, int *portraitRotation,
+        int rotationOverride, int *portraitRotation,
         hardware::CameraInfo *info) const {
     if (info == nullptr) return BAD_VALUE;
 
+    bool freeform_compat_enabled = wm_flags::camera_compat_for_freeform();
+    if (!freeform_compat_enabled &&
+            rotationOverride > hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
+        ALOGW("Camera compat freeform flag disabled but rotation override is %d", rotationOverride);
+    }
     camera_metadata_ro_entry facing =
             mCameraCharacteristics.find(ANDROID_LENS_FACING);
     if (facing.count == 1) {
@@ -2768,13 +2803,18 @@
         return NAME_NOT_FOUND;
     }
 
-    if (overrideToPortrait && (info->orientation == 0 || info->orientation == 180)) {
+    if (rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT
+            && (info->orientation == 0 || info->orientation == 180)) {
         *portraitRotation = 90;
         if (info->facing == hardware::CAMERA_FACING_FRONT) {
             info->orientation = (360 + info->orientation - 90) % 360;
         } else {
             info->orientation = (360 + info->orientation + 90) % 360;
         }
+    } else if (freeform_compat_enabled &&
+            rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY
+            && (info->orientation == 90 || info->orientation == 270)) {
+        *portraitRotation = info->facing == hardware::CAMERA_FACING_BACK ? 90 : 270;
     } else {
         *portraitRotation = 0;
     }
@@ -2804,7 +2844,8 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
-        bool overrideForPerfClass, CameraMetadata *characteristics, bool overrideToPortrait) {
+        bool overrideForPerfClass, CameraMetadata *characteristics,
+        int rotationOverride) {
     if (characteristics == nullptr) return BAD_VALUE;
 
     if (!overrideForPerfClass && mCameraCharNoPCOverride != nullptr) {
@@ -2813,7 +2854,7 @@
         *characteristics = mCameraCharacteristics;
     }
 
-    if (overrideToPortrait) {
+    if (rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
         const auto &lensFacingEntry = characteristics->find(ANDROID_LENS_FACING);
         const auto &sensorOrientationEntry = characteristics->find(ANDROID_SENSOR_ORIENTATION);
         uint8_t lensFacing = lensFacingEntry.data.u8[0];
@@ -3174,11 +3215,11 @@
 
 status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
         bool overrideForPerfClass, CameraMetadata* characteristics,
-        bool overrideToPortrait) const {
+        int rotationOverride) const {
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo != nullptr) {
         return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics,
-                overrideToPortrait);
+                rotationOverride);
     }
 
     // Find hidden physical camera characteristics
@@ -3214,8 +3255,9 @@
 
         hardware::CameraInfo info;
         int portraitRotation;
-        status_t res = deviceInfo->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
-                &info);
+        status_t res = deviceInfo->getCameraInfo(
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                &portraitRotation, &info);
         if (res != OK) {
             ALOGE("%s: Error reading camera info: %s (%d)", __FUNCTION__, strerror(-res), res);
             continue;
@@ -3244,7 +3286,8 @@
 }
 
 bool CameraProviderManager::isVirtualCameraHalEnabled() {
-    return vd_flags::virtual_camera_service_discovery();
+    return vd_flags::virtual_camera_service_discovery() &&
+           vd_flags::virtual_camera_service_build_flag();
 }
 
 } // namespace android
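
The AidlServiceInteractionProxy now distinguishes getService() from tryGetService() so that, behind flags::delay_lazy_hal_instantiation(), provider enumeration does not force a lazy camera HAL process to start. A schematic of the selection logic with stub types (the real interface is ICameraProvider and the proxy declared in CameraProviderManager.h):

    #include <memory>
    #include <string>

    struct Provider {};  // stands in for ICameraProvider

    struct ServiceProxy {
        // Blocks, potentially indefinitely, until the named service is up.
        virtual std::shared_ptr<Provider> getService(const std::string& name) = 0;
        // Returns nullptr immediately if the named service is not currently running.
        virtual std::shared_ptr<Provider> tryGetService(const std::string& name) = 0;
        virtual ~ServiceProxy() = default;
    };

    // Mirrors tryToInitializeAidlProviderLocked(): when lazy-HAL instantiation is
    // delayed, only pick up providers that are already running; lazy providers are
    // woken up later on first real use.
    std::shared_ptr<Provider> lookupProvider(ServiceProxy& proxy, const std::string& name,
                                             bool delayLazyHalInstantiation) {
        return delayLazyHalInstantiation ? proxy.tryGetService(name) : proxy.getService(name);
    }
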
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 53a2102..4a64b44 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -178,9 +178,15 @@
     // Proxy to inject fake services in test.
     class AidlServiceInteractionProxy {
       public:
-        // Returns the Aidl service with the given serviceName
+        // Returns the Aidl service with the given serviceName. Will wait indefinitely
+        // for the service to come up if not running.
         virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-        getAidlService(const std::string& serviceName) = 0;
+        getService(const std::string& serviceName) = 0;
+
+        // Attempts to get an already running AIDL service of the given serviceName.
+        // Returns nullptr immediately if service is not running.
+        virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+        tryGetService(const std::string& serviceName) = 0;
 
         virtual ~AidlServiceInteractionProxy() = default;
     };
@@ -190,7 +196,10 @@
     class AidlServiceInteractionProxyImpl : public AidlServiceInteractionProxy {
       public:
         virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-        getAidlService(const std::string& serviceName) override;
+        getService(const std::string& serviceName) override;
+
+        virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+        tryGetService(const std::string& serviceName) override;
     };
 
     /**
@@ -283,7 +292,8 @@
      * Return the old camera API camera info
      */
     status_t getCameraInfo(const std::string &id,
-            bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const;
+            int rotationOverride, int *portraitRotation,
+            hardware::CameraInfo* info) const;
 
     /**
      * Return API2 camera characteristics - returns NAME_NOT_FOUND if a device ID does
@@ -291,7 +301,7 @@
      */
     status_t getCameraCharacteristics(const std::string &id,
             bool overrideForPerfClass, CameraMetadata* characteristics,
-            bool overrideToPortrait) const;
+            int rotationOverride) const;
 
     status_t isConcurrentSessionConfigurationSupported(
             const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
@@ -321,7 +331,8 @@
      */
      status_t getSessionCharacteristics(const std::string& id,
             const SessionConfiguration &configuration,
-            bool overrideForPerfClass, camera3::metadataGetter getMetadata,
+            bool overrideForPerfClass,
+            int rotationOverride,
             CameraMetadata* sessionCharacteristics /*out*/) const;
 
     /**
@@ -616,7 +627,8 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(bool overrideToPortrait,
+            virtual status_t getCameraInfo(
+                    int rotationOverride,
                     int *portraitRotation,
                     hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
@@ -624,7 +636,7 @@
             virtual status_t getCameraCharacteristics(
                     [[maybe_unused]] bool overrideForPerfClass,
                     [[maybe_unused]] CameraMetadata *characteristics,
-                    [[maybe_unused]] bool overrideToPortrait) {
+                    [[maybe_unused]] int rotationOverride) {
                 return INVALID_OPERATION;
             }
             virtual status_t getPhysicalCameraCharacteristics(
@@ -636,6 +648,7 @@
             virtual status_t isSessionConfigurationSupported(
                     const SessionConfiguration &/*configuration*/,
                     bool /*overrideForPerfClass*/,
+                    camera3::metadataGetter /*getMetadata*/,
                     bool /*checkSessionParams*/,
                     bool * /*status*/) {
                 return INVALID_OPERATION;
@@ -644,8 +657,7 @@
             virtual status_t getSessionCharacteristics(
                     const SessionConfiguration &/*configuration*/,
                     bool /*overrideForPerfClass*/,
-                    camera3::metadataGetter /*getMetadata*/,
-                    CameraMetadata* /*sessionCharacteristics*/) {
+                    camera3::metadataGetter /*getMetadata*/, CameraMetadata* /*outChars*/) {
                 return INVALID_OPERATION;
             }
 
@@ -653,7 +665,7 @@
             virtual void notifyDeviceStateChange(int64_t /*newState*/) {}
             virtual status_t createDefaultRequest(
                     camera3::camera_request_template_t /*templateId*/,
-                    camera_metadata_t** /*metadata*/) {
+                    CameraMetadata* /*metadata*/) {
                 return INVALID_OPERATION;
             }
 
@@ -695,7 +707,8 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(bool overrideToPortrait,
+            virtual status_t getCameraInfo(
+                    int rotationOverride,
                     int *portraitRotation,
                     hardware::CameraInfo *info) const override;
             virtual bool isAPI1Compatible() const override;
@@ -703,7 +716,7 @@
             virtual status_t getCameraCharacteristics(
                     bool overrideForPerfClass,
                     CameraMetadata *characteristics,
-                    bool overrideToPortrait) override;
+                    int rotationOverride) override;
             virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
                     CameraMetadata *characteristics) const override;
             virtual status_t filterSmallJpegSizes() override;
@@ -727,6 +740,10 @@
             // Only contains characteristics for hidden physical cameras,
             // not for public physical cameras.
             std::unordered_map<std::string, CameraMetadata> mPhysicalCameraCharacteristics;
+            // Value filled in from addSessionConfigQueryVersionTag.
+            // Cached to make lookups faster
+            int mSessionConfigQueryVersion = 0;
+
             void queryPhysicalCameraIds();
             SystemCameraKind getSystemCameraKind();
             status_t fixupMonochromeTags();
@@ -764,8 +781,6 @@
                     std::vector<int64_t>* stallDurations,
                     const camera_metadata_entry& halStreamConfigs,
                     const camera_metadata_entry& halStreamDurations);
-
-            CameraMetadata deviceInfo(const std::string &id);
         };
     protected:
         std::string mType;
@@ -905,7 +920,7 @@
         const hardware::camera::common::V1_0::TorchModeStatus&);
 
     status_t getCameraCharacteristicsLocked(const std::string &id, bool overrideForPerfClass,
-            CameraMetadata* characteristics, bool overrideToPortrait) const;
+            CameraMetadata* characteristics, int rotationOverride) const;
     void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
 
     status_t getSystemCameraKindLocked(const std::string& id, SystemCameraKind *kind) const;
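
The eToI() calls added to the logging statements come from utils/Utils.h; their purpose is to feed scoped-enum status codes to printf-style "%d" arguments through an explicit integer conversion. A hedged guess at the shape of such a helper (the real definition may differ):

    #include <cstdint>
    #include <cstdio>
    #include <type_traits>

    // Hypothetical stand-in for eToI() from utils/Utils.h: convert an enum to its
    // underlying integer type so it can legally travel through varargs.
    template <typename E>
    constexpr std::underlying_type_t<E> eToI(E e) {
        return static_cast<std::underlying_type_t<E>>(e);
    }

    enum class TorchModeStatus : int32_t { NOT_AVAILABLE, AVAILABLE_OFF, AVAILABLE_ON };

    int main() {
        TorchModeStatus s = TorchModeStatus::AVAILABLE_ON;
        // A scoped enum has no implicit conversion to int, so the cast keeps the
        // "%d" argument well-defined.
        std::printf("torch mode status code %d\n", eToI(s));
        return 0;
    }
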
diff --git a/services/camera/libcameraservice/common/HalConversionsTemplated.h b/services/camera/libcameraservice/common/HalConversionsTemplated.h
index 96a715c..c586062 100644
--- a/services/camera/libcameraservice/common/HalConversionsTemplated.h
+++ b/services/camera/libcameraservice/common/HalConversionsTemplated.h
@@ -19,6 +19,7 @@
 #include "common/CameraProviderManager.h"
 
 #include <device3/Camera3StreamInterface.h>
+#include <utils/Utils.h>
 
 namespace android {
 
@@ -48,7 +49,7 @@
         case HalCameraDeviceStatus::ENUMERATING:
             return CameraDeviceStatus::ENUMERATING;
     }
-    ALOGW("Unexpectedcamera device status code %d", s);
+    ALOGW("Unexpectedcamera device status code %d", eToI(s));
     return CameraDeviceStatus::NOT_PRESENT;
 }
 
@@ -74,7 +75,7 @@
         case HalTorchModeStatus::AVAILABLE_ON:
             return TorchModeStatus::AVAILABLE_ON;
     }
-    ALOGW("Unexpectedcamera torch mode status code %d", s);
+    ALOGW("Unexpectedcamera torch mode status code %d", eToI(s));
     return TorchModeStatus::NOT_AVAILABLE;
 }
 
@@ -88,7 +89,7 @@
         case HalCameraDeviceStatus::ENUMERATING:
             return "ENUMERATING";
     }
-    ALOGW("Unexpected HAL device status code %d", s);
+    ALOGW("Unexpected HAL device status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 921ee43..e76b750 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -17,6 +17,8 @@
 #include "common/HalConversionsTemplated.h"
 #include "common/CameraProviderInfoTemplated.h"
 
+#include <aidl/AidlUtils.h>
+
 #include <com_android_internal_camera_flags.h>
 #include <cutils/properties.h>
 
@@ -26,6 +28,7 @@
 #include <android/hardware/ICameraService.h>
 #include <camera_metadata_hidden.h>
 
+#include "device3/DistortionMapper.h"
 #include "device3/ZoomRatioMapper.h"
 #include <utils/SessionConfigurationUtils.h>
 #include <utils/Trace.h>
@@ -42,8 +45,10 @@
 
 using namespace aidl::android::hardware;
 using namespace hardware::camera;
+using android::hardware::cameraservice::utils::conversion::aidl::copySessionCharacteristics;
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
 using hardware::ICameraService;
+using SessionConfigurationUtils::overrideDefaultRequestKeys;
 
 using HalDeviceStatusType = aidl::android::hardware::camera::common::CameraDeviceStatus;
 using ICameraProvider = aidl::android::hardware::camera::provider::ICameraProvider;
@@ -297,9 +302,10 @@
     }
 
     ALOGV("Camera provider actually needs restart, calling getService(%s)", mProviderName.c_str());
-    interface = mManager->mAidlServiceProxy->getAidlService(mProviderName.c_str());
+    interface = mManager->mAidlServiceProxy->getService(mProviderName);
+
     if (interface == nullptr) {
-        ALOGD("%s: %s service not started", __FUNCTION__, mProviderName.c_str());
+        ALOGE("%s: %s service not started", __FUNCTION__, mProviderName.c_str());
         return nullptr;
     }
 
@@ -689,6 +695,14 @@
         }
     }
 
+    int deviceVersion = HARDWARE_DEVICE_API_VERSION(mVersion.get_major(), mVersion.get_minor());
+    if (deviceVersion >= CAMERA_DEVICE_API_VERSION_1_3) {
+        // This additional set of request keys must match the ones specified
+        // in ICameraDevice.isSessionConfigurationWithSettingsSupported.
+        mAdditionalKeysForFeatureQuery.insert(mAdditionalKeysForFeatureQuery.end(),
+                {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, ANDROID_CONTROL_AE_TARGET_FPS_RANGE});
+    }
+
     if (!kEnableLazyHal) {
         // Save HAL reference indefinitely
         mSavedInterface = interface;
@@ -786,7 +800,7 @@
 
 status_t AidlProviderInfo::AidlDeviceInfo3::isSessionConfigurationSupported(
         const SessionConfiguration &configuration, bool overrideForPerfClass,
-        bool checkSessionParams, bool *status) {
+        camera3::metadataGetter getMetadata, bool checkSessionParams, bool *status) {
 
     auto operatingMode = configuration.getOperatingMode();
 
@@ -798,12 +812,10 @@
 
     camera::device::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
-    camera3::metadataGetter getMetadata = [this](const std::string &id,
-            bool /*overrideForPerfClass*/) {return this->deviceInfo(id);};
     auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
             mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
             mPhysicalIds, streamConfiguration, overrideForPerfClass, mProviderTagid,
-            checkSessionParams, &earlyExit);
+            checkSessionParams, mAdditionalKeysForFeatureQuery, &earlyExit);
 
     if (!bRes.isOk()) {
         return UNKNOWN_ERROR;
@@ -850,7 +862,7 @@
 }
 
 status_t AidlProviderInfo::AidlDeviceInfo3::createDefaultRequest(
-        camera3::camera_request_template_t templateId, camera_metadata_t** metadata) {
+        camera3::camera_request_template_t templateId, CameraMetadata* metadata) {
     const std::shared_ptr<camera::device::ICameraDevice> interface =
             startDeviceInterface();
 
@@ -886,11 +898,12 @@
     }
     const camera_metadata *r =
             reinterpret_cast<const camera_metadata_t*>(request.metadata.data());
+    camera_metadata *rawRequest  = nullptr;
     size_t expectedSize = request.metadata.size();
     int ret = validate_camera_metadata_structure(r, &expectedSize);
     if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
-        *metadata = clone_camera_metadata(r);
-        if (*metadata == nullptr) {
+        rawRequest = clone_camera_metadata(r);
+        if (rawRequest == nullptr) {
             ALOGE("%s: Unable to clone camera metadata received from HAL",
                     __FUNCTION__);
             res = UNKNOWN_ERROR;
@@ -900,18 +913,28 @@
         res = UNKNOWN_ERROR;
     }
 
+    set_camera_metadata_vendor_id(rawRequest, mProviderTagid);
+    metadata->acquire(rawRequest);
+
+    res = overrideDefaultRequestKeys(metadata);
+    if (res != OK) {
+        ALOGE("Unabled to override default request keys: %s (%d)",
+                strerror(-res), res);
+        return res;
+    }
+
     return res;
 }
 
 status_t AidlProviderInfo::AidlDeviceInfo3::getSessionCharacteristics(
         const SessionConfiguration &configuration, bool overrideForPerfClass,
-        camera3::metadataGetter getMetadata, CameraMetadata *sessionCharacteristics) {
+        camera3::metadataGetter getMetadata, CameraMetadata* outChars) {
     camera::device::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
     auto res = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
             mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
             mPhysicalIds, streamConfiguration, overrideForPerfClass, mProviderTagid,
-            /*checkSessionParams*/true, &earlyExit);
+            /*checkSessionParams*/true, mAdditionalKeysForFeatureQuery, &earlyExit);
 
     if (!res.isOk()) {
         return UNKNOWN_ERROR;
@@ -931,24 +954,32 @@
     aidl::android::hardware::camera::device::CameraMetadata chars;
     ::ndk::ScopedAStatus ret =
         interface->getSessionCharacteristics(streamConfiguration, &chars);
-    std::vector<uint8_t> &metadata = chars.metadata;
+    if (!ret.isOk()) {
+        ALOGE("%s: Unexpected binder error while getting session characteristics (%d): %s",
+              __FUNCTION__, ret.getExceptionCode(), ret.getMessage());
+        return mapToStatusT(ret);
+    }
 
-    camera_metadata_t *buffer = reinterpret_cast<camera_metadata_t*>(metadata.data());
+    std::vector<uint8_t> &metadata = chars.metadata;
+    auto *buffer = reinterpret_cast<camera_metadata_t*>(metadata.data());
     size_t expectedSize = metadata.size();
     int resV = validate_camera_metadata_structure(buffer, &expectedSize);
     if (resV == OK || resV == CAMERA_METADATA_VALIDATION_SHIFTED) {
         set_camera_metadata_vendor_id(buffer, mProviderTagid);
-        *sessionCharacteristics = buffer;
     } else {
         ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
         return BAD_VALUE;
     }
 
-    if (!ret.isOk()) {
-        ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.getMessage());
-        return mapToStatusT(ret);
-    }
-    return OK;
+    CameraMetadata rawSessionChars;
+    rawSessionChars = buffer;  //  clone buffer
+    rawSessionChars.sort();    // sort for faster lookups
+
+    *outChars = mCameraCharacteristics;
+    outChars->sort();  // sort for faster reads and (hopefully!) writes
+
+    return copySessionCharacteristics(/*from=*/rawSessionChars, /*to=*/outChars,
+                                      mSessionConfigQueryVersion);
 }
 
 status_t AidlProviderInfo::convertToAidlHALStreamCombinationAndCameraIdsLocked(
@@ -970,16 +1001,16 @@
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
         res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
-                /*overrideToPortrait*/false);
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res != OK) {
             return res;
         }
         camera3::metadataGetter getMetadata =
                 [this](const std::string &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
-                    mManager->getCameraCharacteristicsLocked(id, overrideForPerfClass,
-                                                   &physicalDeviceInfo,
-                                                   /*overrideToPortrait*/false);
+                    mManager->getCameraCharacteristicsLocked(
+                            id, overrideForPerfClass, &physicalDeviceInfo,
+                            hardware::ICameraService::ROTATION_OVERRIDE_NONE);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
@@ -991,7 +1022,8 @@
                     mManager->isCompositeJpegRDisabledLocked(cameraId), getMetadata,
                     physicalCameraIds, streamConfiguration,
                     overrideForPerfClass, mProviderTagid,
-                    /*checkSessionParams*/false, &shouldExit);
+                    /*checkSessionParams*/false, /*additionalKeys*/{},
+                    &shouldExit);
         if (!bStatus.isOk()) {
             ALOGE("%s: convertToHALStreamCombination failed", __FUNCTION__);
             return INVALID_OPERATION;
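
The getSessionCharacteristics() rework also reorders its error handling: the binder status is now checked before the returned metadata blob is validated, since the blob is meaningless when the transaction itself failed. A schematic of that pattern with stub types:

    #include <cstdint>
    #include <string>
    #include <vector>

    struct Status { bool ok; std::string message; };  // stand-in for ndk::ScopedAStatus

    // Check the transaction result first; only then interpret the out-parameter.
    int parseSessionCharacteristics(const Status& ret, const std::vector<uint8_t>& metadata) {
        if (!ret.ok) {
            return -1;  // previously the blob was validated before this check ran
        }
        return metadata.empty() ? -1 : 0;
    }
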
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
index 0bfa7d4..1983cc3 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
@@ -127,20 +127,21 @@
 
         virtual status_t isSessionConfigurationSupported(
                 const SessionConfiguration &/*configuration*/,
-                bool overrideForPerfClass, bool checkSessionParams,
-                bool *status/*status*/);
+                bool overrideForPerfClass, camera3::metadataGetter getMetadata,
+                bool checkSessionParams, bool *status/*status*/);
 
         virtual status_t createDefaultRequest(
                     camera3::camera_request_template_t templateId,
-                    camera_metadata_t** metadata) override;
+                    CameraMetadata* metadata) override;
 
         virtual status_t getSessionCharacteristics(
                 const SessionConfiguration &/*configuration*/,
                 bool overrideForPerfClass, camera3::metadataGetter /*getMetadata*/,
-                CameraMetadata *sessionCharacteristics /*sessionCharacteristics*/);
+                CameraMetadata */*outChars*/);
 
         std::shared_ptr<aidl::android::hardware::camera::device::ICameraDevice>
                 startDeviceInterface();
+        std::vector<int32_t> mAdditionalKeysForFeatureQuery;
     };
 
  private:
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 065f0c5..40800d9 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -26,6 +26,7 @@
 #include "device3/ZoomRatioMapper.h"
 #include <utils/SessionConfigurationUtilsHidl.h>
 #include <utils/Trace.h>
+#include <utils/Utils.h>
 
 #include <android/hardware/camera/device/3.7/ICameraDevice.h>
 
@@ -69,7 +70,7 @@
         case Status::INTERNAL_ERROR:
             return INVALID_OPERATION;
     }
-    ALOGW("Unexpected HAL status code %d", s);
+    ALOGW("Unexpected HAL status code %d", eToI(s));
     return INVALID_OPERATION;
 }
 
@@ -111,7 +112,7 @@
         case Status::INTERNAL_ERROR:
             return "INTERNAL_ERROR";
     }
-    ALOGW("Unexpected HAL status code %d", s);
+    ALOGW("Unexpected HAL status code %d", eToI(s));
     return "UNKNOWN_ERROR";
 }
 
@@ -591,7 +592,7 @@
     }
     if (status != Status::OK) {
         ALOGE("%s: Unable to get camera characteristics for device %s: %s (%d)",
-                __FUNCTION__, id.c_str(), statusToString(status), status);
+                __FUNCTION__, id.c_str(), statusToString(status), eToI(status));
         return;
     }
 
@@ -770,7 +771,7 @@
             if (status != Status::OK) {
                 ALOGE("%s: Unable to get physical camera %s characteristics for device %s: %s (%d)",
                         __FUNCTION__, id.c_str(), mId.c_str(),
-                        statusToString(status), status);
+                        statusToString(status), eToI(status));
                 return;
             }
 
@@ -861,7 +862,7 @@
 
 status_t HidlProviderInfo::HidlDeviceInfo3::isSessionConfigurationSupported(
         const SessionConfiguration &configuration, bool overrideForPerfClass,
-        bool checkSessionParams, bool *status) {
+        camera3::metadataGetter getMetadata, bool checkSessionParams, bool *status) {
 
     if (checkSessionParams) {
         // HIDL device doesn't support checking session parameters
@@ -870,8 +871,6 @@
 
     hardware::camera::device::V3_7::StreamConfiguration configuration_3_7;
     bool earlyExit = false;
-    camera3::metadataGetter getMetadata = [this](const std::string &id,
-            bool /*overrideForPerfClass*/) {return this->deviceInfo(id);};
     auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
             mId, mCameraCharacteristics, getMetadata, mPhysicalIds,
             configuration_3_7, overrideForPerfClass, mProviderTagid,
@@ -930,7 +929,7 @@
                 res = INVALID_OPERATION;
                 break;
             default:
-                ALOGE("%s: Session configuration query failed: %d", __FUNCTION__, callStatus);
+                ALOGE("%s: Session configuration query failed: %d", __FUNCTION__, eToI(callStatus));
                 res = UNKNOWN_ERROR;
         }
     } else {
@@ -959,7 +958,7 @@
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
         res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
-                /*overrideToPortrait*/false);
+                 hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res != OK) {
             return res;
         }
@@ -967,7 +966,7 @@
                 [this](const std::string &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
                     mManager->getCameraCharacteristicsLocked(id, overrideForPerfClass,
-                            &physicalDeviceInfo, /*overrideToPortrait*/false);
+                            &physicalDeviceInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
@@ -1078,7 +1077,7 @@
                         break;
                     default:
                         ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
-                                  callStatus);
+                                eToI(callStatus));
                         res = UNKNOWN_ERROR;
                 }
             } else {
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
index 869bba0..2838f03 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
@@ -105,8 +105,8 @@
 
         virtual status_t isSessionConfigurationSupported(
                 const SessionConfiguration &/*configuration*/,
-                bool overrideForPerfClass, bool checkSessionParams,
-                bool *status/*status*/);
+                bool overrideForPerfClass, camera3::metadataGetter /*getMetadata*/,
+                bool checkSessionParams, bool *status/*status*/);
 
         sp<hardware::camera::device::V3_2::ICameraDevice> startDeviceInterface();
     };
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index c42e51a..65fee7d 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -485,7 +485,7 @@
                     streamId, bufferCount);
         }
     }
-    std::string linesStr = std::move(lines.str());
+    std::string linesStr = lines.str();
     write(fd, linesStr.c_str(), linesStr.size());
 }
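
The one-line change in Camera3BufferManager.cpp removes a redundant std::move(): ostringstream::str() already yields a prvalue, so moving it gains nothing and can trigger clang's -Wpessimizing-move warning. Standalone illustration:

    #include <sstream>
    #include <string>

    int main() {
        std::ostringstream lines;
        lines << "Stream 0: 4 buffers\n";

        // std::string s = std::move(lines.str());  // redundant move on a temporary
        std::string s = lines.str();                 // what the patch uses instead
        return s.empty() ? 1 : 0;
    }
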
 
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.h b/services/camera/libcameraservice/device3/Camera3BufferManager.h
index 64aaa230..27fcf96 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.h
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.h
@@ -68,7 +68,7 @@
      * by the consumer end point, the BufferQueueProducer callback onBufferReleased will call
      * returnBufferForStream() to return the free buffer to this buffer manager. If the stream
      * uses buffer manager to manage the stream buffers, it should disable the BufferQueue
-     * allocation via IGraphicBufferProducer::allowAllocation(false).
+     * allocation via Surface::allowAllocation(false).
      *
      * Registering an already registered stream has no effect.
      *
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index c0a0544..11891e9 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -45,6 +45,7 @@
 #include <utility>
 
 #include <android-base/stringprintf.h>
+#include <sched.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include <utils/Timers.h>
@@ -64,7 +65,6 @@
 #include "device3/Camera3InputStream.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3SharedOutputStream.h"
-#include "utils/CameraThreadState.h"
 #include "utils/CameraTraces.h"
 #include "utils/SchedulingPolicyUtils.h"
 #include "utils/SessionConfigurationUtils.h"
@@ -78,13 +78,16 @@
 using namespace android::camera3;
 using namespace android::camera3::SessionConfigurationUtils;
 using namespace android::hardware::camera;
+using namespace android::hardware::cameraservice::utils::conversion::aidl;
 
 namespace flags = com::android::internal::camera::flags;
 namespace android {
 
 Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
-        const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+        const std::string &id, bool overrideForPerfClass, int rotationOverride,
         bool legacyClient):
+        AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mId(id),
         mLegacyClient(legacyClient),
@@ -108,7 +111,7 @@
         mLastTemplateId(-1),
         mNeedFixupMonochromeTags(false),
         mOverrideForPerfClass(overrideForPerfClass),
-        mOverrideToPortrait(overrideToPortrait),
+        mRotationOverride(rotationOverride),
         mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
         mComposerOutput(false),
         mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
@@ -184,7 +187,7 @@
     /** Start up request queue thread */
     mRequestThread = createNewRequestThread(
             this, mStatusTracker, mInterface, sessionParamKeys,
-            mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait,
+            mUseHalBufManager, mSupportCameraMute, mRotationOverride,
             mSupportZoomOverride);
     res = mRequestThread->run((std::string("C3Dev-") + mId + "-ReqQueue").c_str());
     if (res != OK) {
@@ -195,6 +198,8 @@
         return res;
     }
 
+    setCameraMuteLocked(mCameraMuteInitial);
+
     mPreparerThread = new PreparerThread();
 
     internalUpdateStatusLocked(STATUS_UNCONFIGURED);
@@ -254,6 +259,8 @@
         return res;
     }
 
+    mSupportsExtensionKeys = areExtensionKeysSupported(mDeviceInfo);
+
     return OK;
 }
 
@@ -605,10 +612,35 @@
 
     {
         lines = "    Last request sent:\n";
+        LatestRequestInfo lastRequestInfo = getLatestRequestInfoLocked();
+        // Print out output and input stream ids
+        if (flags::dumpsys_request_stream_ids()) {
+            if (lastRequestInfo.outputStreamIds.size() != 0) {
+                lines += "      Output Stream Ids:\n";
+                for (const auto &streamId: lastRequestInfo.outputStreamIds) {
+                    lines +=  "         " + std::to_string(streamId) + "\n";
+                }
+            }
+            if (lastRequestInfo.inputStreamId != -1) {
+                lines += "       Input Stream Id: " + std::to_string(lastRequestInfo.inputStreamId)
+                        + "\n";
+            }
+        }
+        // Keeping this write() outside the flagged block makes it easier to
+        // remove the flag later.
+        write(fd, lines.c_str(), lines.size());
+        lines = "    Logical request settings:\n";
+        CameraMetadata lastRequestSettings = lastRequestInfo.requestSettings;
         write(fd, lines.c_str(), lines.size());
 
-        CameraMetadata lastRequest = getLatestRequestLocked();
-        lastRequest.dump(fd, /*verbosity*/2, /*indentation*/6);
+        lastRequestSettings.dump(fd, /*verbosity=all info*/2, /*indentation*/6);
+        if (flags::dumpsys_request_stream_ids()) {
+            for (const auto& pair: lastRequestInfo.physicalRequestSettings) {
+                lines = "    Physical request settings for camera id " + pair.first + ":\n";
+                write(fd, lines.c_str(), lines.size());
+                pair.second.dump(fd, /*verbosity=all info*/2, /*indentation*/8);
+            }
+        }
     }
 
     if (dumpTemplates) {
@@ -713,7 +745,7 @@
 
 status_t Camera3Device::convertMetadataListToRequestListLocked(
         const List<const PhysicalCameraSettingsList> &metadataList,
-        const std::list<const SurfaceMap> &surfaceMaps,
+        const std::list<SurfaceMap> &surfaceMaps,
         bool repeating, nsecs_t requestTimeNs,
         RequestList *requestList) {
     if (requestList == NULL) {
@@ -723,7 +755,7 @@
 
     int32_t burstId = 0;
     List<const PhysicalCameraSettingsList>::const_iterator metadataIt = metadataList.begin();
-    std::list<const SurfaceMap>::const_iterator surfaceMapIt = surfaceMaps.begin();
+    std::list<SurfaceMap>::const_iterator surfaceMapIt = surfaceMaps.begin();
     for (; metadataIt != metadataList.end() && surfaceMapIt != surfaceMaps.end();
             ++metadataIt, ++surfaceMapIt) {
         sp<CaptureRequest> newRequest = setUpRequestLocked(*metadataIt, *surfaceMapIt);
@@ -771,14 +803,14 @@
     ATRACE_CALL();
 
     List<const PhysicalCameraSettingsList> requestsList;
-    std::list<const SurfaceMap> surfaceMaps;
+    std::list<SurfaceMap> surfaceMaps;
     convertToRequestList(requestsList, surfaceMaps, request);
 
     return captureList(requestsList, surfaceMaps, lastFrameNumber);
 }
 
 void Camera3Device::convertToRequestList(List<const PhysicalCameraSettingsList>& requestsList,
-        std::list<const SurfaceMap>& surfaceMaps,
+        std::list<SurfaceMap>& surfaceMaps,
         const CameraMetadata& request) {
     PhysicalCameraSettingsList requestList;
     requestList.push_back({getId(), request});
@@ -796,7 +828,7 @@
 
 status_t Camera3Device::submitRequestsHelper(
         const List<const PhysicalCameraSettingsList> &requests,
-        const std::list<const SurfaceMap> &surfaceMaps,
+        const std::list<SurfaceMap> &surfaceMaps,
         bool repeating,
         /*out*/
         int64_t *lastFrameNumber) {
@@ -844,7 +876,7 @@
 }
 
 status_t Camera3Device::captureList(const List<const PhysicalCameraSettingsList> &requestsList,
-                                    const std::list<const SurfaceMap> &surfaceMaps,
+                                    const std::list<SurfaceMap> &surfaceMaps,
                                     int64_t *lastFrameNumber) {
     ATRACE_CALL();
 
@@ -856,7 +888,7 @@
     ATRACE_CALL();
 
     List<const PhysicalCameraSettingsList> requestsList;
-    std::list<const SurfaceMap> surfaceMaps;
+    std::list<SurfaceMap> surfaceMaps;
     convertToRequestList(requestsList, surfaceMaps, request);
 
     return setStreamingRequestList(requestsList, /*surfaceMap*/surfaceMaps,
@@ -865,7 +897,7 @@
 
 status_t Camera3Device::setStreamingRequestList(
         const List<const PhysicalCameraSettingsList> &requestsList,
-        const std::list<const SurfaceMap> &surfaceMaps, int64_t *lastFrameNumber) {
+        const std::list<SurfaceMap> &surfaceMaps, int64_t *lastFrameNumber) {
     ATRACE_CALL();
 
     return submitRequestsHelper(requestsList, surfaceMaps, /*repeating*/true, lastFrameNumber);
@@ -1359,7 +1391,8 @@
 status_t Camera3Device::filterParamsAndConfigureLocked(const CameraMetadata& params,
         int operatingMode) {
     CameraMetadata filteredParams;
-    SessionConfigurationUtils::filterParameters(params, mDeviceInfo, mVendorTagId, filteredParams);
+    SessionConfigurationUtils::filterParameters(params, mDeviceInfo,
+            /*additionalKeys*/{}, mVendorTagId, filteredParams);
 
     camera_metadata_entry_t availableSessionKeys = mDeviceInfo.find(
             ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
@@ -1390,7 +1423,7 @@
                 request->mRotateAndCropAuto = false;
             }
 
-            overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
+            overrideAutoRotateAndCrop(request, mRotationOverride, mRotateAndCropOverride);
         }
 
         if (autoframingSessionKey) {
@@ -1429,7 +1462,7 @@
 
     if (templateId <= 0 || templateId >= CAMERA_TEMPLATE_COUNT) {
         android_errorWriteWithInfoLog(CameraService::SN_EVENT_LOG_ID, "26866110",
-                CameraThreadState::getCallingUid(), nullptr, 0);
+                getCallingUid(), nullptr, 0);
         return BAD_VALUE;
     }
 
@@ -1480,29 +1513,13 @@
         set_camera_metadata_vendor_id(rawRequest, mVendorTagId);
         mRequestTemplateCache[templateId].acquire(rawRequest);
 
-        // Override the template request with zoomRatioMapper
-        res = mZoomRatioMappers[mId].initZoomRatioInTemplate(
-                &mRequestTemplateCache[templateId]);
+        res = overrideDefaultRequestKeys(&mRequestTemplateCache[templateId]);
         if (res != OK) {
-            CLOGE("Failed to update zoom ratio for template %d: %s (%d)",
+            CLOGE("Failed to overrideDefaultRequestKeys for template %d: %s (%d)",
                     templateId, strerror(-res), res);
             return res;
         }
 
-        // Fill in JPEG_QUALITY if not available
-        if (!mRequestTemplateCache[templateId].exists(ANDROID_JPEG_QUALITY)) {
-            static const uint8_t kDefaultJpegQuality = 95;
-            mRequestTemplateCache[templateId].update(ANDROID_JPEG_QUALITY,
-                    &kDefaultJpegQuality, 1);
-        }
-
-        // Fill in AUTOFRAMING if not available
-        if (!mRequestTemplateCache[templateId].exists(ANDROID_CONTROL_AUTOFRAMING)) {
-            static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
-            mRequestTemplateCache[templateId].update(ANDROID_CONTROL_AUTOFRAMING,
-                    &kDefaultAutoframingMode, 1);
-        }
-
         *request = mRequestTemplateCache[templateId];
         mLastTemplateId = templateId;
     }
@@ -1973,9 +1990,10 @@
             // Get session stats from the builder, and notify the listener.
             int64_t requestCount, resultErrorCount;
             bool deviceError;
+            std::pair<int32_t, int32_t> mostRequestedFpsRange;
             std::map<int, StreamStats> streamStatsMap;
             mSessionStatsBuilder.buildAndReset(&requestCount, &resultErrorCount,
-                    &deviceError, &streamStatsMap);
+                    &deviceError, &mostRequestedFpsRange, &streamStatsMap);
             for (size_t i = 0; i < streamIds.size(); i++) {
                 int streamId = streamIds[i];
                 auto stats = streamStatsMap.find(streamId);
@@ -1993,7 +2011,8 @@
                            stats->second.mCaptureLatencyHistogram.end());
                 }
             }
-            listener->notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
+            listener->notifyIdle(requestCount, resultErrorCount, deviceError,
+                mostRequestedFpsRange, streamStats);
         } else {
             res = listener->notifyActive(sessionMaxPreviewFps);
         }
@@ -2384,10 +2403,15 @@
     return ret;
 }
 
+
 status_t Camera3Device::configureStreamsLocked(int operatingMode,
         const CameraMetadata& sessionParams, bool notifyRequestThread) {
     ATRACE_CALL();
     status_t res;
+    // Stream/surface setup can involve a lot of binder IPC. Raise the
+    // thread priority while running the binder-IPC-heavy configuration
+    // sequence.
+    RunThreadWithRealtimePriority priorityBump;
 
     if (mStatus != STATUS_UNCONFIGURED && mStatus != STATUS_CONFIGURED) {
         CLOGE("Not idle");
@@ -2531,10 +2555,13 @@
 
     config.streams = streams.editArray();
     config.hal_buffer_managed_streams = mHalBufManagedStreamIds;
+    config.use_hal_buf_manager = mUseHalBufManager;
 
     // Do the HAL configuration; will potentially touch stream
-    // max_buffers, usage, and priv fields, as well as data_space and format
-    // fields for IMPLEMENTATION_DEFINED formats.
+    // max_buffers, usage, and priv fields, the data_space and format fields for
+    // IMPLEMENTATION_DEFINED formats, as well as the HAL-buffer-managed streams
+    // and use_hal_buf_manager (in case the aconfig flag session_hal_buf_manager is
+    // not enabled but the HAL supports a session-specific HAL buffer manager).
 
     int64_t logId = mCameraServiceProxyWrapper->getCurrentLogIdForCamera(mId);
     const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
@@ -2554,6 +2581,8 @@
                 strerror(-res), res);
         return res;
     }
+    // The HAL buffer manager behavior may have been changed by the
+    // configureStreams call.
     mUseHalBufManager = config.use_hal_buf_manager;
     if (flags::session_hal_buf_manager()) {
         bool prevSessionHalBufManager = (mHalBufManagedStreamIds.size() != 0);
@@ -2574,6 +2603,7 @@
         }
         mRequestThread->setHalBufferManagedStreams(mHalBufManagedStreamIds);
     }
+
     // Finish all stream configuration immediately.
     // TODO: Try to relax this later back to lazy completion, which should be
     // faster
@@ -2582,8 +2612,8 @@
         bool streamReConfigured = false;
         res = mInputStream->finishConfiguration(&streamReConfigured);
         if (res != OK) {
-            CLOGE("Can't finish configuring input stream %d: %s (%d)",
-                    mInputStream->getId(), strerror(-res), res);
+            CLOGE("Can't finish configuring input stream %d: %s (%d)", mInputStream->getId(),
+                  strerror(-res), res);
             cancelStreamsConfigurationLocked();
             if ((res == NO_INIT || res == DEAD_OBJECT) && mInputStream->isAbandoned()) {
                 return DEAD_OBJECT;
@@ -2601,8 +2631,8 @@
             bool streamReConfigured = false;
             res = outputStream->finishConfiguration(&streamReConfigured);
             if (res != OK) {
-                CLOGE("Can't finish configuring output stream %d: %s (%d)",
-                        outputStream->getId(), strerror(-res), res);
+                CLOGE("Can't finish configuring output stream %d: %s (%d)", outputStream->getId(),
+                      strerror(-res), res);
                 cancelStreamsConfigurationLocked();
                 if ((res == NO_INIT || res == DEAD_OBJECT) && outputStream->isAbandoned()) {
                     return DEAD_OBJECT;
@@ -2630,8 +2660,8 @@
     if (disableFifo != 1) {
         // Boost priority of request thread to SCHED_FIFO.
         pid_t requestThreadTid = mRequestThread->getTid();
-        res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), requestThreadTid,
-                kRequestThreadPriority);
+        res = SchedulingPolicyUtils::requestPriorityDirect(
+                getpid(), requestThreadTid, RunThreadWithRealtimePriority::kRequestThreadPriority);
         if (res != OK) {
             ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
                     strerror(-res), res);
@@ -2906,13 +2936,13 @@
     camera3::flushInflightRequests(states);
 }
 
-CameraMetadata Camera3Device::getLatestRequestLocked() {
+Camera3Device::LatestRequestInfo Camera3Device::getLatestRequestInfoLocked() {
     ALOGV("%s", __FUNCTION__);
 
-    CameraMetadata retVal;
+    LatestRequestInfo retVal;
 
     if (mRequestThread != NULL) {
-        retVal = mRequestThread->getLatestRequest();
+        retVal = mRequestThread->getLatestRequestInfo();
     }
 
     return retVal;
@@ -2928,6 +2958,16 @@
             physicalMetadata, outputBuffers, numOutputBuffers, inputStreamId);
 }
 
+void Camera3Device::collectRequestStats(int64_t frameNumber, const CameraMetadata &request) {
+    if (flags::analytics_24q3()) {
+        auto entry = request.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
+        if (entry.count >= 2) {
+            mSessionStatsBuilder.incFpsRequestedCount(
+                entry.data.i32[0], entry.data.i32[1], frameNumber);
+        }
+    }
+}
+
 void Camera3Device::cleanupNativeHandles(
         std::vector<native_handle_t*> *handles, bool closeFd) {
     if (handles == nullptr) {
@@ -3026,7 +3066,7 @@
         sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
         bool useHalBufManager,
         bool supportCameraMute,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool supportSettingsOverride) :
         Thread(/*canCallJava*/false),
         mParent(parent),
@@ -3060,7 +3100,7 @@
         mLatestSessionParams(sessionParamKeys.size()),
         mUseHalBufManager(useHalBufManager),
         mSupportCameraMute(supportCameraMute),
-        mOverrideToPortrait(overrideToPortrait),
+        mRotationOverride(rotationOverride),
         mSupportSettingsOverride(supportSettingsOverride) {
     mStatusId = statusTracker->addComponent("RequestThread");
     mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
@@ -3468,34 +3508,47 @@
 void Camera3Device::RequestThread::updateNextRequest(NextRequest& nextRequest) {
     // Update the latest request sent to HAL
     camera_capture_request_t& halRequest = nextRequest.halRequest;
-    if (halRequest.settings != NULL) { // Don't update if they were unchanged
+    sp<Camera3Device> parent = mParent.promote();
+    if (halRequest.settings != nullptr) { // Don't update if they were unchanged
         Mutex::Autolock al(mLatestRequestMutex);
 
-        camera_metadata_t* cloned = clone_camera_metadata(halRequest.settings);
-        mLatestRequest.acquire(cloned);
+        // Fill in latest request and physical request
+        camera_metadata_t *cloned = clone_camera_metadata(halRequest.settings);
+        mLatestRequestInfo.requestSettings.acquire(cloned);
 
-        mLatestPhysicalRequest.clear();
+        mLatestRequestInfo.physicalRequestSettings.clear();
+        mLatestRequestInfo.outputStreamIds.clear();
         for (uint32_t i = 0; i < halRequest.num_physcam_settings; i++) {
             cloned = clone_camera_metadata(halRequest.physcam_settings[i]);
-            mLatestPhysicalRequest.emplace(halRequest.physcam_id[i],
-                    CameraMetadata(cloned));
+            mLatestRequestInfo.physicalRequestSettings.emplace(halRequest.physcam_id[i],
+                                           CameraMetadata(cloned));
         }
 
-        sp<Camera3Device> parent = mParent.promote();
-        if (parent != NULL) {
+        if (parent != nullptr) {
             int32_t inputStreamId = -1;
             if (halRequest.input_buffer != nullptr) {
               inputStreamId = Camera3Stream::cast(halRequest.input_buffer->stream)->getId();
+              mLatestRequestInfo.inputStreamId = inputStreamId;
             }
 
+           for (size_t i = 0; i < halRequest.num_output_buffers; i++) {
+               int32_t outputStreamId =
+                       Camera3Stream::cast(halRequest.output_buffers[i].stream)->getId();
+               mLatestRequestInfo.outputStreamIds.emplace(outputStreamId);
+           }
+
             parent->monitorMetadata(TagMonitor::REQUEST,
                     halRequest.frame_number,
-                    0, mLatestRequest, mLatestPhysicalRequest, halRequest.output_buffers,
+                    0, mLatestRequestInfo.requestSettings,
+                    mLatestRequestInfo.physicalRequestSettings, halRequest.output_buffers,
                     halRequest.num_output_buffers, inputStreamId);
         }
     }
+    if (parent != nullptr) {
+        parent->collectRequestStats(halRequest.frame_number, mLatestRequestInfo.requestSettings);
+    }
 
-    if (halRequest.settings != NULL) {
+    if (halRequest.settings != nullptr) {
         nextRequest.captureRequest->mSettingsList.begin()->metadata.unlock(
                 halRequest.settings);
     }
@@ -3595,10 +3648,12 @@
         sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
         captureRequest->mTestPatternChanged = overrideTestPattern(captureRequest);
         // Do not override rotate&crop for stream configurations that include
-        // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
+        // SurfaceViews(HW_COMPOSER) output, unless mRotationOverride is set.
         // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
-        captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
-            overrideAutoRotateAndCrop(captureRequest);
+        using hardware::ICameraService::ROTATION_OVERRIDE_NONE;
+        captureRequest->mRotateAndCropChanged =
+                (mComposerOutput && (mRotationOverride == ROTATION_OVERRIDE_NONE)) ?
+                        false : overrideAutoRotateAndCrop(captureRequest);
         captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
         if (flags::inject_session_params()) {
             injectSessionParams(captureRequest, mInjectedSessionParams);
@@ -3650,19 +3705,18 @@
         cleanUpFailedRequests(/*sendRequestError*/ true);
         // Check if any stream is abandoned.
         checkAndStopRepeatingRequest();
+        // Inform waitUntilRequestProcessed thread of a failed request ID
+        wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
         return true;
     } else if (res != OK) {
         cleanUpFailedRequests(/*sendRequestError*/ false);
+        // Inform waitUntilRequestProcessed thread of a failed request ID
+        wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
         return false;
     }
 
     // Inform waitUntilRequestProcessed thread of a new request ID
-    {
-        Mutex::Autolock al(mLatestRequestMutex);
-
-        mLatestRequestId = latestRequestId;
-        mLatestRequestSignal.signal();
-    }
+    wakeupLatestRequest(/*failedRequestId*/false, latestRequestId);
 
     // Submit a batch of requests to HAL.
     // Use flush lock only when submitting multiple requests in a batch.
@@ -3887,13 +3941,22 @@
 
                     for (it = captureRequest->mSettingsList.begin();
                             it != captureRequest->mSettingsList.end(); it++) {
-                        res = hardware::cameraservice::utils::conversion::aidl::filterVndkKeys(
-                                mVndkVersion, it->metadata, false /*isStatic*/);
+                        res = filterVndkKeys(mVndkVersion, it->metadata, false /*isStatic*/);
                         if (res != OK) {
                             SET_ERR("RequestThread: Failed during VNDK filter of capture requests "
                                     "%d: %s (%d)", halRequest->frame_number, strerror(-res), res);
                             return INVALID_OPERATION;
                         }
+
+                        if (!parent->mSupportsExtensionKeys) {
+                            res = filterExtensionKeys(&it->metadata);
+                            if (res != OK) {
+                                SET_ERR("RequestThread: Failed during extension filter of capture "
+                                        "requests %d: %s (%d)", halRequest->frame_number,
+                                        strerror(-res), res);
+                                return INVALID_OPERATION;
+                            }
+                        }
                     }
                 }
             }
@@ -4142,13 +4205,13 @@
     return OK;
 }
 
-CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
+Camera3Device::LatestRequestInfo Camera3Device::RequestThread::getLatestRequestInfo() const {
     ATRACE_CALL();
     Mutex::Autolock al(mLatestRequestMutex);
 
     ALOGV("RequestThread::%s", __FUNCTION__);
 
-    return mLatestRequest;
+    return mLatestRequestInfo;
 }
 
 bool Camera3Device::RequestThread::isStreamPending(
@@ -4408,12 +4471,7 @@
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
                         captureRequest->mResultExtras);
             }
-            {
-                Mutex::Autolock al(mLatestRequestMutex);
-
-                mLatestFailedRequestId = captureRequest->mResultExtras.requestId;
-                mLatestRequestSignal.signal();
-            }
+            wakeupLatestRequest(/*failedRequestId*/true, captureRequest->mResultExtras.requestId);
         }
 
         // Remove yet-to-be submitted inflight request from inflightMap
@@ -4865,16 +4923,16 @@
 bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request) {
     ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
-    return Camera3Device::overrideAutoRotateAndCrop(request, this->mOverrideToPortrait,
+    return Camera3Device::overrideAutoRotateAndCrop(request, this->mRotationOverride,
             this->mRotateAndCropOverride);
 }
 
 bool Camera3Device::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request,
-        bool overrideToPortrait,
+        int rotationOverride,
         camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride) {
     ATRACE_CALL();
 
-    if (overrideToPortrait) {
+    if (rotationOverride != hardware::ICameraService::ROTATION_OVERRIDE_NONE) {
         uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
         CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
         metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
@@ -5075,6 +5133,20 @@
     return OK;
 }
 
+void Camera3Device::RequestThread::wakeupLatestRequest(
+        bool latestRequestFailed,
+        int32_t latestRequestId) {
+    Mutex::Autolock al(mLatestRequestMutex);
+
+    if (latestRequestFailed) {
+        mLatestFailedRequestId = latestRequestId;
+    } else {
+        mLatestRequestId = latestRequestId;
+    }
+    mLatestRequestSignal.signal();
+}
+
+
 /**
  * PreparerThread inner class methods
  */
@@ -5543,10 +5615,19 @@
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
+    return setCameraMuteLocked(enabled);
+}
 
-    if (mRequestThread == nullptr || !mSupportCameraMute) {
+status_t Camera3Device::setCameraMuteLocked(bool enabled) {
+    if (mRequestThread == nullptr) {
+        mCameraMuteInitial = enabled;
+        return OK;
+    }
+
+    if (!mSupportCameraMute) {
         return INVALID_OPERATION;
     }
+
     int32_t muteMode =
             !enabled                      ? ANDROID_SENSOR_TEST_PATTERN_MODE_OFF :
             mSupportTestPatternSolidColor ? ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR :
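
configureStreamsLocked() above now holds a scoped RunThreadWithRealtimePriority bump while it walks the binder-heavy stream setup, and the request thread reuses the same utility's kRequestThreadPriority constant. A minimal sketch of the scoped SCHED_FIFO pattern, assuming the helper simply raises the calling thread's policy in its constructor and restores the previous one in its destructor (ScopedFifoPriority below is an illustrative name, not the actual utility):

#include <sched.h>

// Illustrative RAII helper: run the enclosing scope at SCHED_FIFO and put the
// thread back on its previous scheduling policy when the scope exits.
class ScopedFifoPriority {
  public:
    explicit ScopedFifoPriority(int priority) {
        mPrevPolicy = sched_getscheduler(0 /*calling thread*/);
        sched_getparam(0, &mPrevParam);
        sched_param param = {};
        param.sched_priority = priority;
        sched_setscheduler(0, SCHED_FIFO, &param);
    }
    ~ScopedFifoPriority() {
        // Restore whatever the thread was running with before.
        sched_setscheduler(0, mPrevPolicy, &mPrevParam);
    }
  private:
    int mPrevPolicy = 0;
    sched_param mPrevParam = {};
};
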
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 498ef55..6f87ca3 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -47,6 +47,7 @@
 #include "device3/Camera3OutputInterface.h"
 #include "device3/Camera3OfflineSession.h"
 #include "device3/Camera3StreamInterface.h"
+#include "utils/AttributionAndPermissionUtils.h"
 #include "utils/TagMonitor.h"
 #include "utils/IPCTransport.h"
 #include "utils/LatencyHistogram.h"
@@ -79,13 +80,15 @@
             public camera3::SetErrorInterface,
             public camera3::InflightRequestUpdateInterface,
             public camera3::RequestBufferInterface,
-            public camera3::FlushBufferInterface {
+            public camera3::FlushBufferInterface,
+            public AttributionAndPermissionUtilsEncapsulator {
   friend class HidlCamera3Device;
   friend class AidlCamera3Device;
   public:
 
     explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
-            const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+            const std::string& id, bool overrideForPerfClass, int rotationOverride,
             bool legacyClient = false);
 
     virtual ~Camera3Device();
@@ -129,12 +132,12 @@
     // idle state
     status_t capture(CameraMetadata &request, int64_t *lastFrameNumber = NULL) override;
     status_t captureList(const List<const PhysicalCameraSettingsList> &requestsList,
-            const std::list<const SurfaceMap> &surfaceMaps,
+            const std::list<SurfaceMap> &surfaceMaps,
             int64_t *lastFrameNumber = NULL) override;
     status_t setStreamingRequest(const CameraMetadata &request,
             int64_t *lastFrameNumber = NULL) override;
     status_t setStreamingRequestList(const List<const PhysicalCameraSettingsList> &requestsList,
-            const std::list<const SurfaceMap> &surfaceMaps,
+            const std::list<SurfaceMap> &surfaceMaps,
             int64_t *lastFrameNumber = NULL) override;
     status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL) override;
 
@@ -305,6 +308,15 @@
     status_t setCameraMute(bool enabled);
 
     /**
+     * Mute the camera.
+     *
+     * When muted, black image data is output on all output streams.
+     * This method assumes the caller has already acquired the 'mInterfaceLock'
+     * and 'mLock' locks.
+     */
+    status_t setCameraMuteLocked(bool enabled);
+
+    /**
      * Enables/disables camera service watchdog
      */
     status_t setCameraServiceWatchdog(bool enabled);
@@ -361,8 +373,6 @@
     static const size_t        kInFlightWarnLimitHighSpeed = 256; // batch size 32 * pipe depth 8
     static const nsecs_t       kMinInflightDuration = 5000000000; // 5 s
     static const nsecs_t       kBaseGetBufferWait = 3000000000; // 3 sec.
-    // SCHED_FIFO priority for request submission thread in HFR mode
-    static const int           kRequestThreadPriority = 1;
 
     struct                     RequestTrigger;
     // minimal jpeg buffer size: 256KB + blob header
@@ -693,17 +703,17 @@
 
     status_t convertMetadataListToRequestListLocked(
             const List<const PhysicalCameraSettingsList> &metadataList,
-            const std::list<const SurfaceMap> &surfaceMaps,
+            const std::list<SurfaceMap> &surfaceMaps,
             bool repeating, nsecs_t requestTimeNs,
             /*out*/
             RequestList *requestList);
 
     void convertToRequestList(List<const PhysicalCameraSettingsList>& requestsList,
-            std::list<const SurfaceMap>& surfaceMaps,
+            std::list<SurfaceMap>& surfaceMaps,
             const CameraMetadata& request);
 
     status_t submitRequestsHelper(const List<const PhysicalCameraSettingsList> &requestsList,
-                                  const std::list<const SurfaceMap> &surfaceMaps,
+                                  const std::list<SurfaceMap> &surfaceMaps,
                                   bool repeating,
                                   int64_t *lastFrameNumber = NULL);
 
@@ -726,12 +736,19 @@
     virtual void applyMaxBatchSizeLocked(
             RequestList* requestList, const sp<camera3::Camera3OutputStreamInterface>& stream) = 0;
 
+    struct LatestRequestInfo {
+        CameraMetadata requestSettings;
+        std::unordered_map<std::string, CameraMetadata> physicalRequestSettings;
+        int32_t inputStreamId = -1;
+        std::set<int32_t> outputStreamIds;
+    };
+
     /**
      * Get the last request submitted to the hal by the request thread.
      *
      * Must be called with mLock held.
      */
-    virtual CameraMetadata getLatestRequestLocked();
+    virtual LatestRequestInfo getLatestRequestInfoLocked();
 
     virtual status_t injectionCameraInitialize(const std::string &injectCamId,
             sp<CameraProviderManager> manager) = 0;
@@ -867,7 +884,7 @@
 
     // Override rotate_and_crop control if needed
     static bool    overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/,
-            bool overrideToPortrait,
+            int rotationOverride,
             camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride);
 
     // Override auto framing control if needed
@@ -904,7 +921,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride);
         ~RequestThread();
 
@@ -982,7 +999,7 @@
          * Get the latest request that was sent to the HAL
          * with process_capture_request.
          */
-        CameraMetadata getLatestRequest() const;
+        LatestRequestInfo getLatestRequestInfo() const;
 
         /**
          * Returns true if the stream is a target of any queued or repeating
@@ -1026,6 +1043,11 @@
             const sp<CaptureRequest> &request,
             const CameraMetadata& injectedSessionParams);
 
+        /**
+         * Signal waiters on mLatestRequestMutex with the latest (or latest failed) request id.
+         */
+        void wakeupLatestRequest(bool latestRequestFailed, int32_t latestRequestId);
+
       protected:
 
         virtual bool threadLoop();
@@ -1060,7 +1082,7 @@
         static const nsecs_t kRequestTimeout = 50e6; // 50 ms
 
         // TODO: does this need to be adjusted for long exposure requests?
-        static const nsecs_t kRequestSubmitTimeout = 200e6; // 200 ms
+        static const nsecs_t kRequestSubmitTimeout = 500e6; // 500 ms
 
         // Used to prepare a batch of requests.
         struct NextRequest {
@@ -1177,8 +1199,7 @@
         // android.request.id for latest process_capture_request
         int32_t            mLatestRequestId;
         int32_t            mLatestFailedRequestId;
-        CameraMetadata     mLatestRequest;
-        std::unordered_map<std::string, CameraMetadata> mLatestPhysicalRequest;
+        LatestRequestInfo mLatestRequestInfo;
 
         typedef KeyedVector<uint32_t/*tag*/, RequestTrigger> TriggerMap;
         Mutex              mTriggerMutex;
@@ -1214,7 +1235,7 @@
         bool               mUseHalBufManager = false;
         std::set<int32_t > mHalBufManagedStreamIds;
         const bool         mSupportCameraMute;
-        const bool         mOverrideToPortrait;
+        const bool         mRotationOverride;
         const bool         mSupportSettingsOverride;
         int32_t            mVndkVersion = -1;
     };
@@ -1225,7 +1246,7 @@
                 const Vector<int32_t>& /*sessionParamKeys*/,
                 bool /*useHalBufManager*/,
                 bool /*supportCameraMute*/,
-                bool /*overrideToPortrait*/,
+                int /*rotationOverride*/,
                 bool /*supportSettingsOverride*/) = 0;
 
     sp<RequestThread> mRequestThread;
@@ -1392,6 +1413,10 @@
             const camera_stream_buffer_t *outputBuffers, uint32_t numOutputBuffers,
             int32_t inputStreamId);
 
+    // Collect any statistics that are derived from the stream of capture requests
+    // sent to the HAL.
+    void collectRequestStats(int64_t frameNumber, const CameraMetadata& request);
+
     metadata_vendor_id_t mVendorTagId;
 
     // Cached last requested template id
@@ -1503,13 +1528,17 @@
 
     // Whether the camera framework overrides the device characteristics for
     // app compatibility reasons.
-    bool mOverrideToPortrait;
+    int mRotationOverride;
     camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
     bool mComposerOutput;
 
     // Auto framing override value
     camera_metadata_enum_android_control_autoframing mAutoframingOverride;
 
+    // Initial camera mute state, recorded before the request thread
+    // becomes active.
+    bool mCameraMuteInitial = false;
+
     // Settings override value
     int32_t mSettingsOverride; // -1 = use original, otherwise
                                // the settings override to use.
@@ -1523,6 +1552,9 @@
     // AE_TARGET_FPS_RANGE
     bool mIsFixedFps = false;
 
+    // Whether extension-related metadata keys are supported and can be forwarded to the HAL
+    bool mSupportsExtensionKeys = false;
+
     // Injection camera related methods.
     class Camera3DeviceInjectionMethods : public virtual RefBase {
       public:
@@ -1607,6 +1639,7 @@
 
     void overrideStreamUseCaseLocked();
 
+
 }; // class Camera3Device
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 75162bf..55467c3 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -68,7 +68,7 @@
     return INVALID_OPERATION;
 }
 
-void Camera3FakeStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
+void Camera3FakeStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     std::string lines;
     lines += fmt::sprintf("    Stream[%d]: Fake\n", mId);
     write(fd, lines.c_str(), lines.size());
@@ -99,12 +99,12 @@
     return OK;
 }
 
-status_t Camera3FakeStream::getEndpointUsage(uint64_t *usage) const {
+status_t Camera3FakeStream::getEndpointUsage(uint64_t *usage) {
     *usage = FAKE_USAGE;
     return OK;
 }
 
-bool Camera3FakeStream::isVideoStream() const {
+bool Camera3FakeStream::isVideoStream() {
     return false;
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index 1d82190..7addb90 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -50,7 +50,7 @@
      * Camera3Stream interface
      */
 
-    virtual void     dump(int fd, const Vector<String16> &args) const;
+    virtual void     dump(int fd, const Vector<String16> &args);
 
     status_t         setTransform(int transform, bool mayChangeMirror);
 
@@ -70,7 +70,7 @@
     /**
      * Return if this output stream is for video encoding.
      */
-    bool isVideoStream() const;
+    bool isVideoStream();
 
     /**
      * Return if the consumer configuration of this stream is deferred.
@@ -144,7 +144,7 @@
 
     virtual status_t configureQueueLocked();
 
-    virtual status_t getEndpointUsage(uint64_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage);
 
 }; // class Camera3FakeStream
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 152687e..61c5a3b 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -77,7 +77,7 @@
     return false;
 }
 
-void Camera3IOStreamBase::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
+void Camera3IOStreamBase::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     std::ostringstream lines;
 
     uint64_t consumerUsage = 0;
@@ -104,7 +104,7 @@
     lines << fmt::sprintf("      Total buffers: %zu, currently dequeued: %zu, "
             "currently cached: %zu\n", mTotalBufferCount, mHandoutTotalBufferCount,
             mCachedOutputBufferCount);
-    std::string linesStr = std::move(lines.str());
+    std::string linesStr = lines.str();
     write(fd, linesStr.c_str(), linesStr.size());
 
     Camera3Stream::dump(fd, args);
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 239fc71..7e73662 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -52,7 +52,7 @@
      * Camera3Stream interface
      */
 
-    virtual void     dump(int fd, const Vector<String16> &args) const;
+    virtual void     dump(int fd, const Vector<String16> &args);
 
     int              getMaxTotalBuffers() const { return mTotalBufferCount; }
   protected:
@@ -108,7 +108,7 @@
     virtual size_t   getCachedOutputBufferCountLocked() const;
     virtual size_t   getMaxCachedOutputBuffersLocked() const;
 
-    virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
+    virtual status_t getEndpointUsage(uint64_t *usage) = 0;
 
     status_t getBufferPreconditionCheckLocked() const;
     status_t returnBufferPreconditionCheckLocked() const;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 54ffbd7..f9b6037 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -18,10 +18,12 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <camera/StringUtils.h>
+
 #include "Camera3InputStream.h"
 
 namespace android {
@@ -216,7 +218,7 @@
     return OK;
 }
 
-void Camera3InputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
+void Camera3InputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     std::string lines;
     lines += fmt::sprintf("    Stream[%d]: Input\n", mId);
     write(fd, lines.c_str(), lines.size());
@@ -239,9 +241,15 @@
     mLastTimestamp = 0;
 
     if (mConsumer.get() == 0) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        sp<BufferItemConsumer> bufferItemConsumer = new BufferItemConsumer(mUsage);
+        sp<IGraphicBufferProducer> producer =
+                bufferItemConsumer->getSurface()->getIGraphicBufferProducer();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         int minUndequeuedBuffers = 0;
         res = producer->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
@@ -271,11 +279,19 @@
             camera_stream::max_buffers : minBufs;
         // TODO: somehow set the total buffer count when producer connects?
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mConsumer = bufferItemConsumer;
+        mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
+        mConsumer->setMaxAcquiredBufferCount(mTotalBufferCount);
+
+        mProducer = mConsumer->getSurface()->getIGraphicBufferProducer();
+#else
         mConsumer = new BufferItemConsumer(consumer, mUsage,
                                            mTotalBufferCount);
         mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
 
         mProducer = producer;
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         mConsumer->setBufferFreedListener(this);
     }
@@ -297,7 +313,7 @@
     return OK;
 }
 
-status_t Camera3InputStream::getEndpointUsage(uint64_t *usage) const {
+status_t Camera3InputStream::getEndpointUsage(uint64_t *usage) {
     // Per HAL3 spec, input streams have 0 for their initial usage field.
     *usage = 0;
     return OK;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index d4f4b15..a99c364 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -43,7 +43,7 @@
     Camera3InputStream(int id, uint32_t width, uint32_t height, int format);
     ~Camera3InputStream();
 
-    virtual void     dump(int fd, const Vector<String16> &args) const;
+    virtual void     dump(int fd, const Vector<String16> &args);
 
     // TODO: expose an interface to get the IGraphicBufferProducer
 
@@ -81,7 +81,7 @@
 
     virtual status_t configureQueueLocked();
 
-    virtual status_t getEndpointUsage(uint64_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage);
 
     /**
      * BufferItemConsumer::BufferFreedListener interface
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index f98636b..54d55a1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -27,6 +27,7 @@
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
 
 #include <android-base/unique_fd.h>
+#include <com_android_internal_camera_flags.h>
 #include <cutils/properties.h>
 #include <ui/GraphicBuffer.h>
 #include <utils/Log.h>
@@ -43,6 +44,8 @@
     (type *)((char*)(ptr) - offsetof(type, member))
 #endif
 
+namespace flags = com::android::internal::camera::flags;
+
 namespace android {
 
 namespace camera3 {
@@ -165,7 +168,6 @@
         mState = STATE_ERROR;
     }
 
-    mConsumerName = "Deferred";
     bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
     mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
 }
@@ -235,65 +237,6 @@
     return OK;
 }
 
-status_t Camera3OutputStream::getBuffersLocked(std::vector<OutstandingBuffer>* outBuffers) {
-    status_t res;
-
-    if ((res = getBufferPreconditionCheckLocked()) != OK) {
-        return res;
-    }
-
-    if (mUseBufferManager) {
-        ALOGE("%s: stream %d is managed by buffer manager and does not support batch operation",
-                __FUNCTION__, mId);
-        return INVALID_OPERATION;
-    }
-
-    sp<Surface> consumer = mConsumer;
-    /**
-     * Release the lock briefly to avoid deadlock for below scenario:
-     * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
-     * This thread acquired StreamingProcessor lock and try to lock Camera3Stream lock.
-     * Thread 2: Camera3Stream::returnBuffer->StreamingProcessor::onFrameAvailable().
-     * This thread acquired Camera3Stream lock and bufferQueue lock, and try to lock
-     * StreamingProcessor lock.
-     * Thread 3: Camera3Stream::getBuffer(). This thread acquired Camera3Stream lock
-     * and try to lock bufferQueue lock.
-     * Then there is circular locking dependency.
-     */
-    mLock.unlock();
-
-    size_t numBuffersRequested = outBuffers->size();
-    std::vector<Surface::BatchBuffer> buffers(numBuffersRequested);
-
-    nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
-    res = consumer->dequeueBuffers(&buffers);
-    nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
-    mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
-
-    mLock.lock();
-
-    if (res != OK) {
-        if (shouldLogError(res, mState)) {
-            ALOGE("%s: Stream %d: Can't dequeue %zu output buffers: %s (%d)",
-                    __FUNCTION__, mId, numBuffersRequested, strerror(-res), res);
-        }
-        checkRetAndSetAbandonedLocked(res);
-        return res;
-    }
-    checkRemovedBuffersLocked();
-
-    /**
-     * FenceFD now owned by HAL except in case of error,
-     * in which case we reassign it to acquire_fence
-     */
-    for (size_t i = 0; i < numBuffersRequested; i++) {
-        handoutBufferLocked(*(outBuffers->at(i).outBuffer),
-                &(buffers[i].buffer->handle), /*acquireFence*/buffers[i].fenceFd,
-                /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
-    }
-    return OK;
-}
-
 status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
             ANativeWindowBuffer* buffer, int anwReleaseFence,
             const std::vector<size_t>&) {
@@ -523,10 +466,11 @@
     return res;
 }
 
-void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
+void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     std::string lines;
     lines += fmt::sprintf("    Stream[%d]: Output\n", mId);
-    lines += fmt::sprintf("      Consumer name: %s\n", mConsumerName);
+    lines += fmt::sprintf("      Consumer name: %s\n", (mConsumer.get() != nullptr) ?
+            mConsumer->getConsumerName() : "Deferred");
     write(fd, lines.c_str(), lines.size());
 
     Camera3IOStreamBase::dump(fd, args);
@@ -611,16 +555,14 @@
     // Configure consumer-side ANativeWindow interface. The listener may be used
     // to notify buffer manager (if it is used) of the returned buffers.
     res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
-            /*reportBufferRemoval*/true,
-            /*listener*/mBufferProducerListener);
+            /*listener*/mBufferProducerListener,
+            /*reportBufferRemoval*/true);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mId);
         return res;
     }
 
-    mConsumerName = mConsumer->getConsumerName();
-
     res = native_window_set_usage(mConsumer.get(), mUsage);
     if (res != OK) {
         ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
@@ -668,7 +610,7 @@
         return res;
     }
 
-    int maxConsumerBuffers;
+    int maxConsumerBuffers = 0;
     res = static_cast<ANativeWindow*>(mConsumer.get())->query(
             mConsumer.get(),
             NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
@@ -745,8 +687,11 @@
         }
     }
 
-    res = native_window_set_buffer_count(mConsumer.get(),
-            mTotalBufferCount);
+    if (flags::surface_ipc()) {
+        res = mConsumer->setMaxDequeuedBufferCount(mTotalBufferCount - maxConsumerBuffers);
+    } else {
+        res = native_window_set_buffer_count(mConsumer.get(), mTotalBufferCount);
+    }
     if (res != OK) {
         ALOGE("%s: Unable to set buffer count for stream %d",
                 __FUNCTION__, mId);
@@ -1048,7 +993,7 @@
     return OK;
 }
 
-status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {
+status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) {
 
     status_t res;
 
@@ -1084,17 +1029,24 @@
 }
 
 status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
-        const sp<Surface>& surface) const {
-    status_t res;
-    uint64_t u = 0;
+        const sp<Surface>& surface) {
+    bool internalConsumer = (mConsumer.get() != nullptr) && (mConsumer == surface);
+    if (mConsumerUsageCachedValue.has_value() && flags::surface_ipc() && internalConsumer) {
+        *usage = mConsumerUsageCachedValue.value();
+        return OK;
+    }
 
-    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
-    applyZSLUsageQuirk(camera_stream::format, &u);
-    *usage = u;
+    status_t res;
+
+    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), usage);
+    applyZSLUsageQuirk(camera_stream::format, usage);
+    if (internalConsumer) {
+        mConsumerUsageCachedValue = *usage;
+    }
     return res;
 }
 
-bool Camera3OutputStream::isVideoStream() const {
+bool Camera3OutputStream::isVideoStream() {
     uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
@@ -1275,7 +1227,7 @@
     return OK;
 }
 
-bool Camera3OutputStream::isConsumedByHWComposer() const {
+bool Camera3OutputStream::isConsumedByHWComposer() {
     uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
@@ -1286,7 +1238,7 @@
     return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
 }
 
-bool Camera3OutputStream::isConsumedByHWTexture() const {
+bool Camera3OutputStream::isConsumedByHWTexture() {
     uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
@@ -1297,7 +1249,7 @@
     return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
 }
 
-bool Camera3OutputStream::isConsumedByCPU() const {
+bool Camera3OutputStream::isConsumedByCPU() {
     uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
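
getEndpointUsageForSurface() above now memoizes the internal consumer's usage bits in mConsumerUsageCachedValue so that, with the surface_ipc flag enabled, repeated queries skip the native_window_get_consumer_usage() round trip. A minimal sketch of the same memoization pattern with std::optional; CachedConsumerUsage and its query callback are hypothetical names, not part of the patch:

#include <cstdint>
#include <functional>
#include <optional>
#include <utility>

// Memoize an expensive usage query: the first call pays the (binder) round
// trip, later calls return the cached bits until invalidate() is called.
class CachedConsumerUsage {
  public:
    explicit CachedConsumerUsage(std::function<uint64_t()> query)
        : mQuery(std::move(query)) {}

    uint64_t get() {
        if (!mCached.has_value()) {
            mCached = mQuery();  // e.g. wraps native_window_get_consumer_usage()
        }
        return *mCached;
    }

    // Drop the cached value, e.g. when the consumer endpoint changes.
    void invalidate() { mCached.reset(); }

  private:
    std::function<uint64_t()> mQuery;
    std::optional<uint64_t> mCached;
};
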
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 65791a9..f8b78c1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -18,6 +18,7 @@
 #define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H
 
 #include <mutex>
+#include <optional>
 #include <utils/RefBase.h>
 #include <gui/IProducerListener.h>
 #include <gui/Surface.h>
@@ -143,7 +144,7 @@
      * Camera3Stream interface
      */
 
-    virtual void     dump(int fd, const Vector<String16> &args) const;
+    virtual void     dump(int fd, const Vector<String16> &args);
 
     /**
      * Set the transform on the output stream; one of the
@@ -154,21 +155,21 @@
     /**
      * Return if this output stream is for video encoding.
      */
-    bool isVideoStream() const;
+    bool isVideoStream();
     /**
      * Return if this output stream is consumed by hardware composer.
      */
-    bool isConsumedByHWComposer() const;
+    bool isConsumedByHWComposer();
 
     /**
      * Return if this output stream is consumed by hardware texture.
      */
-    bool isConsumedByHWTexture() const;
+    bool isConsumedByHWTexture();
 
     /**
      * Return if this output stream is consumed by CPU.
      */
-    bool isConsumedByCPU() const;
+    bool isConsumedByCPU();
 
     /**
      * Return if the consumer configuration of this stream is deferred.
@@ -192,6 +193,7 @@
             virtual void onBufferReleased();
             virtual bool needsReleaseNotify() { return mNeedsReleaseNotify; }
             virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& buffers);
+            virtual void onBufferDetached(int /*slot*/) override {};
 
         private:
             wp<Camera3OutputStream> mParent;
@@ -304,8 +306,7 @@
     virtual status_t disconnectLocked();
     status_t fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence);
 
-    status_t getEndpointUsageForSurface(uint64_t *usage,
-            const sp<Surface>& surface) const;
+    status_t getEndpointUsageForSurface(uint64_t *usage, const sp<Surface>& surface);
     status_t configureConsumerQueueLocked(bool allowPreviewRespace);
 
     // Consumer as the output of camera HAL
@@ -326,9 +327,6 @@
 
     bool mTraceFirstBuffer;
 
-    // Name of Surface consumer
-    std::string           mConsumerName;
-
     /**
      * GraphicBuffer manager this stream is registered to. Used to replace the buffer
      * allocation/deallocation role of BufferQueue.
@@ -366,6 +364,11 @@
      */
     uint64_t    mConsumerUsage;
 
+    /**
+     * Cached consumer end point usage flag retrieved from the buffer queue.
+     */
+    std::optional<uint64_t>    mConsumerUsageCachedValue;
+
     // Whether to drop valid buffers.
     bool mDropBuffers;
 
@@ -388,8 +391,6 @@
     virtual status_t getBufferLocked(camera_stream_buffer *buffer,
             const std::vector<size_t>& surface_ids);
 
-    virtual status_t getBuffersLocked(/*out*/std::vector<OutstandingBuffer>* buffers) override;
-
     virtual status_t returnBufferLocked(
             const camera_stream_buffer &buffer,
             nsecs_t timestamp, nsecs_t readoutTimestamp,
@@ -401,7 +402,7 @@
 
     virtual status_t configureQueueLocked();
 
-    virtual status_t getEndpointUsage(uint64_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage);
 
     /**
      * Private methods
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 1ab8162..77edfbe 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -39,7 +39,7 @@
     /**
      * Return if this output stream is for video encoding.
      */
-    virtual bool isVideoStream() const = 0;
+    virtual bool isVideoStream() = 0;
 
     /**
      * Return if the consumer configuration of this stream is deferred.
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 5d5c54c..31707ec 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -36,6 +36,7 @@
 #include <utils/SortedVector.h>
 #include <utils/Trace.h>
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 
 #include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
@@ -499,7 +500,8 @@
     states.inflightIntf.onInflightEntryRemovedLocked(duration);
 }
 
-void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
+void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx,
+        std::vector<BufferToReturn> *returnableBuffers) {
     InFlightRequestMap& inflightMap = states.inflightMap;
     const InFlightRequest &request = inflightMap.valueAt(idx);
     const uint32_t frameNumber = inflightMap.keyAt(idx);
@@ -537,12 +539,13 @@
         assert(request.requestStatus != OK ||
                request.pendingOutputBuffers.size() == 0);
 
-        returnOutputBuffers(
+        collectReturnableOutputBuffers(
             states.useHalBufManager, states.halBufManagedStreamIds,
             states.listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
             /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+            /*out*/ returnableBuffers,
             /*timestampIncreasing*/true,
             request.outputSurfaces, request.resultExtras,
             request.errorBufStrategy, request.transform);
@@ -637,6 +640,7 @@
     // in-flight request and they will be returned when the shutter timestamp
     // arrives. Update the in-flight status and remove the in-flight entry if
     // all result data and shutter timestamp have been received.
+    std::vector<BufferToReturn> returnableBuffers{};
     nsecs_t shutterTimestamp = 0;
     {
         std::lock_guard<std::mutex> l(states.inflightLock);
@@ -672,8 +676,9 @@
                     states.listener->notifyPhysicalCameraChange(physicalId);
                 }
                 states.activePhysicalId = physicalId;
-
-                if (!states.legacyClient && !states.overrideToPortrait) {
+                using hardware::ICameraService::ROTATION_OVERRIDE_NONE;
+                if (!states.legacyClient &&
+                        states.rotationOverride == ROTATION_OVERRIDE_NONE) {
                     auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
                     if (deviceInfo != states.physicalDeviceInfoMap.end()) {
                         auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
@@ -798,10 +803,11 @@
         request.pendingOutputBuffers.appendArray(result->output_buffers,
                 result->num_output_buffers);
         if (shutterTimestamp != 0) {
-            returnAndRemovePendingOutputBuffers(
+            collectAndRemovePendingOutputBuffers(
                 states.useHalBufManager, states.halBufManagedStreamIds,
                 states.listener,
-                request, states.sessionStatsBuilder);
+                request, states.sessionStatsBuilder,
+                /*out*/ &returnableBuffers);
         }
 
         if (result->result != NULL && !isPartialResult) {
@@ -826,9 +832,18 @@
                     request.physicalMetadatas);
             }
         }
-        removeInFlightRequestIfReadyLocked(states, idx);
+        removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+        if (!flags::return_buffers_outside_locks()) {
+            finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+        }
     } // scope for states.inFlightLock
 
+    if (flags::return_buffers_outside_locks()) {
+        finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+    }
+
     if (result->input_buffer != NULL) {
         if (hasInputBufferInRequest) {
             Camera3Stream *stream =
@@ -849,17 +864,17 @@
     }
 }
 
-void returnOutputBuffers(
+void collectReturnableOutputBuffers(
         bool useHalBufManager,
         const std::set<int32_t> &halBufferManagedStreams,
         sp<NotificationListener> listener,
         const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
         nsecs_t timestamp, nsecs_t readoutTimestamp, bool requested,
         nsecs_t requestTimeNs, SessionStatsBuilder& sessionStatsBuilder,
+        /*out*/ std::vector<BufferToReturn> *returnableBuffers,
         bool timestampIncreasing, const SurfaceMap& outputSurfaces,
-        const CaptureResultExtras &inResultExtras,
+        const CaptureResultExtras &resultExtras,
         ERROR_BUF_STRATEGY errorBufStrategy, int32_t transform) {
-
     for (size_t i = 0; i < numBuffers; i++)
     {
         Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
@@ -869,7 +884,7 @@
         if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR &&
                 errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
             if (listener != nullptr) {
-                CaptureResultExtras extras = inResultExtras;
+                CaptureResultExtras extras = resultExtras;
                 extras.errorStreamId = streamId;
                 listener->notifyError(
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
@@ -894,22 +909,35 @@
         }
 
         const auto& it = outputSurfaces.find(streamId);
-        status_t res = OK;
 
         // Do not return the buffer if the buffer status is error, and the error
         // buffer strategy is CACHE.
         if (outputBuffers[i].status != CAMERA_BUFFER_STATUS_ERROR ||
                 errorBufStrategy != ERROR_BUF_CACHE) {
             if (it != outputSurfaces.end()) {
-                res = stream->returnBuffer(
+                returnableBuffers->emplace_back(stream,
                         outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
-                        it->second, inResultExtras.frameNumber, transform);
+                        it->second, resultExtras,
+                        transform, requested ? requestTimeNs : 0);
             } else {
-                res = stream->returnBuffer(
+                returnableBuffers->emplace_back(stream,
                         outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
-                        std::vector<size_t> (), inResultExtras.frameNumber, transform);
+                        std::vector<size_t> (), resultExtras,
+                        transform, requested ? requestTimeNs : 0 );
             }
         }
+    }
+}
+
+void finishReturningOutputBuffers(const std::vector<BufferToReturn> &returnableBuffers,
+        sp<NotificationListener> listener, SessionStatsBuilder& sessionStatsBuilder) {
+    for (auto& b : returnableBuffers) {
+        const int streamId = b.stream->getId();
+
+        status_t res = b.stream->returnBuffer(b.buffer, b.timestamp,
+                b.readoutTimestamp, b.timestampIncreasing,
+                b.surfaceIds, b.resultExtras.frameNumber, b.transform);
+
         // Note: stream may be deallocated at this point, if this buffer was
         // the last reference to it.
         bool dropped = false;
@@ -920,51 +948,55 @@
             ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
             dropped = true;
         } else {
-            if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR || timestamp == 0) {
+            if (b.buffer.status == CAMERA_BUFFER_STATUS_ERROR || b.timestamp == 0) {
                 dropped = true;
             }
         }
-        if (requested) {
+        if (b.requestTimeNs > 0) {
             nsecs_t bufferTimeNs = systemTime();
-            int32_t captureLatencyMs = ns2ms(bufferTimeNs - requestTimeNs);
+            int32_t captureLatencyMs = ns2ms(bufferTimeNs - b.requestTimeNs);
             sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
         }
 
         // Long processing consumers can cause returnBuffer timeout for shared stream
         // If that happens, cancel the buffer and send a buffer error to client
-        if (it != outputSurfaces.end() && res == TIMED_OUT &&
-                outputBuffers[i].status == CAMERA_BUFFER_STATUS_OK) {
+        if (b.surfaceIds.size() > 0 && res == TIMED_OUT &&
+                b.buffer.status == CAMERA_BUFFER_STATUS_OK) {
             // cancel the buffer
-            camera_stream_buffer_t sb = outputBuffers[i];
+            camera_stream_buffer_t sb = b.buffer;
             sb.status = CAMERA_BUFFER_STATUS_ERROR;
-            stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
-                    timestampIncreasing, std::vector<size_t> (),
-                    inResultExtras.frameNumber, transform);
+            b.stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
+                    b.timestampIncreasing, std::vector<size_t> (),
+                    b.resultExtras.frameNumber, b.transform);
 
             if (listener != nullptr) {
-                CaptureResultExtras extras = inResultExtras;
+                CaptureResultExtras extras = b.resultExtras;
                 extras.errorStreamId = streamId;
                 listener->notifyError(
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                         extras);
             }
         }
+
     }
 }
 
-void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
+void collectAndRemovePendingOutputBuffers(bool useHalBufManager,
         const std::set<int32_t> &halBufferManagedStreams,
         sp<NotificationListener> listener, InFlightRequest& request,
-        SessionStatsBuilder& sessionStatsBuilder) {
+        SessionStatsBuilder& sessionStatsBuilder,
+        std::vector<BufferToReturn> *returnableBuffers) {
     bool timestampIncreasing =
             !((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
     nsecs_t readoutTimestamp = request.resultExtras.hasReadoutTimestamp ?
             request.resultExtras.readoutTimestamp : 0;
-    returnOutputBuffers(useHalBufManager, halBufferManagedStreams, listener,
+    collectReturnableOutputBuffers(useHalBufManager, halBufferManagedStreams, listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(),
             request.shutterTimestamp, readoutTimestamp,
-            /*requested*/true, request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
+            /*requested*/true, request.requestTimeNs, sessionStatsBuilder,
+            /*out*/ returnableBuffers,
+            timestampIncreasing,
             request.outputSurfaces, request.resultExtras,
             request.errorBufStrategy, request.transform);
 
@@ -984,6 +1016,9 @@
     ATRACE_CALL();
     ssize_t idx;
 
+    std::vector<BufferToReturn> returnableBuffers{};
+    CaptureResultExtras pendingNotificationResultExtras{};
+
     // Set timestamp for the request in the in-flight tracking
     // and get the request ID to send upstream
     {
@@ -1050,9 +1085,13 @@
                             states.lastCompletedReprocessFrameNumber;
                     r.resultExtras.lastCompletedZslFrameNumber =
                             states.lastCompletedZslFrameNumber;
-                    states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+                    if (flags::return_buffers_outside_locks()) {
+                        pendingNotificationResultExtras = r.resultExtras;
+                    } else {
+                        states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+                    }
                 }
-                // send pending result and buffers
+                // send pending result and buffers; this queues them up for delivery later
                 const auto& cameraIdsWithZoom = getCameraIdsWithZoomLocked(
                         inflightMap, r.pendingMetadata, r.cameraIdsWithZoom);
                 sendCaptureResult(states,
@@ -1061,17 +1100,35 @@
                     r.hasInputBuffer, r.zslCapture && r.stillCapture,
                     r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
             }
-            returnAndRemovePendingOutputBuffers(
+            collectAndRemovePendingOutputBuffers(
                     states.useHalBufManager, states.halBufManagedStreamIds,
-                    states.listener, r, states.sessionStatsBuilder);
+                    states.listener, r, states.sessionStatsBuilder, &returnableBuffers);
 
-            removeInFlightRequestIfReadyLocked(states, idx);
+            if (!flags::return_buffers_outside_locks()) {
+                finishReturningOutputBuffers(returnableBuffers,
+                        states.listener, states.sessionStatsBuilder);
+            }
+
+            removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+
         }
     }
     if (idx < 0) {
         SET_ERR("Shutter notification for non-existent frame number %d",
                 msg.frame_number);
     }
+    // Call notifyShutter outside of in-flight mutex
+    if (flags::return_buffers_outside_locks() && pendingNotificationResultExtras.isValid()) {
+        states.listener->notifyShutter(pendingNotificationResultExtras, msg.timestamp);
+    }
+
+    // With no locks held, finish returning buffers to streams, which may take a while since
+    // binder calls are involved
+    if (flags::return_buffers_outside_locks()) {
+        finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+    }
+
 }
 
 void notifyError(CaptureOutputStates& states, const camera_error_msg_t &msg) {
@@ -1117,6 +1174,8 @@
             break;
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
+        {
+            std::vector<BufferToReturn> returnableBuffers{};
             {
                 std::lock_guard<std::mutex> l(states.inflightLock);
                 ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
@@ -1153,7 +1212,12 @@
 
                         // Check whether the buffers returned. If they returned,
                         // remove inflight request.
-                        removeInFlightRequestIfReadyLocked(states, idx);
+                        removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+                        if (!flags::return_buffers_outside_locks()) {
+                            finishReturningOutputBuffers(returnableBuffers,
+                                    states.listener, states.sessionStatsBuilder);
+                        }
+
                     }
                 } else {
                     resultExtras.frameNumber = msg.frame_number;
@@ -1162,6 +1226,12 @@
                             resultExtras.frameNumber);
                 }
             }
+
+            if (flags::return_buffers_outside_locks()) {
+                finishReturningOutputBuffers(returnableBuffers,
+                        states.listener, states.sessionStatsBuilder);
+            }
+
             resultExtras.errorStreamId = streamId;
             if (states.listener != nullptr) {
                 states.listener->notifyError(errorCode, resultExtras);
@@ -1170,6 +1240,7 @@
                         states.cameraId.c_str(), __FUNCTION__);
             }
             break;
+        }
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
             // Do not depend on HAL ERROR_CAMERA_BUFFER to send buffer error
             // callback to the app. Rather, use STATUS_ERROR of image buffers.
@@ -1199,18 +1270,24 @@
 
 void flushInflightRequests(FlushInflightReqStates& states) {
     ATRACE_CALL();
+    std::vector<BufferToReturn> returnableBuffers{};
     { // First return buffers cached in inFlightMap
         std::lock_guard<std::mutex> l(states.inflightLock);
         for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
             const InFlightRequest &request = states.inflightMap.valueAt(idx);
-            returnOutputBuffers(
+            collectReturnableOutputBuffers(
                 states.useHalBufManager, states.halBufManagedStreamIds,
                 states.listener,
                 request.pendingOutputBuffers.array(),
                 request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
                 /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+                /*out*/ &returnableBuffers,
                 /*timestampIncreasing*/true, request.outputSurfaces, request.resultExtras,
                 request.errorBufStrategy);
+            if (!flags::return_buffers_outside_locks()) {
+                finishReturningOutputBuffers(returnableBuffers,
+                        states.listener, states.sessionStatsBuilder);
+            }
             ALOGW("%s: Frame %d |  Timestamp: %" PRId64 ", metadata"
                     " arrived: %s, buffers left: %d.\n", __FUNCTION__,
                     states.inflightMap.keyAt(idx), request.shutterTimestamp,
@@ -1221,6 +1298,10 @@
         states.inflightMap.clear();
         states.inflightIntf.onInflightMapFlushedLocked();
     }
+    if (flags::return_buffers_outside_locks()) {
+        finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+    }
 
     // Then return all inflight buffers not returned by HAL
     std::vector<std::pair<int32_t, int32_t>> inflightKeys;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index d155fa2..21965f5 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -44,16 +44,50 @@
      * Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
      */
 
-    // helper function to return the output buffers to output streams. The
-    // function also optionally calls notify(ERROR_BUFFER).
-    void returnOutputBuffers(
+    struct BufferToReturn {
+        Camera3StreamInterface *stream;
+        camera_stream_buffer_t buffer;
+        nsecs_t timestamp;
+        nsecs_t readoutTimestamp;
+        bool timestampIncreasing;
+        std::vector<size_t> surfaceIds;
+        const CaptureResultExtras resultExtras;
+        int32_t transform;
+        nsecs_t requestTimeNs;
+
+        BufferToReturn(Camera3StreamInterface *stream,
+                camera_stream_buffer_t buffer,
+                nsecs_t timestamp, nsecs_t readoutTimestamp,
+                bool timestampIncreasing, std::vector<size_t> surfaceIds,
+                const CaptureResultExtras &resultExtras,
+                int32_t transform, nsecs_t requestTimeNs):
+            stream(stream),
+            buffer(buffer),
+            timestamp(timestamp),
+            readoutTimestamp(readoutTimestamp),
+            timestampIncreasing(timestampIncreasing),
+            surfaceIds(surfaceIds),
+            resultExtras(resultExtras),
+            transform(transform),
+            requestTimeNs(requestTimeNs) {}
+    };
+
+    // helper function to collect the output buffers that are ready to be
+    // returned to their output streams. The function also optionally calls
+    // notify(ERROR_BUFFER). The buffers to hand back to the streams are
+    // appended to returnableBuffers. Does not make any two-way binder
+    // calls, so it is suitable for use while critical locks are held.
+    void collectReturnableOutputBuffers(
             bool useHalBufManager,
             const std::set<int32_t> &halBufferManagedStreams,
             sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
             const camera_stream_buffer_t *outputBuffers,
             size_t numBuffers, nsecs_t timestamp,
             nsecs_t readoutTimestamp, bool requested, nsecs_t requestTimeNs,
-            SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing = true,
+            SessionStatsBuilder& sessionStatsBuilder,
+            /*out*/ std::vector<BufferToReturn> *returnableBuffers,
+            bool timestampIncreasing = true,
             // The following arguments are only meant for surface sharing use case
             const SurfaceMap& outputSurfaces = SurfaceMap{},
             // Used to send buffer error callback when failing to return buffer
@@ -61,14 +95,24 @@
             ERROR_BUF_STRATEGY errorBufStrategy = ERROR_BUF_RETURN,
             int32_t transform = -1);
 
-    // helper function to return the output buffers to output streams, and
-    // remove the returned buffers from the inflight request's pending buffers
-    // vector.
-    void returnAndRemovePendingOutputBuffers(
+    // helper function to collect the output buffers that are ready to be
+    // returned to output streams, and to remove them from the in-flight
+    // request's pending buffers vector. Does not make any two-way binder
+    // calls, so it is suitable for use while critical locks are held.
+    void collectAndRemovePendingOutputBuffers(
             bool useHalBufManager,
             const std::set<int32_t> &halBufferManagedStreams,
             sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
-            InFlightRequest& request, SessionStatsBuilder& sessionStatsBuilder);
+            InFlightRequest& request, SessionStatsBuilder& sessionStatsBuilder,
+            /*out*/ std::vector<BufferToReturn> *returnableBuffers);
+
+    // Actually return filled output buffers to their consumers, using the list
+    // produced by collectReturnableOutputBuffers / collectAndRemovePendingOutputBuffers.
+    // Makes two-way binder calls to applications, so do not hold any critical locks
+    // when calling.
+    void finishReturningOutputBuffers(const std::vector<BufferToReturn> &returnableBuffers,
+            sp<NotificationListener> listener, SessionStatsBuilder& sessionStatsBuilder);
 
     // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
     // callbacks
@@ -110,7 +154,7 @@
         bool legacyClient;
         nsecs_t& minFrameDuration;
         bool& isFixedFps;
-        bool overrideToPortrait;
+        int rotationOverride;
         std::string &activePhysicalId;
     };
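Aside: taken together, collectReturnableOutputBuffers / collectAndRemovePendingOutputBuffers and finishReturningOutputBuffers split buffer return into a lock-safe collection phase and a lock-free delivery phase, with the flags::return_buffers_outside_locks() check gating whether the delivery phase runs inside or after the locked scope during rollout. A minimal, self-contained sketch of that call pattern follows; every type and function in it is an illustrative stand-in, not the real camera3 code.

// Minimal sketch of the collect-under-lock / return-outside-lock pattern.
// All names here are illustrative stand-ins for the real camera3 types.
#include <cstdio>
#include <mutex>
#include <vector>

struct FakeStream {
    int id;
    // Stand-in for Camera3StreamInterface::returnBuffer(); in the real code
    // this is a binder round trip and must not run under critical locks.
    void returnBuffer(int bufferId) {
        std::printf("stream %d: returning buffer %d\n", id, bufferId);
    }
};

// Plain-data record, analogous to BufferToReturn: everything returnBuffer()
// needs later, captured by value so no locks are required at return time.
struct PendingReturn {
    FakeStream* stream;
    int bufferId;
};

std::mutex gInFlightLock;                 // stands in for states.inflightLock
std::vector<int> gPendingBufferIds{1, 2}; // stands in for pendingOutputBuffers

// Phase 1: runs with the lock held, performs no binder calls.
void collectReturnable(FakeStream* stream, std::vector<PendingReturn>* out) {
    for (int id : gPendingBufferIds) {
        out->emplace_back(PendingReturn{stream, id});
    }
    gPendingBufferIds.clear();
}

// Phase 2: runs with no locks held, performs the slow returns.
void finishReturning(const std::vector<PendingReturn>& returnable) {
    for (const auto& b : returnable) {
        b.stream->returnBuffer(b.bufferId);
    }
}

int main() {
    FakeStream stream{/*id=*/7};
    std::vector<PendingReturn> returnable;
    {
        std::lock_guard<std::mutex> l(gInFlightLock); // critical section
        collectReturnable(&stream, &returnable);
    } // lock released here
    finishReturning(returnable); // safe to block / make binder calls now
    return 0;
}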
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
index 2c30b15..aca7a67 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
@@ -345,10 +345,15 @@
             continue;
         }
         streamBuffer.stream = stream->asHalStream();
-        returnOutputBuffers(states.useHalBufManager, states.halBufManagedStreamIds,
+        std::vector<BufferToReturn> returnableBuffers{};
+        collectReturnableOutputBuffers(states.useHalBufManager, states.halBufManagedStreamIds,
                 /*listener*/nullptr, &streamBuffer, /*size*/1, /*timestamp*/ 0,
                 /*readoutTimestamp*/0, /*requested*/false, /*requestTimeNs*/0,
+                states.sessionStatsBuilder,
+                /*out*/&returnableBuffers);
+        finishReturningOutputBuffers(returnableBuffers, /*listener*/ nullptr,
                 states.sessionStatsBuilder);
+
     }
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 22f97bf..485f3f0 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -279,7 +279,7 @@
     return res;
 }
 
-status_t Camera3SharedOutputStream::getEndpointUsage(uint64_t *usage) const {
+status_t Camera3SharedOutputStream::getEndpointUsage(uint64_t *usage) {
 
     status_t res = OK;
     uint64_t u = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 90914d4..818ce17 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -137,7 +137,7 @@
 
     virtual status_t disconnectLocked();
 
-    virtual status_t getEndpointUsage(uint64_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage);
 
 }; // class Camera3SharedOutputStream
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 701c472..4934203 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -957,7 +957,7 @@
     }
 }
 
-void Camera3Stream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const
+void Camera3Stream::dump(int fd, [[maybe_unused]] const Vector<String16> &args)
 {
     mBufferLimitLatency.dump(fd,
             "      Latency histogram for wait on max_buffers");
@@ -969,11 +969,6 @@
     return INVALID_OPERATION;
 }
 
-status_t Camera3Stream::getBuffersLocked(std::vector<OutstandingBuffer>*) {
-    ALOGE("%s: This type of stream does not support output", __FUNCTION__);
-    return INVALID_OPERATION;
-}
-
 status_t Camera3Stream::returnBufferLocked(const camera_stream_buffer &,
                                            nsecs_t, nsecs_t, int32_t, const std::vector<size_t>&) {
     ALOGE("%s: This type of stream does not support output", __FUNCTION__);
@@ -1047,92 +1042,6 @@
     mBufferFreedListener = listener;
 }
 
-status_t Camera3Stream::getBuffers(std::vector<OutstandingBuffer>* buffers,
-        nsecs_t waitBufferTimeout) {
-    ATRACE_CALL();
-    Mutex::Autolock l(mLock);
-    status_t res = OK;
-
-    if (buffers == nullptr) {
-        ALOGI("%s: buffers must not be null!", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    size_t numBuffersRequested = buffers->size();
-    if (numBuffersRequested == 0) {
-        ALOGE("%s: 0 buffers are requested!", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    // This function should be only called when the stream is configured already.
-    if (mState != STATE_CONFIGURED) {
-        ALOGE("%s: Stream %d: Can't get buffers if stream is not in CONFIGURED state %d",
-                __FUNCTION__, mId, mState);
-        if (mState == STATE_ABANDONED) {
-            return DEAD_OBJECT;
-        } else {
-            return INVALID_OPERATION;
-        }
-    }
-
-    size_t numOutstandingBuffers = getHandoutOutputBufferCountLocked();
-    size_t numCachedBuffers = getCachedOutputBufferCountLocked();
-    size_t maxNumCachedBuffers = getMaxCachedOutputBuffersLocked();
-    // Wait for new buffer returned back if we are running into the limit. There
-    // are 2 limits:
-    // 1. The number of HAL buffers is greater than max_buffers
-    // 2. The number of HAL buffers + cached buffers is greater than max_buffers
-    //    + maxCachedBuffers
-    while (numOutstandingBuffers + numBuffersRequested > camera_stream::max_buffers ||
-            numOutstandingBuffers + numCachedBuffers + numBuffersRequested >
-            camera_stream::max_buffers + maxNumCachedBuffers) {
-        ALOGV("%s: Already dequeued %zu(+%zu) output buffers and requesting %zu "
-                "(max is %d(+%zu)), waiting.", __FUNCTION__, numOutstandingBuffers,
-                numCachedBuffers, numBuffersRequested, camera_stream::max_buffers,
-                maxNumCachedBuffers);
-        nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
-        if (waitBufferTimeout < kWaitForBufferDuration) {
-            waitBufferTimeout = kWaitForBufferDuration;
-        }
-        res = mOutputBufferReturnedSignal.waitRelative(mLock, waitBufferTimeout);
-        nsecs_t waitEnd = systemTime(SYSTEM_TIME_MONOTONIC);
-        mBufferLimitLatency.add(waitStart, waitEnd);
-        if (res != OK) {
-            if (res == TIMED_OUT) {
-                ALOGE("%s: wait for output buffer return timed out after %lldms (max_buffers %d)",
-                        __FUNCTION__, waitBufferTimeout / 1000000LL,
-                        camera_stream::max_buffers);
-            }
-            return res;
-        }
-        size_t updatedNumOutstandingBuffers = getHandoutOutputBufferCountLocked();
-        size_t updatedNumCachedBuffers = getCachedOutputBufferCountLocked();
-        if (updatedNumOutstandingBuffers >= numOutstandingBuffers &&
-                updatedNumCachedBuffers == numCachedBuffers) {
-            ALOGE("%s: outstanding buffer count goes from %zu to %zu, "
-                    "getBuffer(s) call must not run in parallel!", __FUNCTION__,
-                    numOutstandingBuffers, updatedNumOutstandingBuffers);
-            return INVALID_OPERATION;
-        }
-        numOutstandingBuffers = updatedNumOutstandingBuffers;
-        numCachedBuffers = updatedNumCachedBuffers;
-    }
-
-    res = getBuffersLocked(buffers);
-    if (res == OK) {
-        for (auto& outstandingBuffer : *buffers) {
-            camera_stream_buffer* buffer = outstandingBuffer.outBuffer;
-            fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true);
-            if (buffer->buffer) {
-                Mutex::Autolock l(mOutstandingBuffersLock);
-                mOutstandingBuffers.push_back(*buffer->buffer);
-            }
-        }
-    }
-
-    return res;
-}
-
 void Camera3Stream::queueHDRMetadata(buffer_handle_t buffer, sp<ANativeWindow>& anw,
         int64_t dynamicRangeProfile) {
     auto& mapper = GraphicBufferMapper::get();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index f06ccf3..ccd1044 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -343,12 +343,6 @@
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
 
     /**
-     * Similar to getBuffer() except this method fills multiple buffers.
-     */
-    status_t         getBuffers(std::vector<OutstandingBuffer>* buffers,
-            nsecs_t waitBufferTimeout);
-
-    /**
      * Return a buffer to the stream after use by the HAL.
      *
      * Multiple surfaces could share the same HAL stream, but a request may
@@ -429,7 +423,7 @@
     /**
      * Debug dump of the stream's state.
      */
-    virtual void     dump(int fd, const Vector<String16> &args) const;
+    virtual void     dump(int fd, const Vector<String16> &args);
 
     /**
      * Add a camera3 buffer listener. Adding the same listener twice has
@@ -535,8 +529,6 @@
             nsecs_t timestamp, nsecs_t readoutTimestamp, int32_t transform,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
 
-    virtual status_t getBuffersLocked(std::vector<OutstandingBuffer>*);
-
     virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size* size);
 
     virtual status_t returnInputBufferLocked(
@@ -570,7 +562,7 @@
 
     // Get the usage flags for the other endpoint, or return
     // INVALID_OPERATION if they cannot be obtained.
-    virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
+    virtual status_t getEndpointUsage(uint64_t *usage) = 0;
 
     // Return whether the buffer is in the list of outstanding buffers.
     bool isOutstandingBuffer(const camera_stream_buffer& buffer) const;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 7fa6273..4df8193 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -395,11 +395,6 @@
          */
         std::vector<size_t> surface_ids;
     };
-    /**
-     * Similar to getBuffer() except this method fills multiple buffers.
-     */
-    virtual status_t getBuffers(std::vector<OutstandingBuffer>* buffers,
-            nsecs_t waitBufferTimeout) = 0;
 
     /**
      * Return a buffer to the stream after use by the HAL.
@@ -484,7 +479,7 @@
     /**
      * Debug dump of the stream's state.
      */
-    virtual void     dump(int fd, const Vector<String16> &args) const = 0;
+    virtual void     dump(int fd, const Vector<String16> &args) = 0;
 
     virtual void     addBufferListener(
             wp<Camera3StreamBufferListener> listener) = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index 255b4f2..77c037a 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -20,12 +20,13 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
+#include <gui/BufferQueue.h>
 #include <gui/IGraphicBufferConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
-#include <gui/BufferQueue.h>
 #include <gui/Surface.h>
-#include <camera/StringUtils.h>
 
 #include <ui/GraphicBuffer.h>
 
@@ -81,18 +82,28 @@
         }
     }
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     // Create BufferQueue for input
     BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     // Allocate 1 extra buffer to handle the case where all buffers are detached
     // from input, and attached to the outputs. In this case, the input queue's
     // dequeueBuffer can still allocate 1 extra buffer before being blocked by
     // the output's attachBuffer().
     mMaxConsumerBuffers++;
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mBufferItemConsumer = new BufferItemConsumer(consumerUsage, mMaxConsumerBuffers);
+#else
     mBufferItemConsumer = new BufferItemConsumer(mConsumer, consumerUsage, mMaxConsumerBuffers);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     if (mBufferItemConsumer == nullptr) {
         return NO_MEMORY;
     }
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mProducer = mBufferItemConsumer->getSurface()->getIGraphicBufferProducer();
+    mConsumer = mBufferItemConsumer->getIGraphicBufferConsumer();
+#endif  //  COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mConsumer->setConsumerName(toString8(mConsumerName));
 
     *consumer = new Surface(mProducer);
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
index 1feb4a0..43f12fb 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -22,7 +22,7 @@
 #include <camera/CameraMetadata.h>
 
 #include <gui/IConsumerListener.h>
-#include <gui/IProducerListener.h>
+#include <gui/Surface.h>
 #include <gui/BufferItemConsumer.h>
 
 #include <utils/Condition.h>
@@ -159,7 +159,7 @@
     // the IProducerListener::onBufferReleased callback is associated with. We
     // create one of these per output BufferQueue, and then pass the producer
     // into onBufferReleasedByOutput above.
-    class OutputListener : public BnProducerListener,
+    class OutputListener : public SurfaceListener,
                            public IBinder::DeathRecipient {
     public:
         OutputListener(wp<Camera3StreamSplitter> splitter,
@@ -168,6 +168,9 @@
 
         // From IProducerListener
         void onBufferReleased() override;
+        bool needsReleaseNotify() override { return true; }
+        void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& /*buffers*/) override {}
+        void onBufferDetached(int /*slot*/) override {}
 
         // From IBinder::DeathRecipient
         void binderDied(const wp<IBinder>& who) override;
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
index 83caa00..a04406e 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -18,10 +18,16 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <com_android_internal_camera_flags.h>
+
 #include <utils/Log.h>
 
 #include "PreviewFrameSpacer.h"
 #include "Camera3OutputStream.h"
+#include "utils/SchedulingPolicyUtils.h"
+#include "utils/Utils.h"
+
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 
@@ -129,6 +135,24 @@
     mLastCameraReadoutTime = bufferHolder.readoutTimestamp;
 }
 
+status_t PreviewFrameSpacer::run(const char* name, int32_t priority, size_t stack) {
+    auto ret = Thread::run(name, priority, stack);
+    if (flags::bump_preview_frame_space_priority()) {
+        // Boost priority of the preview frame spacer thread to SCHED_FIFO.
+        pid_t previewFrameSpacerTid = getTid();
+        auto res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), previewFrameSpacerTid,
+                RunThreadWithRealtimePriority::kRequestThreadPriority);
+        if (res != OK) {
+            ALOGW("Can't set realtime priority for preview frame spacer thread: %s (%d)",
+                    strerror(-res), res);
+        } else {
+            ALOGV("Set real time priority for preview frame spacer thread (tid %d)",
+                    previewFrameSpacerTid);
+        }
+    }
+    return ret;
+}
+
 }; // namespace camera3
 
 }; // namespace android
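Aside: the run() override above bumps the spacer thread to a real-time scheduling class via the camera service's internal SchedulingPolicyUtils helper. For readers unfamiliar with the underlying mechanism, a small generic sketch using plain pthreads follows; it is only an analogy for the same idea and is not the code path the service actually uses.

// Generic sketch: raise a worker thread to SCHED_FIFO after it starts.
// Succeeding requires appropriate privileges (e.g. CAP_SYS_NICE or RLIMIT_RTPRIO).
#include <pthread.h>
#include <sched.h>
#include <cstdio>

static void* worker(void*) {
    // ... frame-spacing work would happen here ...
    return nullptr;
}

int main() {
    pthread_t thread;
    if (pthread_create(&thread, nullptr, worker, nullptr) != 0) {
        return 1;
    }

    sched_param param{};
    param.sched_priority = 1;  // modest real-time priority
    int err = pthread_setschedparam(thread, SCHED_FIFO, &param);
    if (err != 0) {
        // Fall back gracefully, mirroring the ALOGW path in the diff.
        std::fprintf(stderr, "could not set SCHED_FIFO: %d\n", err);
    }

    pthread_join(thread, nullptr);
    return 0;
}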
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
index f46de3d..ab85189 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -58,6 +58,7 @@
 
     bool threadLoop() override;
     void requestExit() override;
+    status_t run(const char* name, int32_t priority = PRIORITY_DEFAULT, size_t stack = 0) override;
 
   private:
     // structure holding cached preview buffer info
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 13c500f..57297bc 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -68,7 +68,6 @@
 #include "device3/aidl/AidlCamera3OutputUtils.h"
 #include "device3/aidl/AidlCamera3OfflineSession.h"
 #include "CameraService.h"
-#include "utils/CameraThreadState.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
 #include "utils/CameraServiceProxyWrapper.h"
@@ -173,10 +172,11 @@
 
 AidlCamera3Device::AidlCamera3Device(
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
-        const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+        const std::string& id, bool overrideForPerfClass, int rotationOverride,
         bool legacyClient) :
-        Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
-        legacyClient) {
+        Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
+                overrideForPerfClass, rotationOverride, legacyClient) {
     mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
 }
 
@@ -207,7 +207,7 @@
       return INVALID_OPERATION;
     }
     res = manager->getCameraCharacteristics(mId, mOverrideForPerfClass, &mDeviceInfo,
-            mOverrideToPortrait);
+            mRotationOverride);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -223,7 +223,7 @@
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
                     physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
-                    mOverrideToPortrait);
+                    mRotationOverride);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -417,7 +417,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
-        mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
+        mRotationOverride, mActivePhysicalId}, mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -459,7 +459,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
-        mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
+        mRotationOverride, mActivePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg, mSensorReadoutTimestampSupported);
@@ -913,7 +913,6 @@
     std::set<int> activeStreams;
     camera::device::StreamConfiguration requestedConfiguration;
     requestedConfiguration.streams.resize(config->num_streams);
-    config->use_hal_buf_manager = mUseHalBufManager;
     for (size_t i = 0; i < config->num_streams; i++) {
         camera::device::Stream &dst = requestedConfiguration.streams[i];
         camera3::camera_stream_t *src = config->streams[i];
@@ -1481,10 +1480,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys,
-                  useHalBufManager, supportCameraMute, overrideToPortrait,
+                  useHalBufManager, supportCameraMute, rotationOverride,
                   supportSettingsOverride) {}
 
 status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
@@ -1715,10 +1714,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) {
     return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-            useHalBufManager, supportCameraMute, overrideToPortrait,
+            useHalBufManager, supportCameraMute, rotationOverride,
             supportSettingsOverride);
 };
 
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index f0a5f7e..abc3f9c 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -41,7 +41,8 @@
     friend class AidlCameraDeviceCallbacks;
     explicit AidlCamera3Device(
             std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
-            const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+            const std::string& id, bool overrideForPerfClass, int rotationOverride,
             bool legacyClient = false);
 
     virtual ~AidlCamera3Device() { }
@@ -183,7 +184,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride);
 
         status_t switchToOffline(
@@ -274,7 +275,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index f8308df..cc32c2a 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -29,6 +29,7 @@
 
 #include <utils/Trace.h>
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <android/binder_ibinder_platform.h>
 #include <camera/StringUtils.h>
@@ -127,7 +128,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -174,7 +175,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg, mSensorReadoutTimestampSupported);
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
index 3fc070b..d9c8e57 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
@@ -319,10 +319,15 @@
                 sb.acquire_fence = -1;
                 sb.status = CAMERA_BUFFER_STATUS_ERROR;
             }
-            returnOutputBuffers(states.useHalBufManager,states.halBufManagedStreamIds, nullptr,
-                    streamBuffers.data(), numAllocatedBuffers, 0,
-                    0, false,
-                    0, states.sessionStatsBuilder);
+            std::vector<BufferToReturn> returnableBuffers{};
+            collectReturnableOutputBuffers(states.useHalBufManager, states.halBufManagedStreamIds,
+                    /*listener*/ nullptr,
+                    streamBuffers.data(), numAllocatedBuffers, /*timestamp*/ 0,
+                    /*readoutTimestamp*/ 0, /*requested*/ false,
+                    /*requestTimeNs*/ 0, states.sessionStatsBuilder,
+                    /*out*/ &returnableBuffers);
+            finishReturningOutputBuffers(returnableBuffers, /*listener*/ nullptr,
+                    states.sessionStatsBuilder);
             for (auto buf : newBuffers) {
                 states.bufferRecordsIntf.removeOneBufferCache(streamId, buf);
             }
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index f2e618f..09299e6 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -56,6 +56,7 @@
 #include "device3/hidl/HidlCamera3OfflineSession.h"
 #include "utils/SessionConfigurationUtilsHidl.h"
 #include "utils/TraceHFR.h"
+#include "utils/Utils.h"
 
 #include "../../common/hidl/HidlProviderInfo.h"
 #include "HidlCamera3Device.h"
@@ -166,7 +167,7 @@
     }
 
     res = manager->getCameraCharacteristics(mId, mOverrideForPerfClass, &mDeviceInfo,
-            /*overrideToPortrait*/false);
+            hardware::ICameraService::ROTATION_OVERRIDE_NONE);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -181,7 +182,7 @@
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
                     physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
-                    /*overrideToPortrait*/false);
+                    hardware::ICameraService::ROTATION_OVERRIDE_NONE);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -370,7 +371,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
         mActivePhysicalId}, mResultMetadataQueue
     };
 
@@ -433,7 +434,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
         mActivePhysicalId}, mResultMetadataQueue
     };
 
@@ -481,7 +482,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
         mActivePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
@@ -717,10 +718,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) {
         return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-                useHalBufManager, supportCameraMute, overrideToPortrait,
+                useHalBufManager, supportCameraMute, rotationOverride,
                 supportSettingsOverride);
 };
 
@@ -881,7 +882,7 @@
                     ret = true;
                     break;
                 default:
-                    ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, callStatus);
+                    ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, eToI(callStatus));
                     ret = true;
             }
         } else {
@@ -1721,10 +1722,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
+                  supportCameraMute, rotationOverride, supportSettingsOverride) {}
 
 status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 350b072..bcc4d80 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -33,10 +33,11 @@
 
     explicit HidlCamera3Device(
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
-        const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+        const std::string& id, bool overrideForPerfClass, int rotationOverride,
         bool legacyClient = false) :
-        Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
-                legacyClient) { }
+        Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
+                overrideForPerfClass, rotationOverride, legacyClient) { }
 
     virtual ~HidlCamera3Device() {}
 
@@ -178,7 +179,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride);
 
         status_t switchToOffline(
@@ -231,7 +232,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index aa4b762..c26583e 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -22,6 +22,7 @@
 
 #include <utils/Trace.h>
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <camera/StringUtils.h>
 
@@ -108,7 +109,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
         mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -150,7 +151,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
         mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -187,7 +188,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
         mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/fuzzer/Android.bp b/services/camera/libcameraservice/fuzzer/Android.bp
index 7760f6a..667ba02 100644
--- a/services/camera/libcameraservice/fuzzer/Android.bp
+++ b/services/camera/libcameraservice/fuzzer/Android.bp
@@ -26,7 +26,18 @@
 cc_defaults {
     name: "libcameraservice_fuzz_defaults",
     fuzz_config: {
-        componentid: 41727
+        cc: [
+            "android-camera-fwk-eng@google.com",
+        ],
+        componentid: 41727,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libcameraservice",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -37,9 +48,9 @@
         "DistortionMapperFuzzer.cpp",
     ],
     shared_libs: [
-        "libcameraservice",
+        "camera_platform_flags_c_lib",
         "libcamera_client",
-        "camera_platform_flags_c_lib"
+        "libcameraservice",
     ],
 }
 
@@ -50,8 +61,8 @@
         "DepthProcessorFuzzer.cpp",
     ],
     shared_libs: [
+        "camera_platform_flags_c_lib",
         "libcameraservice",
-        "camera_platform_flags_c_lib"
     ],
     corpus: ["corpus/*.jpg"],
 }
diff --git a/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp b/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
index 650ca91..5c5e177 100644
--- a/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
+++ b/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
@@ -14,49 +14,92 @@
  * limitations under the License.
  */
 
-#include <array>
-#include <vector>
+#include "common/DepthPhotoProcessor.h"
+
+#include <random>
 
 #include <fuzzer/FuzzedDataProvider.h>
 
-#include "common/DepthPhotoProcessor.h"
-
 using namespace android;
 using namespace android::camera3;
 
-static const size_t kTestBufferWidth = 640;
-static const size_t kTestBufferHeight = 480;
-static const size_t kTestBufferDepthSize (kTestBufferWidth * kTestBufferHeight);
+static const float kMinRatio = 0.1f;
+static const float kMaxRatio = 0.9f;
 
-void generateDepth16Buffer(const uint8_t* data, size_t size, std::array<uint16_t, kTestBufferDepthSize> *depth16Buffer /*out*/) {
-    FuzzedDataProvider dataProvider(data, size);
-    for (size_t i = 0; i < depth16Buffer->size(); i++) {
-        (*depth16Buffer)[i] = dataProvider.ConsumeIntegral<uint16_t>();
+static const uint8_t kTotalDepthJpegBufferCount = 3;
+static const uint8_t kIntrinsicCalibrationSize = 5;
+static const uint8_t kLensDistortionSize = 5;
+
+static const DepthPhotoOrientation kDepthPhotoOrientations[] = {
+        DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES,
+        DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES,
+        DepthPhotoOrientation::DEPTH_ORIENTATION_180_DEGREES,
+        DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES};
+
+void generateDepth16Buffer(std::vector<uint16_t>* depth16Buffer /*out*/, size_t length,
+                           FuzzedDataProvider& fdp) {
+    std::default_random_engine gen(fdp.ConsumeIntegral<uint8_t>());
+    std::uniform_int_distribution uniDist(0, UINT16_MAX - 1);
+    for (size_t i = 0; i < length; ++i) {
+        (*depth16Buffer)[i] = uniDist(gen);
     }
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+
     DepthPhotoInputFrame inputFrame;
+
+    /**
+     * Consume 80% of the data to set mMainJpegBuffer. This ensures we don't
+     * completely exhaust the data, leaving the remaining 20% for fuzzing the APIs.
+     */
+    std::vector<uint8_t> buffer = fdp.ConsumeBytes<uint8_t>((size * 80) / 100);
+    inputFrame.mMainJpegBuffer = reinterpret_cast<const char*>(buffer.data());
+
+    /**
+     * Calculate height and width from the buffer size and a ratio within [0.1, 0.9].
+     * The ratio skews the aspect while keeping height * width close to the buffer size.
+     */
+    const float ratio = fdp.ConsumeFloatingPointInRange<float>(kMinRatio, kMaxRatio);
+    const size_t height = std::sqrt(buffer.size()) * ratio;
+    const size_t width = std::sqrt(buffer.size()) / ratio;
+
+    inputFrame.mMainJpegHeight = height;
+    inputFrame.mMainJpegWidth = width;
+    inputFrame.mMainJpegSize = buffer.size();
     // Worst case both depth and confidence maps have the same size as the main color image.
-    inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+    inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * kTotalDepthJpegBufferCount;
+
+    std::vector<uint16_t> depth16Buffer(height * width);
+    generateDepth16Buffer(&depth16Buffer, height * width, fdp);
+    inputFrame.mDepthMapBuffer = depth16Buffer.data();
+    inputFrame.mDepthMapHeight = height;
+    inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = width;
+
+    inputFrame.mIsLogical = fdp.ConsumeBool();
+
+    inputFrame.mOrientation = fdp.PickValueInArray<DepthPhotoOrientation>(kDepthPhotoOrientations);
+
+    if (fdp.ConsumeBool()) {
+        for (uint8_t i = 0; i < kIntrinsicCalibrationSize; ++i) {
+            inputFrame.mIntrinsicCalibration[i] = fdp.ConsumeFloatingPoint<float>();
+        }
+        inputFrame.mIsIntrinsicCalibrationValid = 1;
+    }
+
+    if (fdp.ConsumeBool()) {
+        for (uint8_t i = 0; i < kLensDistortionSize; ++i) {
+            inputFrame.mLensDistortion[i] = fdp.ConsumeFloatingPoint<float>();
+        }
+        inputFrame.mIsLensDistortionValid = 1;
+    }
 
     std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
     size_t actualDepthPhotoSize = 0;
 
-    std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
-    generateDepth16Buffer(data, size, &depth16Buffer);
+    processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+                           &actualDepthPhotoSize);
 
-    inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (data);
-    inputFrame.mMainJpegSize = size;
-    inputFrame.mDepthMapBuffer = depth16Buffer.data();
-    inputFrame.mDepthMapStride = kTestBufferWidth;
-    inputFrame.mDepthMapWidth = kTestBufferWidth;
-    inputFrame.mDepthMapHeight = kTestBufferHeight;
-    processDepthPhotoFrame(
-        inputFrame,
-        depthPhotoBuffer.size(),
-        depthPhotoBuffer.data(),
-        &actualDepthPhotoSize);
-
-  return 0;
+    return 0;
 }
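The dimension derivation above relies on a simple identity: with height = sqrt(n) * r and width = sqrt(n) / r, the product is n before truncation, and truncation only shrinks each factor, so height * width never exceeds the payload size. A standalone sketch of that arithmetic, with a hypothetical payload size and the same [0.1, 0.9] ratio range (illustrative only, not part of the change):

#include <cassert>
#include <cmath>
#include <cstddef>

int main() {
    const std::size_t n = 64000;  // hypothetical payload size
    for (int i = 1; i <= 9; ++i) {
        const float r = i * 0.1f;  // mirrors kMinRatio..kMaxRatio above
        const std::size_t height = static_cast<std::size_t>(std::sqrt(n) * r);
        const std::size_t width = static_cast<std::size_t>(std::sqrt(n) / r);
        // Truncation only shrinks the factors, so the product stays within n.
        assert(height * width <= n);
    }
    return 0;
}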
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index 5dbfb36..c968e44 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -18,12 +18,14 @@
 #define LOG_TAG "RingBufferConsumer"
 #define ATRACE_TAG ATRACE_TAG_GRAPHICS
 
+#include <com_android_graphics_libgui_flags.h>
 #include <inttypes.h>
 
 #include <utils/Log.h>
 
-#include <gui/RingBufferConsumer.h>
 #include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/RingBufferConsumer.h>
 
 #define BI_LOGV(x, ...) ALOGV("[%s] " x, mName.c_str(), ##__VA_ARGS__)
 #define BI_LOGD(x, ...) ALOGD("[%s] " x, mName.c_str(), ##__VA_ARGS__)
@@ -38,13 +40,14 @@
 
 namespace android {
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+RingBufferConsumer::RingBufferConsumer(uint64_t consumerUsage, int bufferCount)
+    : ConsumerBase(), mBufferCount(bufferCount), mLatestTimestamp(0) {
+#else
 RingBufferConsumer::RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer,
-        uint64_t consumerUsage,
-        int bufferCount) :
-    ConsumerBase(consumer),
-    mBufferCount(bufferCount),
-    mLatestTimestamp(0)
-{
+                                       uint64_t consumerUsage, int bufferCount)
+    : ConsumerBase(consumer), mBufferCount(bufferCount), mLatestTimestamp(0) {
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mConsumer->setConsumerUsageBits(consumerUsage);
     mConsumer->setMaxAcquiredBufferCount(bufferCount);
 
@@ -317,7 +320,9 @@
 
         mLatestTimestamp = item.mTimestamp;
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_RING_BUFFER)
         item.mGraphicBuffer = mSlots[item.mSlot].mGraphicBuffer;
+#endif
     } // end of mMutex lock
 
     ConsumerBase::onFrameAvailable(item);
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index 2e523d1..9fdc996 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -17,9 +17,10 @@
 #ifndef ANDROID_GUI_RINGBUFFERCONSUMER_H
 #define ANDROID_GUI_RINGBUFFERCONSUMER_H
 
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
-#include <gui/ConsumerBase.h>
 #include <gui/BufferQueue.h>
+#include <gui/ConsumerBase.h>
 
 #include <utils/List.h>
 
@@ -58,8 +59,12 @@
     // the consumer usage flags passed to the graphics allocator. The
     // bufferCount parameter specifies how many buffers can be pinned for user
     // access at the same time.
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    RingBufferConsumer(uint64_t consumerUsage, int bufferCount);
+#else
     RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint64_t consumerUsage,
             int bufferCount);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     virtual ~RingBufferConsumer();
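Call sites have to mirror the same flag guard when constructing the consumer. A hedged sketch of caller-side construction under both flag states, following the pattern used by the splitter test later in this change; the usage bits and buffer count are placeholder values:

#include <android/hardware_buffer.h>
#include <com_android_graphics_libgui_flags.h>
#include <gui/BufferQueue.h>
#include <gui/RingBufferConsumer.h>

static android::sp<android::RingBufferConsumer> makeRingBufferConsumer() {
    using android::sp;
    constexpr uint64_t kUsage = AHARDWAREBUFFER_USAGE_CAMERA_READ;  // placeholder
    constexpr int kBufferCount = 3;                                 // placeholder
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
    // New path: the consumer owns its BufferQueue internally.
    return sp<android::RingBufferConsumer>::make(kUsage, kBufferCount);
#else
    // Legacy path: the caller creates the queue and hands in the consumer end.
    sp<android::IGraphicBufferProducer> producer;
    sp<android::IGraphicBufferConsumer> consumer;
    android::BufferQueue::createBufferQueue(&producer, &consumer);
    return sp<android::RingBufferConsumer>::make(consumer, kUsage, kBufferCount);
#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
}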
 
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
index 2b81224..d28c7ab 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
@@ -16,6 +16,7 @@
 
 #include <hidl/AidlCameraServiceListener.h>
 #include <hidl/Utils.h>
+#include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 
 namespace android {
@@ -29,7 +30,10 @@
 typedef frameworks::cameraservice::service::V2_1::ICameraServiceListener HCameraServiceListener2_1;
 
 binder::Status H2BCameraServiceListener::onStatusChanged(
-    int32_t status, const std::string& cameraId) {
+    int32_t status, const std::string& cameraId, int32_t deviceId) {
+  if (deviceId != kDefaultDeviceId) {
+      return binder::Status::ok();
+  }
   HCameraDeviceStatus hCameraDeviceStatus = convertToHidlCameraDeviceStatus(status);
   CameraStatusAndId cameraStatusAndId;
   cameraStatusAndId.deviceStatus = hCameraDeviceStatus;
@@ -44,7 +48,10 @@
 
 binder::Status H2BCameraServiceListener::onPhysicalCameraStatusChanged(
     int32_t status, const std::string& cameraId,
-    const std::string& physicalCameraId) {
+    const std::string& physicalCameraId, int32_t deviceId) {
+  if (deviceId != kDefaultDeviceId) {
+      return binder::Status::ok();
+  }
   auto cast2_1 = HCameraServiceListener2_1::castFrom(mBase);
   sp<HCameraServiceListener2_1> interface2_1 = nullptr;
   if (cast2_1.isOk()) {
@@ -66,13 +73,13 @@
 }
 
 ::android::binder::Status H2BCameraServiceListener::onTorchStatusChanged(
-    int32_t, const std::string&) {
+    [[maybe_unused]] int32_t, [[maybe_unused]] const std::string&, [[maybe_unused]] int32_t) {
   // We don't implement onTorchStatusChanged
   return binder::Status::ok();
 }
 
 ::android::binder::Status H2BCameraServiceListener::onTorchStrengthLevelChanged(
-    const std::string&, int32_t) {
+    [[maybe_unused]] const std::string&, [[maybe_unused]] int32_t, [[maybe_unused]] int32_t) {
   return binder::Status::ok();
 }
 
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
index 91a4c16..78fca4e 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
@@ -47,25 +47,28 @@
     ~H2BCameraServiceListener() { }
 
     virtual ::android::binder::Status onStatusChanged(int32_t status,
-            const std::string& cameraId) override;
+            const std::string& cameraId, int32_t deviceId) override;
     virtual ::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
             const std::string& cameraId,
-            const std::string& physicalCameraId) override;
+            const std::string& physicalCameraId,
+            int32_t deviceId) override;
 
     virtual ::android::binder::Status onTorchStatusChanged(
-            int32_t status, const std::string& cameraId) override;
+            int32_t status, const std::string& cameraId, int32_t deviceId) override;
     virtual ::android::binder::Status onTorchStrengthLevelChanged(
-            const std::string& cameraId, int32_t newStrengthLevel) override;
+            const std::string& cameraId, int32_t newStrengthLevel, int32_t deviceId) override;
     virtual binder::Status onCameraAccessPrioritiesChanged() {
         // TODO: no implementation yet.
         return binder::Status::ok();
     }
-    virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
-            const std::string& /*clientPackageId*/) {
+    virtual binder::Status onCameraOpened([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] const std::string& /*clientPackageId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) {
         // empty implementation
         return binder::Status::ok();
     }
-    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+    virtual binder::Status onCameraClosed([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) {
         // empty implementation
         return binder::Status::ok();
     }
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 1a5a6b9..59e892f 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -25,6 +25,8 @@
 
 #include <hidl/HidlTransportSupport.h>
 
+#include <camera/CameraUtils.h>
+#include <utils/AttributionAndPermissionUtils.h>
 #include <utils/Utils.h>
 
 namespace android {
@@ -36,6 +38,7 @@
 
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
 using hardware::hidl_vec;
+using hardware::BnCameraService::ROTATION_OVERRIDE_NONE;
 using hardware::cameraservice::utils::conversion::convertToHidl;
 using hardware::cameraservice::utils::conversion::B2HStatus;
 using hardware::Void;
@@ -66,10 +69,15 @@
                                             getCameraCharacteristics_cb _hidl_cb) {
     android::CameraMetadata cameraMetadata;
     HStatus status = HStatus::NO_ERROR;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
     binder::Status serviceRet =
         mAidlICameraService->getCameraCharacteristics(cameraId,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                &cameraMetadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
+                clientAttribution, 0, &cameraMetadata);
     HCameraMetadata hidlMetadata;
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -117,11 +125,17 @@
         return Void();
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
     binder::Status serviceRet = mAidlICameraService->connectDevice(
-            callbacks, cameraId, std::string(), {},
-            hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-            /*out*/&deviceRemote);
+            callbacks, cameraId, 0/*oomScoreOffset*/,
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
+            clientAttribution, /*devicePolicy*/0, /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
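The HIDL shims now build an AttributionSourceState describing the caller before each AIDL call. A minimal sketch of that pattern, assuming the constants and fields referenced in this change (kDefaultDeviceId from camera/CameraUtils.h and the USE_CALLING_* sentinels from ICameraService):

#include <optional>

#include <android/content/AttributionSourceState.h>
#include <android/hardware/ICameraService.h>
#include <camera/CameraUtils.h>

// Sketch only: an attribution for a native caller with no package name,
// letting the service resolve the calling UID/PID itself.
static android::content::AttributionSourceState makeCallerAttribution() {
    android::content::AttributionSourceState attribution;
    attribution.uid = android::hardware::ICameraService::USE_CALLING_UID;
    attribution.pid = android::hardware::ICameraService::USE_CALLING_PID;
    attribution.deviceId = android::kDefaultDeviceId;
    attribution.packageName = "";
    attribution.attributionTag = std::nullopt;
    return attribution;
}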
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 9dd657c..53234f0 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -35,6 +35,7 @@
         "libmedia_headers",
     ],
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "libbinder",
         "libbase",
         "libutils",
@@ -57,6 +58,7 @@
         "android.hardware.camera.device@3.5",
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
+        "camera_platform_flags_c_lib",
     ],
     fuzz_config: {
         cc: [
@@ -84,7 +86,7 @@
         "camera_service_fuzzer.cpp",
     ],
     defaults: [
-        "camera_service_fuzzer_defaults"
+        "camera_service_fuzzer_defaults",
     ],
 }
 
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 854c342..718e1d6 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -22,12 +22,15 @@
 //#define LOG_NDEBUG 0
 
 #include <CameraService.h>
-#include <device3/Camera3StreamInterface.h>
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraServiceListener.h>
-#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/ICameraServiceListener.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <camera/CameraUtils.h>
 #include <camera/camera2/OutputConfiguration.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <device3/Camera3StreamInterface.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
@@ -39,6 +42,9 @@
 using namespace hardware;
 using namespace std;
 
+using ICameraService::ROTATION_OVERRIDE_NONE;
+using ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
+
 const int32_t kPreviewThreshold = 8;
 const int32_t kNumRequestsTested = 8;
 const nsecs_t kPreviewTimeout = 5000000000;  // .5 [s.]
@@ -147,7 +153,7 @@
         mAutoFocusMessage = true;
         mAutoFocusCondition.broadcast();
     }
-};
+}
 
 void CameraFuzzer::dataCallback(int32_t msgType, const sp<IMemory> & /*data*/,
                                 camera_frame_metadata_t *) {
@@ -169,7 +175,7 @@
         default:
             break;
     }
-};
+}
 
 status_t CameraFuzzer::waitForPreviewStart() {
     status_t rc = NO_ERROR;
@@ -215,7 +221,9 @@
     } else {
         camType = kCamType[mFuzzedDataProvider->ConsumeBool()];
     }
-    mCameraService->getNumberOfCameras(camType, &mNumCameras);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    mCameraService->getNumberOfCameras(camType, clientAttribution, /*devicePolicy*/0, &mNumCameras);
 }
 
 void CameraFuzzer::getCameraInformation(int32_t cameraId) {
@@ -234,12 +242,17 @@
     hardware::camera2::params::VendorTagDescriptorCache cache;
     mCameraService->getCameraVendorTagCache(&cache);
 
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+
     CameraInfo cameraInfo;
-    mCameraService->getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
+    mCameraService->getCameraInfo(cameraId, ROTATION_OVERRIDE_NONE, clientAttribution,
+            /*devicePolicy*/0, &cameraInfo);
 
     CameraMetadata metadata;
     mCameraService->getCameraCharacteristics(cameraIdStr,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
+            clientAttribution, /*devicePolicy*/0, &metadata);
 }
 
 void CameraFuzzer::invokeCameraSound() {
@@ -321,12 +334,15 @@
     std::string cameraIdStr = std::to_string(cameraId);
     sp<IBinder> binder = new BBinder;
 
-    mCameraService->setTorchMode(cameraIdStr, true, binder);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    mCameraService->setTorchMode(cameraIdStr, true, binder, clientAttribution, /*devicePolicy*/0);
     ALOGV("Turned torch on.");
     int32_t torchStrength = rand() % 5 + 1;
     ALOGV("Changing torch strength level to %d", torchStrength);
-    mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder);
-    mCameraService->setTorchMode(cameraIdStr, false, binder);
+    mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder,
+            clientAttribution, /*devicePolicy*/0);
+    mCameraService->setTorchMode(cameraIdStr, false, binder, clientAttribution, /*devicePolicy*/0);
     ALOGV("Turned torch off.");
 }
 
@@ -342,12 +358,15 @@
     ::android::binder::Status rc;
     sp<ICamera> cameraDevice;
 
-    rc = mCameraService->connect(this, cameraId, std::string(),
-                                 android::CameraService::USE_CALLING_UID,
-                                 android::CameraService::USE_CALLING_PID,
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+    clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+    rc = mCameraService->connect(this, cameraId,
                                  /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
-                                 /*overrideToPortrait*/true, /*forceSlowJpegMode*/false,
-                                 &cameraDevice);
+                                 ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+                                 /*forceSlowJpegMode*/false,
+                                 clientAttribution, /*devicePolicy*/0, &cameraDevice);
     if (!rc.isOk()) {
         // camera not connected
         return;
@@ -484,20 +503,22 @@
 public:
     virtual ~TestCameraServiceListener() {};
 
-    virtual binder::Status onStatusChanged(int32_t, const std::string&) {
+    virtual binder::Status onStatusChanged(int32_t /*status*/, const std::string& /*cameraId*/,
+            int32_t /*deviceId*/) {
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
-            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) {
+            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/,
+            int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onTorchStatusChanged(int32_t /*status*/,
-            const std::string& /*cameraId*/) {
+            const std::string& /*cameraId*/, int32_t /*deviceId*/) {
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onCameraAccessPrioritiesChanged() {
         // No op
@@ -505,18 +526,18 @@
     }
 
     virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
-            const std::string& /*clientPackageName*/) {
+            const std::string& /*clientPackageName*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
 
     virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
-            int32_t /*torchStrength*/) {
+            int32_t /*torchStrength*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
@@ -580,14 +601,37 @@
     for (auto s : statuses) {
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
-        mCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
-                &device);
+
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
+        clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+        clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+        mCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+                clientAttribution, /*devicePolicy*/0, &device);
         if (device == nullptr) {
             continue;
         }
         device->beginConfigure();
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(
+                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/ 8, /*controlledByApp*/ true);
+        opaqueConsumer->setName(String8("Roger"));
+
+        // Default to VGA dimensions, as those are guaranteed to be present
+        opaqueConsumer->setDefaultBufferSize(640, 480);
+        opaqueConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        sp<Surface> surface = opaqueConsumer->getSurface();
+
+        std::string noPhysicalId;
+        size_t rotations = sizeof(kRotations) / sizeof(int32_t) - 1;
+        sp<IGraphicBufferProducer> igbp = surface->getIGraphicBufferProducer();
+        OutputConfiguration output(
+                igbp, kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
+                noPhysicalId);
+#else
         sp<IGraphicBufferProducer> gbProducer;
         sp<IGraphicBufferConsumer> gbConsumer;
         BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
@@ -606,6 +650,7 @@
         OutputConfiguration output(gbProducer,
                 kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
                 noPhysicalId);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         int streamId;
         device->createStream(output, &streamId);
         CameraMetadata sessionParams;
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 5072edd..bbc10dc 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -18,6 +18,49 @@
     ],
 }
 
+cc_defaults {
+    name: "cameraservice_test_hostsupported",
+
+    // All test sources that can run on both host and device
+    // should be listed here
+    srcs: [
+        "ClientManagerTest.cpp",
+        "DepthProcessorTest.cpp",
+        "DistortionMapperTest.cpp",
+        "ExifUtilsTest.cpp",
+        "NV12Compressor.cpp",
+        "RotateAndCropMapperTest.cpp",
+        "SessionStatsBuilderTest.cpp",
+        "ZoomRatioTest.cpp",
+    ],
+
+    // All shared libs available on both host and device
+    // should be listed here
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "libcamera_metadata",
+        "libexif",
+        "libjpeg",
+        "liblog",
+        "libutils",
+        "camera_platform_flags_c_lib",
+    ],
+
+    static_libs: [
+        "libgmock",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+    ],
+
+    test_suites: ["device-tests"],
+
+}
+
 cc_test {
     name: "cameraservice_test",
 
@@ -33,20 +76,16 @@
 
     defaults: [
         "libcameraservice_deps",
+        "cameraservice_test_hostsupported",
     ],
 
+    // Only include libs that can't be run host-side here
     shared_libs: [
-        "libbase",
-        "libbinder",
         "libcutils",
         "libhidlbase",
-        "liblog",
         "libcamera_client",
-        "libcamera_metadata",
+        "libgui",
         "libui",
-        "libutils",
-        "libjpeg",
-        "libexif",
         "android.companion.virtualdevice.flags-aconfig-cc",
         "android.hardware.camera.common@1.0",
         "android.hardware.camera.device@1.0",
@@ -57,6 +96,7 @@
         "camera_platform_flags_c_lib",
     ],
 
+    // Only include libs that can't be run host-side here
     static_libs: [
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
@@ -64,76 +104,41 @@
         "android.hardware.camera.provider@2.7",
         "android.hardware.camera.provider-V3-ndk",
         "libcameraservice",
-        "libgmock",
         "libflagtest",
     ],
 
+    // Only include sources that can't be run host-side here
     srcs: [
+        "Camera3StreamSplitterTest.cpp",
         "CameraPermissionsTest.cpp",
         "CameraProviderManagerTest.cpp",
-        "ClientManagerTest.cpp",
-        "DepthProcessorTest.cpp",
-        "DistortionMapperTest.cpp",
-        "ExifUtilsTest.cpp",
-        "NV12Compressor.cpp",
-        "RotateAndCropMapperTest.cpp",
-        "ZoomRatioTest.cpp",
     ],
 
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
-
-    test_suites: ["device-tests"],
-
 }
 
 cc_test_host {
     name: "cameraservice_test_host",
 
+    defaults: [
+        "cameraservice_test_hostsupported",
+    ],
+
     include_dirs: [
         "frameworks/av/camera/include",
         "frameworks/av/camera/include/camera",
-        "frameworks/native/libs/binder/include_activitymanager"
+        "frameworks/native/libs/binder/include_activitymanager",
     ],
 
+    // Only include libs that can't be run device-side here
     shared_libs: [
         "libactivity_manager_procstate_aidl-cpp",
-        "libbase",
-        "libbinder",
-        "libcamera_metadata",
         "libdynamic_depth",
-        "libexif",
-        "libjpeg",
-        "liblog",
-        "libutils",
-        "camera_platform_flags_c_lib",
     ],
 
+    // Only include libs that can't be run device-side here
     static_libs: [
         "libcamera_client_host",
         "libcameraservice_device_independent",
-        "libgmock",
     ],
 
-    srcs: [
-        "ClientManagerTest.cpp",
-        "DepthProcessorTest.cpp",
-        "DistortionMapperTest.cpp",
-        "ExifUtilsTest.cpp",
-        "NV12Compressor.cpp",
-        "RotateAndCropMapperTest.cpp",
-        "ZoomRatioTest.cpp",
-    ],
-
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
-
-    test_suites: ["device-tests"],
-
 }
diff --git a/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
new file mode 100644
index 0000000..3d49ae5
--- /dev/null
+++ b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3StreamSplitterTest"
+// #define LOG_NDEBUG 0
+
+#include "../device3/Camera3StreamSplitter.h"
+
+#include <android/hardware_buffer.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/BufferItemConsumer.h>
+#include <gui/IGraphicBufferConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferAllocator.h>
+#include <ui/PixelFormat.h>
+
+#include <system/window.h>
+#include <vndk/window.h>
+
+#include <gtest/gtest.h>
+
+using namespace android;
+
+namespace {
+
+uint64_t kConsumerUsage = AHARDWAREBUFFER_USAGE_CAMERA_READ;
+uint64_t kProducerUsage = AHARDWAREBUFFER_USAGE_CAMERA_READ;
+size_t kHalMaxBuffers = 3;
+uint32_t kWidth = 640;
+uint32_t kHeight = 480;
+PixelFormat kFormat = HAL_PIXEL_FORMAT_YCBCR_420_888;
+int64_t kDynamicRangeProfile = 0;
+
+std::tuple<sp<BufferItemConsumer>, sp<Surface>> createConsumerAndSurface() {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    sp<BufferItemConsumer> consumer = sp<BufferItemConsumer>::make(kConsumerUsage);
+    return {consumer, consumer->getSurface()};
+#else
+    sp<IGraphicBufferProducer> producer;
+    sp<IGraphicBufferConsumer> consumer;
+    BufferQueue::createBufferQueue(&producer, &consumer);
+
+    return {sp<BufferItemConsumer>::make(consumer, kConsumerUsage), sp<Surface>::make(producer)};
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+}
+
+class Camera3StreamSplitterTest : public testing::Test {
+  public:
+    void SetUp() override { mSplitter = sp<Camera3StreamSplitter>::make(); }
+
+  protected:
+    sp<Camera3StreamSplitter> mSplitter;
+};
+
+class TestSurfaceListener : public SurfaceListener {
+  public:
+    virtual void onBufferReleased() override { mNumBuffersReleased++; }
+    virtual bool needsReleaseNotify() { return true; }
+    virtual void onBufferDetached(int) override {}
+    virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>&) override {};
+
+    uint32_t mNumBuffersReleased = 0;
+};
+
+class TestConsumerListener : public BufferItemConsumer::FrameAvailableListener {
+  public:
+    TestConsumerListener(const wp<BufferItemConsumer>& consumer) : mConsumer(consumer) {}
+
+    virtual void onFrameAvailable(const BufferItem&) {
+        sp<BufferItemConsumer> consumer = mConsumer.promote();
+        EXPECT_NE(nullptr, consumer);
+
+        BufferItem item;
+        EXPECT_EQ(OK, consumer->acquireBuffer(&item, 0));
+        mNumBuffersAcquired++;
+        EXPECT_EQ(OK, consumer->releaseBuffer(item, Fence::NO_FENCE));
+    }
+    virtual void onFrameReplaced(const BufferItem&) {}
+    virtual void onFrameDequeued(const uint64_t) {}
+    virtual void onFrameCancelled(const uint64_t) {}
+    virtual void onFrameDetached(const uint64_t) {}
+
+    wp<BufferItemConsumer> mConsumer;
+    uint32_t mNumBuffersAcquired = 0;
+};
+
+}  // namespace
+
+TEST_F(Camera3StreamSplitterTest, TestWithoutSurfaces_NoBuffersConsumed) {
+    sp<Surface> consumer;
+    EXPECT_EQ(OK, mSplitter->connect({}, kConsumerUsage, kProducerUsage, kHalMaxBuffers, kWidth,
+                                     kHeight, kFormat, &consumer, kDynamicRangeProfile));
+
+    sp<TestSurfaceListener> surfaceListener = sp<TestSurfaceListener>::make();
+    EXPECT_EQ(OK, consumer->connect(NATIVE_WINDOW_API_CAMERA, surfaceListener, false));
+
+    sp<GraphicBuffer> buffer = new GraphicBuffer(kWidth, kHeight, kFormat, kProducerUsage);
+    EXPECT_EQ(OK, consumer->attachBuffer(buffer->getNativeBuffer()));
+    // TODO: Do this with the surface itself once the API is available.
+    EXPECT_EQ(OK,
+              ANativeWindow_queueBuffer(consumer.get(), buffer->getNativeBuffer(), /*fenceFd*/ -1));
+
+    EXPECT_EQ(0u, surfaceListener->mNumBuffersReleased);
+}
+
+TEST_F(Camera3StreamSplitterTest, TestProcessSingleBuffer) {
+    //
+    // Set up output consumers:
+    //
+    constexpr auto kSurfaceId1 = 1;
+    auto [bufferItemConsumer1, surface1] = createConsumerAndSurface();
+    sp<TestConsumerListener> consumerListener1 =
+            sp<TestConsumerListener>::make(bufferItemConsumer1);
+    bufferItemConsumer1->setFrameAvailableListener(consumerListener1);
+
+    constexpr auto kSurfaceId2 = 2;
+    auto [bufferItemConsumer2, surface2] = createConsumerAndSurface();
+    sp<TestConsumerListener> consumerListener2 =
+            sp<TestConsumerListener>::make(bufferItemConsumer2);
+    bufferItemConsumer2->setFrameAvailableListener(consumerListener2);
+
+    //
+    // Connect it to the splitter, get the input surface, and set it up:
+    //
+    sp<Surface> inputSurface;
+    EXPECT_EQ(OK, mSplitter->connect({{kSurfaceId1, surface1}, {kSurfaceId2, surface2}},
+                                     kConsumerUsage, kProducerUsage, kHalMaxBuffers, kWidth,
+                                     kHeight, kFormat, &inputSurface, kDynamicRangeProfile));
+    sp<TestSurfaceListener> surfaceListener = sp<TestSurfaceListener>::make();
+    EXPECT_EQ(OK, inputSurface->connect(NATIVE_WINDOW_API_CAMERA, surfaceListener, false));
+    // TODO: Do this with the surface itself once the API is available.
+    EXPECT_EQ(OK, inputSurface->getIGraphicBufferProducer()->allowAllocation(false));
+
+    //
+    // Create a buffer to use:
+    //
+    sp<GraphicBuffer> singleBuffer = new GraphicBuffer(kWidth, kHeight, kFormat, kProducerUsage);
+    EXPECT_NE(nullptr, singleBuffer);
+    mSplitter->attachBufferToOutputs(singleBuffer->getNativeBuffer(), {kSurfaceId1, kSurfaceId2});
+
+    //
+    // Verify that when we attach the buffer, it's processed appropriately:
+    //
+    EXPECT_EQ(OK, inputSurface->attachBuffer(singleBuffer->getNativeBuffer()));
+    EXPECT_EQ(OK, mSplitter->getOnFrameAvailableResult());
+    // TODO: Do this with the surface itself once the API is available.
+    EXPECT_EQ(OK, ANativeWindow_queueBuffer(inputSurface.get(), singleBuffer->getNativeBuffer(),
+                                            /*fenceFd*/ -1));
+
+    EXPECT_EQ(1u, consumerListener1->mNumBuffersAcquired);
+    EXPECT_EQ(1u, consumerListener2->mNumBuffersAcquired);
+    EXPECT_EQ(1u, surfaceListener->mNumBuffersReleased);
+}
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index db43a02..50aeaca 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraServiceListener.h>
 #include <android/hardware/BnCameraServiceProxy.h>
 #include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
@@ -21,6 +22,8 @@
 
 #include <private/android_filesystem_config.h>
 
+#include <camera/CameraUtils.h>
+
 #include "../CameraService.h"
 #include "../utils/CameraServiceProxyWrapper.h"
 
@@ -35,22 +38,23 @@
 // Empty service listener.
 class TestCameraServiceListener : public hardware::BnCameraServiceListener {
 public:
-    virtual ~TestCameraServiceListener() {};
+    virtual ~TestCameraServiceListener() {}
 
-    virtual binder::Status onStatusChanged(int32_t , const std::string&) {
+    virtual binder::Status onStatusChanged(int32_t , const std::string&, int32_t) {
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
-            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) {
+            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/,
+            int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onTorchStatusChanged(int32_t /*status*/,
-            const std::string& /*cameraId*/) {
+            const std::string& /*cameraId*/, int32_t /*deviceId*/) {
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onCameraAccessPrioritiesChanged() {
         // No op
@@ -58,18 +62,18 @@
     }
 
     virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
-            const std::string& /*clientPackageName*/) {
+            const std::string& /*clientPackageName*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
 
     virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
-            int32_t /*torchStrength*/) {
+            int32_t /*torchStrength*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
@@ -143,6 +147,11 @@
         return mCameraServiceProxy->notifyCameraState(cameraSessionStats);
     }
 
+    virtual binder::Status notifyFeatureCombinationStats(
+            const hardware::CameraFeatureCombinationStats& featureCombStats) override {
+        return mCameraServiceProxy->notifyFeatureCombinationStats(featureCombStats);
+    }
+
     virtual binder::Status isCameraDisabled(int userId, bool *ret) override {
         if (mOverrideCameraDisabled) {
             *ret = mCameraDisabled;
@@ -215,6 +224,11 @@
 // Test that camera connections fail with ERROR_DISABLED when the camera is disabled via device
 // policy, and succeed when it isn't.
 TEST_F(CameraPermissionsTest, TestCameraDisabled) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+    clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -225,9 +239,10 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> device;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
         ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
@@ -239,9 +254,10 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> device;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(status.isOk());
     }
@@ -249,6 +265,10 @@
 
 // Test that consecutive camera connections succeed.
 TEST_F(CameraPermissionsTest, TestConsecutiveConnections) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -258,16 +278,18 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
@@ -277,6 +299,10 @@
 // Test that consecutive camera connections succeed even when a nonzero oomScoreOffset is provided
 // in the second call.
 TEST_F(CameraPermissionsTest, TestConflictingOomScoreOffset) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -286,16 +312,18 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                1/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index a53d26d..939126c 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -372,28 +372,22 @@
 };
 
 /**
- * Simple test version of the interaction proxy, to use to inject onRegistered calls to the
+ * Simple test version of HidlServiceInteractionProxy, used to inject onRegistered calls to the
  * CameraProviderManager
  */
-struct TestInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy,
-                              public CameraProviderManager::AidlServiceInteractionProxy {
+struct TestHidlInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy {
     sp<hidl::manager::V1_0::IServiceNotification> mManagerNotificationInterface;
     sp<TestICameraProvider> mTestCameraProvider;
-    std::shared_ptr<TestAidlICameraProvider> mTestAidlCameraProvider;
 
-    TestInteractionProxy() {}
+    TestHidlInteractionProxy() {}
 
     void setProvider(sp<TestICameraProvider> provider) {
         mTestCameraProvider = provider;
     }
 
-    void setAidlProvider(std::shared_ptr<TestAidlICameraProvider> provider) {
-        mTestAidlCameraProvider = provider;
-    }
-
     std::vector<std::string> mLastRequestedServiceNames;
 
-    virtual ~TestInteractionProxy() {}
+    virtual ~TestHidlInteractionProxy() {}
 
     virtual bool registerForNotifications(
             [[maybe_unused]] const std::string &serviceName,
@@ -430,9 +424,47 @@
         hardware::hidl_vec<hardware::hidl_string> ret = {"test/0"};
         return ret;
     }
+};
+
+/**
+ * Simple test version of AidlServiceInteractionProxy, used to inject onRegistered calls to the
+ * CameraProviderManager
+ */
+struct TestAidlInteractionProxy : public CameraProviderManager::AidlServiceInteractionProxy {
+    std::shared_ptr<TestAidlICameraProvider> mTestAidlCameraProvider;
+
+    TestAidlInteractionProxy() {}
+
+    void setProvider(std::shared_ptr<TestAidlICameraProvider> provider) {
+        mTestAidlCameraProvider = provider;
+    }
+
+    std::vector<std::string> mLastRequestedServiceNames;
+
+    virtual ~TestAidlInteractionProxy() {}
 
     virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-    getAidlService(const std::string&) {
+            getService(const std::string& serviceName) override {
+        if (!flags::delay_lazy_hal_instantiation()) {
+            return mTestAidlCameraProvider;
+        }
+
+        // If no provider has been given, fail; in reality, getService would
+        // block for HALs that don't start correctly, so we should never use
+        // getService when we don't have a valid HAL running
+        if (mTestAidlCameraProvider == nullptr) {
+            ADD_FAILURE() << __FUNCTION__ << " called with no valid provider;"
+                          << " would block indefinitely";
+            // Real getService would block, but that's bad in unit tests. So
+            // just record an error and return nullptr
+            return nullptr;
+        }
+        mLastRequestedServiceNames.push_back(serviceName);
+        return mTestAidlCameraProvider;
+    }
+
+    virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+    tryGetService(const std::string&) override {
         return mTestAidlCameraProvider;
     }
 };
@@ -462,7 +494,7 @@
     status_t res;
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
 
     android::hardware::hidl_vec<uint8_t> chars;
     CameraMetadata meta;
@@ -510,7 +542,7 @@
     status_t res;
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
     serviceProxy.setProvider(provider);
@@ -560,7 +592,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
 
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
@@ -696,7 +728,7 @@
     status_t res;
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
     serviceProxy.setProvider(provider);
@@ -730,7 +762,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
 
@@ -779,7 +811,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
 
@@ -821,7 +853,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
 
     android::hardware::hidl_vec<uint8_t> chars;
     CameraMetadata meta;
@@ -857,9 +889,11 @@
                 REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(vd_flags, virtual_camera_service_discovery))) {
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestAidlInteractionProxy aidlServiceProxy;
+    TestHidlInteractionProxy hidlServiceProxy;
 
-    status_t res = providerManager->initialize(statusListener, &serviceProxy, &serviceProxy);
+    status_t res = providerManager->initialize(statusListener,
+                                               &hidlServiceProxy, &aidlServiceProxy);
     ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
 
     std::vector<std::string> cameraList = {"device@1.1/virtual/123"};
@@ -868,7 +902,7 @@
             ndk::SharedRefBase::make<TestAidlICameraProvider>(cameraList);
     ndk::SpAIBinder spBinder = aidlProvider->asBinder();
     AIBinder* aiBinder = spBinder.get();
-    serviceProxy.setAidlProvider(aidlProvider);
+    aidlServiceProxy.setProvider(aidlProvider);
     providerManager->onServiceRegistration(
             String16("android.hardware.camera.provider.ICameraProvider/virtual/0"),
             AIBinder_toPlatformBinder(aiBinder));
@@ -883,15 +917,17 @@
                 REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(vd_flags, virtual_camera_service_discovery))) {
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestAidlInteractionProxy aidlServiceProxy;
+    TestHidlInteractionProxy hidlServiceProxy;
 
     std::vector<std::string> cameraList = {"device@1.1/virtual/123"};
 
     std::shared_ptr<TestAidlICameraProvider> aidlProvider =
             ndk::SharedRefBase::make<TestAidlICameraProvider>(cameraList);
-    serviceProxy.setAidlProvider(aidlProvider);
+    aidlServiceProxy.setProvider(aidlProvider);
 
-    status_t res = providerManager->initialize(statusListener, &serviceProxy, &serviceProxy);
+    status_t res = providerManager->initialize(statusListener,
+                                               &hidlServiceProxy, &aidlServiceProxy);
     ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
 
     std::unordered_map<std::string, std::set<std::string>> unavailableDeviceIds;
diff --git a/services/camera/libcameraservice/tests/SessionStatsBuilderTest.cpp b/services/camera/libcameraservice/tests/SessionStatsBuilderTest.cpp
new file mode 100644
index 0000000..3644358
--- /dev/null
+++ b/services/camera/libcameraservice/tests/SessionStatsBuilderTest.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SessionStatsBuilderTest"
+
+#include <gtest/gtest.h>
+#include <utils/Errors.h>
+
+#include "../utils/SessionStatsBuilder.h"
+
+using namespace std;
+using namespace android;
+
+TEST(SessionStatsBuilderTest, FpsHistogramTest) {
+    SessionStatsBuilder b{};
+
+    int64_t requestCount, resultErrorCount;
+    bool deviceError;
+    pair<int32_t, int32_t> mostRequestedFpsRange;
+    map<int, StreamStats> streamStatsMap;
+
+    // Verify we get the most common FPS
+    int64_t fc = 0;
+    for (size_t i = 0; i < 10; i++, fc++) b.incFpsRequestedCount(30, 30, fc);
+    for (size_t i = 0; i < 15; i++, fc++) b.incFpsRequestedCount(15, 30, fc);
+    for (size_t i = 0; i < 20; i++, fc++) b.incFpsRequestedCount(15, 15, fc);
+    for (size_t i = 0; i < 10; i++, fc++) b.incFpsRequestedCount(60, 60, fc);
+
+    b.buildAndReset(&requestCount, &resultErrorCount,
+        &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+    ASSERT_EQ(mostRequestedFpsRange, make_pair(15, 15)) << "Incorrect most common FPS selected";
+
+    // Verify empty stats behavior
+    b.buildAndReset(&requestCount, &resultErrorCount,
+        &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+    ASSERT_EQ(mostRequestedFpsRange, make_pair(0, 0)) << "Incorrect empty stats FPS reported";
+
+    // Verify one frame behavior
+    b.incFpsRequestedCount(30, 30, 1);
+    b.buildAndReset(&requestCount, &resultErrorCount,
+        &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+    ASSERT_EQ(mostRequestedFpsRange, make_pair(30, 30)) << "Incorrect single-frame FPS reported";
+
+    // Verify overflow stats behavior
+    fc = 0;
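+    // Each FPS range [r, r] is requested (3 * FPS_HISTOGRAM_MAX_SIZE - r) times, so lower
+    // ranges are requested more often. Inserting FPS_HISTOGRAM_MAX_SIZE + 1 distinct ranges
+    // forces the oldest bucket ([1, 1]) to be evicted from the histogram.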
+    for (size_t range = 1; range < SessionStatsBuilder::FPS_HISTOGRAM_MAX_SIZE + 2; range++) {
+        int count = SessionStatsBuilder::FPS_HISTOGRAM_MAX_SIZE * 3;
+        for (size_t i = 0; i < count - range; i++, fc++) b.incFpsRequestedCount(range, range, fc);
+    }
+    // Should have the oldest bucket dropped, so second oldest should be most common
+    b.buildAndReset(&requestCount, &resultErrorCount,
+        &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+    ASSERT_EQ(mostRequestedFpsRange, make_pair(2, 2)) << "Incorrect stats overflow behavior";
+
+}
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
new file mode 100644
index 0000000..93b440b
--- /dev/null
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AttributionAndPermissionUtils.h"
+
+#include <binder/AppOpsManager.h>
+#include <binder/PermissionController.h>
+#include <com_android_internal_camera_flags.h>
+#include <cutils/properties.h>
+#include <private/android_filesystem_config.h>
+
+#include "CameraService.h"
+
+#include <binder/IPCThreadState.h>
+#include <hwbinder/IPCThreadState.h>
+#include <binderthreadstate/CallerUtils.h>
+
+namespace android {
+
+namespace flags = com::android::internal::camera::flags;
+
+const std::string AttributionAndPermissionUtils::sDumpPermission("android.permission.DUMP");
+const std::string AttributionAndPermissionUtils::sManageCameraPermission(
+        "android.permission.MANAGE_CAMERA");
+const std::string AttributionAndPermissionUtils::sCameraPermission(
+        "android.permission.CAMERA");
+const std::string AttributionAndPermissionUtils::sSystemCameraPermission(
+        "android.permission.SYSTEM_CAMERA");
+const std::string AttributionAndPermissionUtils::sCameraHeadlessSystemUserPermission(
+        "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
+const std::string AttributionAndPermissionUtils::sCameraPrivacyAllowlistPermission(
+        "android.permission.CAMERA_PRIVACY_ALLOWLIST");
+const std::string AttributionAndPermissionUtils::sCameraSendSystemEventsPermission(
+        "android.permission.CAMERA_SEND_SYSTEM_EVENTS");
+const std::string AttributionAndPermissionUtils::sCameraOpenCloseListenerPermission(
+        "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
+const std::string AttributionAndPermissionUtils::sCameraInjectExternalCameraPermission(
+        "android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+
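+// Calling-identity helpers (previously in CameraThreadState). A call may arrive over
+// hwbinder (HIDL HALs) or binder (AIDL); dispatch to the IPCThreadState serving the
+// current call.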
+int AttributionAndPermissionUtils::getCallingUid() {
+    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+        return hardware::IPCThreadState::self()->getCallingUid();
+    }
+    return IPCThreadState::self()->getCallingUid();
+}
+
+int AttributionAndPermissionUtils::getCallingPid() {
+    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+        return hardware::IPCThreadState::self()->getCallingPid();
+    }
+    return IPCThreadState::self()->getCallingPid();
+}
+
+int64_t AttributionAndPermissionUtils::clearCallingIdentity() {
+    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+        return hardware::IPCThreadState::self()->clearCallingIdentity();
+    }
+    return IPCThreadState::self()->clearCallingIdentity();
+}
+
+void AttributionAndPermissionUtils::restoreCallingIdentity(int64_t token) {
+    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+        hardware::IPCThreadState::self()->restoreCallingIdentity(token);
+    } else {
+        IPCThreadState::self()->restoreCallingIdentity(token);
+    }
+    return;
+}
+
+bool AttributionAndPermissionUtils::checkAutomotivePrivilegedClient(const std::string &cameraId,
+        const AttributionSourceState &attributionSource) {
+    if (isAutomotivePrivilegedClient(attributionSource.uid)) {
+        // If cameraId is empty, then it means that this check is not used for the
+        // purpose of accessing a specific camera, hence grant permission just
+        // based on uid to the automotive privileged client.
+        if (cameraId.empty())
+            return true;
+
+        auto cameraService = mCameraService.promote();
+        if (cameraService == nullptr) {
+            ALOGE("%s: CameraService unavailable.", __FUNCTION__);
+            return false;
+        }
+
+        // If this call is used for accessing a specific camera, then cameraId must be
+        // provided. In that case, the permission is pre-granted only for accessing the
+        // exterior system camera.
+        return cameraService->isAutomotiveExteriorSystemCamera(cameraId);
+    }
+
+    return false;
+}
+
+bool AttributionAndPermissionUtils::checkPermissionForPreflight(const std::string &cameraId,
+        const std::string &permission, const AttributionSourceState &attributionSource,
+        const std::string& message, int32_t attributedOpCode) {
+    if (checkAutomotivePrivilegedClient(cameraId, attributionSource)) {
+        return true;
+    }
+
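+    // When the cache_permission_services flag is enabled, reuse the long-lived
+    // mPermissionChecker instead of constructing a new PermissionChecker per call.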
+    if (!flags::cache_permission_services()) {
+        PermissionChecker permissionChecker;
+        return permissionChecker.checkPermissionForPreflight(
+                       toString16(permission), attributionSource, toString16(message),
+                       attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
+    } else {
+        return mPermissionChecker->checkPermissionForPreflight(
+                       toString16(permission), attributionSource, toString16(message),
+                       attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
+    }
+}
+
+// Can camera service trust the caller based on the calling UID?
+bool AttributionAndPermissionUtils::isTrustedCallingUid(uid_t uid) {
+    switch (uid) {
+        case AID_MEDIA:        // mediaserver
+        case AID_CAMERASERVER: // cameraserver
+        case AID_RADIO:        // telephony
+            return true;
+        default:
+            return false;
+    }
+}
+
+bool AttributionAndPermissionUtils::isAutomotiveDevice() {
+    // Checks the property ro.hardware.type and returns true if it is
+    // automotive.
+    char value[PROPERTY_VALUE_MAX] = {0};
+    property_get("ro.hardware.type", value, "");
+    return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+bool AttributionAndPermissionUtils::isHeadlessSystemUserMode() {
+    // Checks if the device is running in headless system user mode
+    // by checking the property ro.fw.mu.headless_system_user.
+    char value[PROPERTY_VALUE_MAX] = {0};
+    property_get("ro.fw.mu.headless_system_user", value, "");
+    return strncmp(value, "true", PROPERTY_VALUE_MAX) == 0;
+}
+
+bool AttributionAndPermissionUtils::isAutomotivePrivilegedClient(int32_t uid) {
+    // Returns false if this is not an automotive device type.
+    if (!isAutomotiveDevice())
+        return false;
+
+    // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
+    // privileged client uid used for safety critical use cases such as
+    // rear view and surround view.
+    return uid == AID_AUTOMOTIVE_EVS;
+}
+
+status_t AttributionAndPermissionUtils::getUidForPackage(const std::string &packageName,
+        int userId, /*inout*/uid_t& uid, int err) {
+    PermissionController pc;
+    uid = pc.getPackageUid(toString16(packageName), 0);
+    if (uid <= 0) {
+        ALOGE("Unknown package: '%s'", packageName.c_str());
+        dprintf(err, "Unknown package: '%s'\n", packageName.c_str());
+        return BAD_VALUE;
+    }
+
+    if (userId < 0) {
+        ALOGE("Invalid user: %d", userId);
+        dprintf(err, "Invalid user: %d\n", userId);
+        return BAD_VALUE;
+    }
+
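+    // Combine the package's base uid with the requested userId to obtain the per-user uid.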
+    uid = multiuser_get_uid(userId, uid);
+    return NO_ERROR;
+}
+
+bool AttributionAndPermissionUtils::isCallerCameraServerNotDelegating() {
+    return (getCallingPid() == getpid());
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCamera(const std::string& cameraId,
+        const AttributionSourceState& attributionSource) {
+    return checkPermissionForPreflight(cameraId, sCameraPermission,
+            attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForSystemCamera(const std::string& cameraId,
+        const AttributionSourceState& attributionSource, bool checkCameraPermissions) {
+    bool systemCameraPermission = checkPermissionForPreflight(cameraId,
+            sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
+    return systemCameraPermission && (!checkCameraPermissions
+            || hasPermissionsForCamera(cameraId, attributionSource));
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCameraHeadlessSystemUser(
+        const std::string& cameraId, const AttributionSourceState& attributionSource) {
+    return checkPermissionForPreflight(cameraId, sCameraHeadlessSystemUserPermission,
+            attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCameraPrivacyAllowlist(
+        const AttributionSourceState& attributionSource) {
+    return checkPermissionForPreflight(std::string(), sCameraPrivacyAllowlistPermission,
+            attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForOpenCloseListener(
+        const AttributionSourceState& attributionSource) {
+    return checkPermissionForPreflight(std::string(), sCameraOpenCloseListenerPermission,
+            attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
new file mode 100644
index 0000000..4daab0f
--- /dev/null
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -0,0 +1,257 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
+#define ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
+
+#include <android/content/AttributionSourceState.h>
+#include <android/permission/PermissionChecker.h>
+#include <binder/BinderService.h>
+#include <private/android_filesystem_config.h>
+
+namespace android {
+
+class CameraService;
+
+using content::AttributionSourceState;
+using permission::PermissionChecker;
+
+/**
+ * Utility class consolidating methods/data for verifying permissions and the identity of the
+ * caller.
+ */
+class AttributionAndPermissionUtils {
+  public:
+    AttributionAndPermissionUtils() { }
+    virtual ~AttributionAndPermissionUtils() {}
+
+    void setCameraService(wp<CameraService> cameraService) {
+        mCameraService = cameraService;
+    }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
+        AttributionSourceState attributionSource{};
+        attributionSource.pid = callingPid;
+        attributionSource.uid = callingUid;
+        return attributionSource;
+    }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+            int32_t deviceId) {
+        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
+        attributionSource.deviceId = deviceId;
+        return attributionSource;
+    }
+
+    // Utilities handling Binder calling identities (previously in CameraThreadState)
+    virtual int getCallingUid();
+    virtual int getCallingPid();
+    virtual int64_t clearCallingIdentity();
+    virtual void restoreCallingIdentity(int64_t token);
+
+    /**
+     * Pre-grants the permission if the attribution source uid belongs to an automotive
+     * privileged client. Otherwise uses the system permission checker to check for the
+     * appropriate permission. If this function is called to access a specific camera,
+     * the cameraId must not be empty. The cameraId is only considered for automotive
+     * privileged clients, so that permission is pre-granted only for system cameras
+     * located outside the vehicle body frame; cameras inside the vehicle cabin still
+     * require user permission.
+     */
+    virtual bool checkPermissionForPreflight(const std::string &cameraId,
+            const std::string &permission, const AttributionSourceState& attributionSource,
+            const std::string& message, int32_t attributedOpCode);
+
+    // Can camera service trust the caller based on the calling UID?
+    virtual bool isTrustedCallingUid(uid_t uid);
+
+    virtual bool isAutomotiveDevice();
+    virtual bool isHeadlessSystemUserMode();
+
+    /**
+     * Returns true if the client has uid AID_AUTOMOTIVE_EVS and the device is an automotive device.
+     */
+    virtual bool isAutomotivePrivilegedClient(int32_t uid);
+
+    virtual status_t getUidForPackage(const std::string &packageName, int userId,
+            /*inout*/uid_t& uid, int err);
+    virtual bool isCallerCameraServerNotDelegating();
+
+    // Utils for checking specific permissions
+    virtual bool hasPermissionsForCamera(const std::string& cameraId,
+            const AttributionSourceState& attributionSource);
+    virtual bool hasPermissionsForSystemCamera(const std::string& cameraId,
+            const AttributionSourceState& attributionSource, bool checkCameraPermissions = true);
+    virtual bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
+            const AttributionSourceState& attributionSource);
+    virtual bool hasPermissionsForCameraPrivacyAllowlist(
+            const AttributionSourceState& attributionSource);
+    virtual bool hasPermissionsForOpenCloseListener(
+            const AttributionSourceState& attributionSource);
+
+    static const std::string sDumpPermission;
+    static const std::string sManageCameraPermission;
+    static const std::string sCameraPermission;
+    static const std::string sSystemCameraPermission;
+    static const std::string sCameraHeadlessSystemUserPermission;
+    static const std::string sCameraPrivacyAllowlistPermission;
+    static const std::string sCameraSendSystemEventsPermission;
+    static const std::string sCameraOpenCloseListenerPermission;
+    static const std::string sCameraInjectExternalCameraPermission;
+
+  protected:
+    wp<CameraService> mCameraService;
+
+    bool checkAutomotivePrivilegedClient(const std::string &cameraId,
+            const AttributionSourceState &attributionSource);
+
+  private:
+    std::unique_ptr<permission::PermissionChecker> mPermissionChecker =
+            std::make_unique<permission::PermissionChecker>();
+};
+
+/**
+ * Class to be inherited by classes encapsulating AttributionAndPermissionUtils. Provides an
+ * additional utility layer above AttributionAndPermissionUtils calls, and avoids verbosity
+ * in the encapsulating class's methods.
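+ *
+ * A minimal, hypothetical example of an encapsulating class (names are illustrative):
+ *
+ *   class MyCameraClient : public AttributionAndPermissionUtilsEncapsulator {
+ *     public:
+ *       explicit MyCameraClient(std::shared_ptr<AttributionAndPermissionUtils> utils)
+ *           : AttributionAndPermissionUtilsEncapsulator(utils) {}
+ *
+ *       bool callerMayUseCamera(const std::string& cameraId, int32_t deviceId) const {
+ *           return hasPermissionsForCamera(cameraId, getCallingPid(), getCallingUid(),
+ *                   deviceId);
+ *       }
+ *   };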
+ */
+class AttributionAndPermissionUtilsEncapsulator {
+protected:
+    std::shared_ptr<AttributionAndPermissionUtils> mAttributionAndPermissionUtils;
+
+public:
+    AttributionAndPermissionUtilsEncapsulator(
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
+            : mAttributionAndPermissionUtils(attributionAndPermissionUtils) { }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
+        return AttributionAndPermissionUtils::buildAttributionSource(callingPid, callingUid);
+    }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+            int32_t deviceId) {
+        return AttributionAndPermissionUtils::buildAttributionSource(
+                callingPid, callingUid, deviceId);
+    }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+            const std::string& packageName, int32_t deviceId) {
+        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid,
+                deviceId);
+        attributionSource.packageName = packageName;
+        return attributionSource;
+    }
+
+    int getCallingUid() const {
+        return mAttributionAndPermissionUtils->getCallingUid();
+    }
+
+    int getCallingPid() const {
+        return mAttributionAndPermissionUtils->getCallingPid();
+    }
+
+    int64_t clearCallingIdentity() const {
+        return mAttributionAndPermissionUtils->clearCallingIdentity();
+    }
+
+    void restoreCallingIdentity(int64_t token) const {
+        mAttributionAndPermissionUtils->restoreCallingIdentity(token);
+    }
+
+    // The word 'System' here does not refer to callers only on the system
+    // partition. They just need to have an android system uid.
+    bool callerHasSystemUid() const {
+        return (getCallingUid() < AID_APP_START);
+    }
+
+    bool hasPermissionsForCamera(int callingPid, int callingUid, int32_t deviceId) const {
+        return hasPermissionsForCamera(std::string(), callingPid, callingUid, deviceId);
+    }
+
+    bool hasPermissionsForCamera(int callingPid, int callingUid,
+            const std::string& packageName, int32_t deviceId) const {
+        return hasPermissionsForCamera(std::string(), callingPid, callingUid, packageName,
+                deviceId);
+    }
+
+    bool hasPermissionsForCamera(const std::string& cameraId, int callingPid,
+            int callingUid, int32_t deviceId) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid,
+                deviceId);
+        return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+    }
+
+    bool hasPermissionsForCamera(const std::string& cameraId, int callingPid, int callingUid,
+            const std::string& packageName, int32_t deviceId) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid, packageName,
+                deviceId);
+        return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+    }
+
+    bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid,
+            bool checkCameraPermissions = true) const  {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForSystemCamera(
+                    cameraId, attributionSource, checkCameraPermissions);
+    }
+
+    bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
+            int callingUid) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForCameraHeadlessSystemUser(
+                    cameraId, attributionSource);
+    }
+
+    bool hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForCameraPrivacyAllowlist(
+                attributionSource);
+    }
+
+    bool hasPermissionsForOpenCloseListener(int callingPid, int callingUid) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForOpenCloseListener(
+                attributionSource);
+    }
+
+    bool isAutomotiveDevice() const {
+        return mAttributionAndPermissionUtils->isAutomotiveDevice();
+    }
+
+    bool isAutomotivePrivilegedClient(int32_t uid) const {
+        return mAttributionAndPermissionUtils->isAutomotivePrivilegedClient(uid);
+    }
+
+    bool isTrustedCallingUid(uid_t uid) const {
+        return mAttributionAndPermissionUtils->isTrustedCallingUid(uid);
+    }
+
+    bool isHeadlessSystemUserMode() const {
+        return mAttributionAndPermissionUtils->isHeadlessSystemUserMode();
+    }
+
+    status_t getUidForPackage(const std::string &packageName, int userId,
+            /*inout*/uid_t& uid, int err) const {
+        return mAttributionAndPermissionUtils->getUidForPackage(packageName, userId, uid, err);
+    }
+
+    bool isCallerCameraServerNotDelegating() const {
+        return mAttributionAndPermissionUtils->isCallerCameraServerNotDelegating();
+    }
+};
+
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 65e93a9..85bca6f 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -18,19 +18,25 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <gui/Surface.h>
 #include <inttypes.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
 #include <camera/StringUtils.h>
 #include <binder/IServiceManager.h>
+#include <system/window.h>
+
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
 
 #include "CameraServiceProxyWrapper.h"
 
 namespace android {
 
 using hardware::CameraExtensionSessionStats;
+using hardware::CameraFeatureCombinationStats;
 using hardware::CameraSessionStats;
 using hardware::ICameraServiceProxy;
+using hardware::camera2::params::SessionConfiguration;
 
 namespace {
 // Sentinel value to be returned when extension session with a stale or invalid key is reported.
@@ -95,7 +101,8 @@
         sp<hardware::ICameraServiceProxy>& proxyBinder,
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
         const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
-        bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats) {
+        bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+        const std::vector<hardware::CameraStreamStats>& streamStats) {
     Mutex::Autolock l(mLock);
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_IDLE;
@@ -106,6 +113,7 @@
     mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
     mSessionStats.mUsedUltraWide = usedUltraWide;
     mSessionStats.mUsedZoomOverride = usedZoomOverride;
+    mSessionStats.mMostRequestedFpsRange = mostRequestedFpsRange;
     mSessionStats.mStreamStats = streamStats;
 
     updateProxyDeviceState(proxyBinder);
@@ -213,6 +221,111 @@
     proxyBinder->pingForUserUpdate();
 }
 
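+// Encodes the requested session configuration as a bitmask of
+// CameraFeatureCombinationStats::CAMERA_FEATURE_* flags (60fps, stabilization, HLG10,
+// JPEG / JPEG_R, 4K) used for metrics reporting.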
+int64_t CameraServiceProxyWrapper::encodeSessionConfiguration(
+        const SessionConfiguration& sessionConfig) {
+    int64_t features = CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+    const static int32_t WIDTH_4K = 3840;
+    const static int32_t HEIGHT_4K = 2160;
+
+    // Check session parameters
+    if (sessionConfig.hasSessionParameters()) {
+        const auto& parameters = sessionConfig.getSessionParameters();
+
+        camera_metadata_ro_entry fpsEntry = parameters.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
+        if (fpsEntry.count == 2 && fpsEntry.data.i32[1] == 60) {
+            features |= CameraFeatureCombinationStats::CAMERA_FEATURE_60_FPS;
+        }
+
+        camera_metadata_ro_entry stabEntry =
+                parameters.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE);
+        if (stabEntry.count == 1 && stabEntry.data.u8[0] ==
+                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) {
+            features |= CameraFeatureCombinationStats::CAMERA_FEATURE_STABILIZATION;
+        }
+    }
+
+    // Check output configurations
+    const auto& outputConfigs = sessionConfig.getOutputConfigurations();
+    for (const auto& config : outputConfigs) {
+        int format = config.getFormat();
+        int dataSpace = config.getDataspace();
+        int64_t dynamicRangeProfile = config.getDynamicRangeProfile();
+
+        // Check JPEG and JPEG_R features
+        if (format == HAL_PIXEL_FORMAT_BLOB) {
+            if (dataSpace == HAL_DATASPACE_V0_JFIF) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_JPEG;
+            } else if (dataSpace == static_cast<android_dataspace_t>(
+                    aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_JPEG_R;
+            }
+        } else {
+            if (dynamicRangeProfile == HAL_DATASPACE_BT2020_HLG) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_HLG10;
+            }
+
+            // Check 4K
+            const auto& gbps = config.getGraphicBufferProducers();
+            int32_t width = 0, height = 0;
+            if (gbps.size() > 0) {
+                if (gbps[0] == nullptr) {
+                    ALOGE("%s: Failed to query size due to abandoned surface.",
+                            __FUNCTION__);
+                    return CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+                }
+
+                sp<Surface> surface = new Surface(gbps[0], /*useAsync*/false);
+                ANativeWindow *anw = surface.get();
+
+                width = ANativeWindow_getWidth(anw);
+                if (width < 0) {
+                    ALOGE("%s: Failed to query Surface width: %s (%d)",
+                            __FUNCTION__, strerror(-width), width);
+                    return CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+                }
+                height = ANativeWindow_getHeight(anw);
+                if (height < 0) {
+                    ALOGE("%s: Failed to query Surface height: %s (%d)",
+                            __FUNCTION__, strerror(-height), height);
+                    return CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+                }
+            } else {
+                width = config.getWidth();
+                height = config.getHeight();
+            }
+            if (width == WIDTH_4K && height == HEIGHT_4K) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_4K;
+            }
+        }
+    }
+    return features;
+}
+
+// Note: The `ret` parameter is the return value of the
+// `isSessionConfigurationWithParametersSupported` binder call from the app.
+void CameraServiceProxyWrapper::logFeatureCombinationInternal(
+        const std::string &cameraId, int clientUid,
+        const SessionConfiguration& sessionConfiguration, binder::Status ret,
+        int type) {
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    if (proxyBinder == nullptr) return;
+
+    int64_t featureCombination = encodeSessionConfiguration(sessionConfiguration);
+    int queryStatus = ret.isOk() ? OK : ret.serviceSpecificErrorCode();
+    CameraFeatureCombinationStats stats;
+    stats.mCameraId = cameraId;
+    stats.mUid = clientUid;
+    stats.mFeatureCombination = featureCombination;
+    stats.mQueryType = type;
+    stats.mStatus = queryStatus;
+
+    auto status = proxyBinder->notifyFeatureCombinationStats(stats);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed to notify feature combination stats: %s", __FUNCTION__,
+                status.exceptionMessage().c_str());
+    }
+}
+
 int CameraServiceProxyWrapper::getRotateAndCropOverride(const std::string &packageName,
         int lensFacing, int userId) {
     sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
@@ -281,7 +394,8 @@
 void CameraServiceProxyWrapper::logIdle(const std::string& id,
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
         const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
-        bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats) {
+        bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+        const std::vector<hardware::CameraStreamStats>& streamStats) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
@@ -294,8 +408,9 @@
     }
 
     ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
-            ", userTag %s, videoStabilizationMode %d", __FUNCTION__, id.c_str(), requestCount,
-            resultErrorCount, deviceError, userTag.c_str(), videoStabilizationMode);
+            ", userTag %s, videoStabilizationMode %d, most common FPS [%d,%d]",
+            __FUNCTION__, id.c_str(), requestCount, resultErrorCount, deviceError, userTag.c_str(),
+            videoStabilizationMode, mostRequestedFpsRange.first, mostRequestedFpsRange.second);
     for (size_t i = 0; i < streamStats.size(); i++) {
         ALOGV("%s: streamStats[%zu]: w %d h %d, requestedCount %" PRId64 ", dropCount %"
                 PRId64 ", startTimeMs %d" ,
@@ -306,7 +421,8 @@
 
     sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
     sessionStats->onIdle(proxyBinder, requestCount, resultErrorCount, deviceError, userTag,
-            videoStabilizationMode, usedUltraWide, usedZoomOverride, streamStats);
+            videoStabilizationMode, usedUltraWide, usedZoomOverride,
+            mostRequestedFpsRange, streamStats);
 }
 
 void CameraServiceProxyWrapper::logOpen(const std::string& id, int facing,
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index 49b7a8c..ad8b1cd 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
 #define ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
 
+#include <android/hardware/CameraFeatureCombinationStats.h>
 #include <android/hardware/ICameraServiceProxy.h>
 
 #include <utils/Mutex.h>
@@ -26,7 +27,7 @@
 #include <string>
 
 #include <camera/CameraSessionStats.h>
+#include <camera/camera2/SessionConfiguration.h>
 
 namespace android {
 
 class CameraServiceProxyWrapper {
@@ -62,7 +63,8 @@
         void onIdle(sp<hardware::ICameraServiceProxy>& proxyBinder,
                 int64_t requestCount, int64_t resultErrorCount, bool deviceError,
                 const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
-                bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats);
+                bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+                const std::vector<hardware::CameraStreamStats>& streamStats);
 
         std::string updateExtensionSessionStats(
                 const hardware::CameraExtensionSessionStats& extStats);
@@ -85,6 +87,11 @@
     // ID generated for the open event associated with them.
     static int64_t generateLogId(std::random_device& randomDevice);
 
+    static int64_t encodeSessionConfiguration(const SessionConfiguration& sessionConfig);
+
+    void logFeatureCombinationInternal(const std::string &cameraId, int clientUid,
+            const hardware::camera2::params::SessionConfiguration& sessionConfiguration,
+            binder::Status ret, int type);
 public:
     CameraServiceProxyWrapper(sp<hardware::ICameraServiceProxy> serviceProxy = nullptr) :
             mCameraServiceProxy(serviceProxy)
@@ -111,7 +118,22 @@
     void logIdle(const std::string& id,
             int64_t requestCount, int64_t resultErrorCount, bool deviceError,
             const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
-            bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats);
+            bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+            const std::vector<hardware::CameraStreamStats>& streamStats);
+
+    // Feature combination query
+    void logFeatureCombinationQuery(const std::string &id, int clientUid,
+            const hardware::camera2::params::SessionConfiguration& sessionConfiguration,
+            binder::Status ret) {
+        logFeatureCombinationInternal(id, clientUid, sessionConfiguration, ret,
+                (int)hardware::CameraFeatureCombinationStats::QueryType::QUERY_FEATURE_COMBINATION);
+    }
+    void logSessionCharacteristicsQuery(const std::string &id, int clientUid,
+            const hardware::camera2::params::SessionConfiguration& sessionConfiguration,
+            binder::Status ret) {
+        logFeatureCombinationInternal(id, clientUid, sessionConfiguration, ret, (int)
+                hardware::CameraFeatureCombinationStats::QueryType::QUERY_SESSION_CHARACTERISTICS);
+    }
 
     // Ping camera service proxy for user update
     void pingCameraServiceProxy();
diff --git a/services/camera/libcameraservice/utils/CameraThreadState.cpp b/services/camera/libcameraservice/utils/CameraThreadState.cpp
deleted file mode 100644
index 2352b80..0000000
--- a/services/camera/libcameraservice/utils/CameraThreadState.cpp
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "CameraThreadState.h"
-#include <binder/IPCThreadState.h>
-#include <hwbinder/IPCThreadState.h>
-#include <binderthreadstate/CallerUtils.h>
-#include <unistd.h>
-
-namespace android {
-
-int CameraThreadState::getCallingUid() {
-    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
-        return hardware::IPCThreadState::self()->getCallingUid();
-    }
-    return IPCThreadState::self()->getCallingUid();
-}
-
-int CameraThreadState::getCallingPid() {
-    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
-        return hardware::IPCThreadState::self()->getCallingPid();
-    }
-    return IPCThreadState::self()->getCallingPid();
-}
-
-int64_t CameraThreadState::clearCallingIdentity() {
-    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
-        return hardware::IPCThreadState::self()->clearCallingIdentity();
-    }
-    return IPCThreadState::self()->clearCallingIdentity();
-}
-
-void CameraThreadState::restoreCallingIdentity(int64_t token) {
-    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
-        hardware::IPCThreadState::self()->restoreCallingIdentity(token);
-    } else {
-        IPCThreadState::self()->restoreCallingIdentity(token);
-    }
-    return;
-}
-
-} // android
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
index 92a1030..f3afc69 100644
--- a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
@@ -20,7 +20,6 @@
 #include <pthread.h>
 #include <sched.h>
 
-#include "CameraThreadState.h"
 #include <private/android_filesystem_config.h>
 #include <processgroup/processgroup.h>
 #include <processgroup/sched_policy.h>
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 11ef9b7..40ca276 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -21,12 +21,14 @@
 #include "../api2/HeicCompositeStream.h"
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
 #include "api2/JpegRCompositeStream.h"
+#include "binder/Status.h"
 #include "common/CameraDeviceBase.h"
 #include "common/HalConversionsTemplated.h"
 #include "../CameraService.h"
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
+#include "device3/ZoomRatioMapper.h"
 #include "system/graphics-base-v1.1.h"
 #include <camera/StringUtils.h>
 #include <ui/PublicFormat.h>
@@ -432,7 +434,7 @@
         const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
         int64_t streamUseCase, int timestampBase, int mirrorMode,
-        int32_t colorSpace) {
+        int32_t colorSpace, bool respectSurfaceSize) {
     // bufferProducer must be non-null
     if (gbp == nullptr) {
         std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
@@ -529,8 +531,10 @@
         // we can use the default stream configuration map
         foundInMaxRes = true;
     }
-    // Round dimensions to the nearest dimensions available for this format
-    if (flexibleConsumer && isPublicFormat(format) &&
+    // Round dimensions to the nearest dimensions available for this format.
+    // Only do the rounding if the client doesn't ask to respect the surface
+    // size.
+    if (flexibleConsumer && isPublicFormat(format) && !respectSurfaceSize &&
             !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
             format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
             /*out*/&height)) {
@@ -676,6 +680,67 @@
     stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
 }
 
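+// Maps a single validated OutputStreamInfo into the HAL stream configuration. Composite
+// streams (depth, HEIC, Jpeg/R) are expanded into their internal camera streams; if a
+// composite stream is not supported, *earlyExit is set and a success status is returned.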
+binder::Status mapStream(const OutputStreamInfo& streamInfo, bool isCompositeJpegRDisabled,
+        const CameraMetadata& deviceInfo, camera_stream_rotation_t rotation,
+        size_t* streamIdx/*out*/, const std::string &physicalId, int32_t groupId,
+        const std::string& logicalCameraId,
+        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration /*out*/,
+        bool *earlyExit /*out*/) {
+    bool isDepthCompositeStream =
+            camera3::DepthCompositeStream::isDepthCompositeStreamInfo(streamInfo);
+    bool isHeicCompositeStream =
+            camera3::HeicCompositeStream::isHeicCompositeStreamInfo(streamInfo);
+    bool isJpegRCompositeStream =
+            camera3::JpegRCompositeStream::isJpegRCompositeStreamInfo(streamInfo) &&
+            !isCompositeJpegRDisabled;
+    if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
+        // We need to take into account that composite streams can have
+        // additional internal camera streams.
+        std::vector<OutputStreamInfo> compositeStreams;
+        status_t ret;
+        if (isDepthCompositeStream) {
+          // TODO: Take care of composite streams.
+            ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+                    deviceInfo, &compositeStreams);
+        } else if (isHeicCompositeStream) {
+            ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
+                deviceInfo, &compositeStreams);
+        } else {
+            ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+                deviceInfo, &compositeStreams);
+        }
+
+        if (ret != OK) {
+            std::string msg = fmt::sprintf(
+                    "Camera %s: Failed adding composite streams: %s (%d)",
+                    logicalCameraId.c_str(), strerror(-ret), ret);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+
+        if (compositeStreams.size() == 0) {
+            // No internal streams means composite stream not
+            // supported.
+            *earlyExit = true;
+            return binder::Status::ok();
+        } else if (compositeStreams.size() > 1) {
+            size_t streamCount = streamConfiguration.streams.size() + compositeStreams.size() - 1;
+            streamConfiguration.streams.resize(streamCount);
+        }
+
+        for (const auto& compositeStream : compositeStreams) {
+            mapStreamInfo(compositeStream, rotation,
+                    physicalId, groupId,
+                    &streamConfiguration.streams[(*streamIdx)++]);
+        }
+    } else {
+        mapStreamInfo(streamInfo, rotation,
+                physicalId, groupId, &streamConfiguration.streams[(*streamIdx)++]);
+    }
+
+    return binder::Status::ok();
+}
+
 binder::Status
 convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
@@ -684,7 +749,8 @@
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
         aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, metadata_vendor_id_t vendorTagId,
-        bool checkSessionParams, bool *earlyExit) {
+        bool checkSessionParams, const std::vector<int32_t>& additionalKeys,
+        bool *earlyExit) {
     using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
     auto operatingMode = sessionConfiguration.getOperatingMode();
     binder::Status res = checkOperatingMode(operatingMode, deviceInfo,
@@ -753,6 +819,7 @@
         const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
             it.getGraphicBufferProducers();
         bool deferredConsumer = it.isDeferred();
+        bool isConfigurationComplete = it.isComplete();
         const std::string &physicalCameraId = it.getPhysicalCameraId();
 
         int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
@@ -768,7 +835,8 @@
         int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
         OutputStreamInfo streamInfo;
 
-        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
+        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType(),
+                               isConfigurationComplete);
         if (!res.isOk()) {
             return res;
         }
@@ -781,15 +849,38 @@
         int64_t streamUseCase = it.getStreamUseCase();
         int timestampBase = it.getTimestampBase();
         int mirrorMode = it.getMirrorMode();
-        if (deferredConsumer) {
+        // Handle deferred consumers, and not-yet-complete configurations that have no
+        // buffer producers attached: derive the stream info from the surface type.
+        if (deferredConsumer || (!isConfigurationComplete && numBufferProducers == 0)) {
             streamInfo.width = it.getWidth();
             streamInfo.height = it.getHeight();
-            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
             auto surfaceType = it.getSurfaceType();
-            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
-            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
-                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
+            switch (surfaceType) {
+                case OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE:
+                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
+                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+                    break;
+                case OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW:
+                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE
+                            | GraphicBuffer::USAGE_HW_COMPOSER;
+                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+                    break;
+                case OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER:
+                case OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC:
+                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_VIDEO_ENCODER;
+                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+                    break;
+                case OutputConfiguration::SURFACE_TYPE_IMAGE_READER:
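+                    // ImageReader surfaces carry their own usage, format, and dataspace.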
+                    streamInfo.consumerUsage = it.getUsage();
+                    streamInfo.format = it.getFormat();
+                    streamInfo.dataSpace = (android_dataspace)it.getDataspace();
+                    break;
+                default:
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                                        "Invalid surface type.");
             }
             streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
             if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
@@ -802,8 +893,13 @@
                                 "Deferred surface sensor pixel modes not valid");
             }
             streamInfo.streamUseCase = streamUseCase;
-            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
-                    &streamConfiguration.streams[streamIdx++]);
+            auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                    camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId, groupId,
+                    logicalCameraId, streamConfiguration, earlyExit);
+            if (*earlyExit || !status.isOk()) {
+                return status;
+            }
+
             isStreamInfoValid = true;
 
             if (numBufferProducers == 0) {
@@ -815,63 +911,18 @@
             sp<Surface> surface;
             res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                     logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
-                    streamUseCase, timestampBase, mirrorMode, colorSpace);
+                    streamUseCase, timestampBase, mirrorMode, colorSpace,
+                    /*respectSurfaceSize*/true);
 
             if (!res.isOk())
                 return res;
 
             if (!isStreamInfoValid) {
-                bool isDepthCompositeStream =
-                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
-                bool isHeicCompositeStream =
-                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
-                bool isJpegRCompositeStream =
-                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
-                        !isCompositeJpegRDisabled;
-                if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
-                    // We need to take in to account that composite streams can have
-                    // additional internal camera streams.
-                    std::vector<OutputStreamInfo> compositeStreams;
-                    if (isDepthCompositeStream) {
-                      // TODO: Take care of composite streams.
-                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
-                                deviceInfo, &compositeStreams);
-                    } else if (isHeicCompositeStream) {
-                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    } else {
-                        ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    }
-
-                    if (ret != OK) {
-                        std::string msg = fmt::sprintf(
-                                "Camera %s: Failed adding composite streams: %s (%d)",
-                                logicalCameraId.c_str(), strerror(-ret), ret);
-                        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
-                    }
-
-                    if (compositeStreams.size() == 0) {
-                        // No internal streams means composite stream not
-                        // supported.
-                        *earlyExit = true;
-                        return binder::Status::ok();
-                    } else if (compositeStreams.size() > 1) {
-                        streamCount += compositeStreams.size() - 1;
-                        streamConfiguration.streams.resize(streamCount);
-                    }
-
-                    for (const auto& compositeStream : compositeStreams) {
-                        mapStreamInfo(compositeStream,
-                                static_cast<camera_stream_rotation_t> (it.getRotation()),
-                                physicalCameraId, groupId,
-                                &streamConfiguration.streams[streamIdx++]);
-                    }
-                } else {
-                    mapStreamInfo(streamInfo,
-                            static_cast<camera_stream_rotation_t> (it.getRotation()),
-                            physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
+                auto status  = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                        static_cast<camera_stream_rotation_t> (it.getRotation()), &streamIdx,
+                        physicalCameraId, groupId, logicalCameraId, streamConfiguration, earlyExit);
+                if (*earlyExit || !status.isOk()) {
+                    return status;
                 }
                 isStreamInfoValid = true;
             }
@@ -884,7 +935,7 @@
         CameraMetadata filteredParams;
 
         filterParameters(sessionConfiguration.getSessionParameters(), deviceInfo,
-                vendorTagId, filteredParams);
+                additionalKeys, vendorTagId, filteredParams);
 
         camera_metadata_t* metadata = const_cast<camera_metadata_t*>(filteredParams.getAndLock());
         uint8_t *metadataP = reinterpret_cast<uint8_t*>(metadata);
@@ -912,22 +963,37 @@
 }
 
 binder::Status checkSurfaceType(size_t numBufferProducers,
-        bool deferredConsumer, int surfaceType)  {
+        bool deferredConsumer, int surfaceType, bool isConfigurationComplete)  {
     if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
         ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
                 __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
-    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
+    } else if ((numBufferProducers == 0) && (!deferredConsumer) && isConfigurationComplete) {
         ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
     }
 
-    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
-            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
-
-    if (deferredConsumer && !validSurfaceType) {
-        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
+    if (deferredConsumer) {
+        bool validSurfaceType = (
+                (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
+                (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
+        if (!validSurfaceType) {
+            std::string msg = fmt::sprintf("Deferred target surface has invalid "
+                    "surfaceType = %d.", surfaceType);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+    } else if (!isConfigurationComplete && numBufferProducers == 0) {
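+        // A not-yet-complete configuration with no attached producers must use a surface
+        // type whose buffer properties can be derived up front.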
+        bool validSurfaceType = (
+                (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER) ||
+                (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC) ||
+                (surfaceType == OutputConfiguration::SURFACE_TYPE_IMAGE_READER));
+        if (!validSurfaceType) {
+            std::string msg = fmt::sprintf("OutputConfiguration target surface has invalid "
+                    "surfaceType = %d.", surfaceType);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
     }
 
     return binder::Status::ok();
@@ -1133,7 +1199,8 @@
 }
 
 void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
-        metadata_vendor_id_t vendorTagId, CameraMetadata& dst) {
+        const std::vector<int32_t>& additionalTags, metadata_vendor_id_t vendorTagId,
+        CameraMetadata& dst) {
     const CameraMetadata params(src);
     camera_metadata_ro_entry_t availableSessionKeys = deviceInfo.find(
             ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
@@ -1142,9 +1209,12 @@
             filteredParams.getAndLock());
     set_camera_metadata_vendor_id(meta, vendorTagId);
     filteredParams.unlock(meta);
-    for (size_t i = 0; i < availableSessionKeys.count; i++) {
-        camera_metadata_ro_entry entry = params.find(
-                availableSessionKeys.data.i32[i]);
+
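+    // Copy only the parameters advertised as available session keys, plus any explicitly
+    // requested additional tags.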
+    std::unordered_set<int32_t> filteredTags(availableSessionKeys.data.i32,
+            availableSessionKeys.data.i32 + availableSessionKeys.count);
+    filteredTags.insert(additionalTags.begin(), additionalTags.end());
+    for (int32_t tag : filteredTags) {
+        camera_metadata_ro_entry entry = params.find(tag);
         if (entry.count > 0) {
             filteredParams.update(entry);
         }
@@ -1152,6 +1222,29 @@
     dst = std::move(filteredParams);
 }
 
+status_t overrideDefaultRequestKeys(CameraMetadata *request) {
+    // Override the template request with ZoomRatioMapper
+    status_t res = ZoomRatioMapper::initZoomRatioInTemplate(request);
+    if (res != OK) {
+        ALOGE("Failed to update zoom ratio: %s (%d)", strerror(-res), res);
+        return res;
+    }
+
+    // Fill in JPEG_QUALITY if not available
+    if (!request->exists(ANDROID_JPEG_QUALITY)) {
+        static const uint8_t kDefaultJpegQuality = 95;
+        request->update(ANDROID_JPEG_QUALITY, &kDefaultJpegQuality, 1);
+    }
+
+    // Fill in AUTOFRAMING if not available
+    if (!request->exists(ANDROID_CONTROL_AUTOFRAMING)) {
+        static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
+        request->update(ANDROID_CONTROL_AUTOFRAMING, &kDefaultAutoframingMode, 1);
+    }
+
+    return OK;
+}
+
 } // namespace SessionConfigurationUtils
 } // namespace camera3
 } // namespace android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 0545cea..3c0f109 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -112,7 +112,7 @@
         const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed,  int64_t dynamicRangeProfile,
         int64_t streamUseCase, int timestampBase, int mirrorMode,
-        int32_t colorSpace);
+        int32_t colorSpace, bool respectSurfaceSize);
 
 //check if format is 10-bit output compatible
 bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
@@ -143,10 +143,10 @@
 const std::string &logicalCameraId);
 
 binder::Status checkSurfaceType(size_t numBufferProducers,
-bool deferredConsumer, int surfaceType);
+        bool deferredConsumer, int surfaceType, bool isConfigurationComplete);
 
 binder::Status checkOperatingMode(int operatingMode,
-const CameraMetadata &staticInfo, const std::string &cameraId);
+        const CameraMetadata &staticInfo, const std::string &cameraId);
 
 binder::Status
 convertToHALStreamCombination(
@@ -156,7 +156,8 @@
     const std::vector<std::string> &physicalCameraIds,
     aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
     bool overrideForPerfClass, metadata_vendor_id_t vendorTagId,
-    bool checkSessionParams, bool *earlyExit);
+    bool checkSessionParams, const std::vector<int32_t>& additionalKeys,
+    bool *earlyExit);
 
 StreamConfigurationPair getStreamConfigurationPair(const CameraMetadata &metadata);
 
@@ -177,7 +178,10 @@
         aidl::android::hardware::camera::device::RequestTemplate* tempId /*out*/);
 
 void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
-        metadata_vendor_id_t vendorTagId, CameraMetadata& dst);
+        const std::vector<int32_t>& additionalKeys, metadata_vendor_id_t vendorTagId,
+        CameraMetadata& dst);
+
+status_t overrideDefaultRequestKeys(CameraMetadata *request);
 
 template <typename T> bool contains(std::set<T> container, T value) {
     return container.find(value) != container.end();
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index bf8ea84..9986a84 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -23,6 +23,7 @@
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
+#include "utils/Utils.h"
 
 using android::camera3::OutputStreamInfo;
 using android::hardware::camera2::ICameraDeviceUser;
@@ -48,16 +49,16 @@
     hidl.streams.resize(aidl.streams.size());
     size_t i = 0;
     for (const auto &stream : aidl.streams) {
-        if (static_cast<int>(stream.dynamicRangeProfile) !=
+        if (eToI(stream.dynamicRangeProfile) !=
                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
             ALOGE("%s Dynamic range profile %" PRId64 " not supported by HIDL", __FUNCTION__,
-                    stream.dynamicRangeProfile);
+                    eToI(stream.dynamicRangeProfile));
             return BAD_VALUE;
         }
 
-        if (static_cast<int>(stream.useCase) != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+        if (eToI(stream.useCase) != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
             ALOGE("%s Stream use case %" PRId64 "not supported by HIDL", __FUNCTION__,
-                    stream.useCase);
+                    eToI(stream.useCase));
             return BAD_VALUE;
         }
 
@@ -113,7 +114,7 @@
     auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
             false /*isCompositeJpegRDisabled*/, getMetadata, physicalCameraIds,
             aidlStreamConfiguration, overrideForPerfClass, vendorTagId,
-            /*checkSessionParams*/false, earlyExit);
+            /*checkSessionParams*/false, /*additionalKeys*/{}, earlyExit);
     if (!ret.isOk()) {
         return ret;
     }
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
index c3aac72..2bca4cb 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
@@ -48,19 +48,35 @@
 
 void SessionStatsBuilder::buildAndReset(int64_t* requestCount,
         int64_t* errorResultCount, bool* deviceError,
-        std::map<int, StreamStats> *statsMap) {
+        std::pair<int32_t, int32_t>* mostRequestedFpsRange,
+        std::map<int, StreamStats>* statsMap) {
     std::lock_guard<std::mutex> l(mLock);
     *requestCount = mRequestCount;
     *errorResultCount = mErrorResultCount;
     *deviceError = mDeviceError;
     *statsMap = mStatsMap;
 
+    int32_t minFps = 0, maxFps = 0;
+    if (mRequestedFpsRangeHistogram.size() > 0) {
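+        // Pick the FPS range that was requested for the largest number of frames.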
+        auto mostCommonIt = mRequestedFpsRangeHistogram.begin();
+        for (auto it = mostCommonIt; it != mRequestedFpsRangeHistogram.end(); it++) {
+            if (it->second.first > mostCommonIt->second.first) {
+                mostCommonIt = it;
+            }
+        }
+        minFps = mostCommonIt->first >> 32;
+        maxFps = mostCommonIt->first & 0xFFFF'FFFFU;
+    }
+    *mostRequestedFpsRange = std::make_pair(minFps, maxFps);
+
     // Reset internal states
     mRequestCount = 0;
     mErrorResultCount = 0;
     mCounterStopped = false;
     mDeviceError = false;
     mUserTag.clear();
+    mRequestedFpsRangeHistogram.clear();
+
     for (auto& streamStats : mStatsMap) {
         StreamStats& streamStat = streamStats.second;
         streamStat.mRequestedFrameCount = 0;
@@ -125,6 +141,31 @@
     mDeviceError = true;
 }
 
+void SessionStatsBuilder::incFpsRequestedCount(int32_t minFps, int32_t maxFps,
+        int64_t frameNumber) {
+    std::lock_guard<std::mutex> l(mLock);
+
+    // Stuff range into a 64-bit value to make hashing simple
+    uint64_t currentFpsTarget = minFps;
+    currentFpsTarget = currentFpsTarget << 32 | maxFps;
+
+    auto &stats = mRequestedFpsRangeHistogram[currentFpsTarget];
+    stats.first++;
+    stats.second = frameNumber;
+
+    // Ensure weird app input of target FPS ranges doesn't cause unbounded memory growth
+    if (mRequestedFpsRangeHistogram.size() > FPS_HISTOGRAM_MAX_SIZE) {
+        // Find oldest used fps to drop by last seen frame number
+        auto deleteIt = mRequestedFpsRangeHistogram.begin();
+        for (auto it = deleteIt; it != mRequestedFpsRangeHistogram.end(); it++) {
+            if (it->second.second < deleteIt->second.second) {
+                deleteIt = it;
+            }
+        }
+        mRequestedFpsRangeHistogram.erase(deleteIt);
+    }
+}
+
 void StreamStats::updateLatencyHistogram(int32_t latencyMs) {
     size_t i;
     for (i = 0; i < mCaptureLatencyBins.size(); i++) {
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.h b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
index 2936531..914c09e 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.h
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
@@ -22,6 +22,8 @@
 #include <array>
 #include <map>
 #include <mutex>
+#include <unordered_map>
+#include <utility>
 
 namespace android {
 
@@ -64,7 +66,8 @@
     void buildAndReset(/*out*/int64_t* requestCount,
             /*out*/int64_t* errorResultCount,
             /*out*/bool* deviceError,
-            /*out*/std::map<int, StreamStats> *statsMap);
+            /*out*/std::pair<int32_t, int32_t>* mostRequestedFpsRange,
+            /*out*/std::map<int, StreamStats>* statsMap);
 
     // Stream specific counter
     void startCounter(int streamId);
@@ -76,6 +79,13 @@
     void incResultCounter(bool dropped);
     void onDeviceError();
 
+    // Session specific statistics
+
+    // Limit on size of FPS range histogram
+    static const size_t FPS_HISTOGRAM_MAX_SIZE = 10;
+
+    void incFpsRequestedCount(int32_t minFps, int32_t maxFps, int64_t frameNumber);
+
     SessionStatsBuilder() : mRequestCount(0), mErrorResultCount(0),
              mCounterStopped(false), mDeviceError(false) {}
 private:
@@ -85,6 +95,11 @@
     bool mCounterStopped;
     bool mDeviceError;
     std::string mUserTag;
+
+    // Histogram of frame counts of requested target FPS ranges
+    // (min_fps << 32 | max_fps) -> (# of frames with this fps, last seen framenumber)
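+    // For example, a requested range of (30, 60) is stored under key
+    // (30LL << 32) | 60 == 0x0000'001E'0000'003C.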
+    std::unordered_map<uint64_t, std::pair<int64_t, int64_t>> mRequestedFpsRangeHistogram;
+
     // Map from stream id to stream statistics
     std::map<int, StreamStats> mStatsMap;
 };
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index 38de93a..5258c0e 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -384,7 +384,7 @@
         }
         returnStr << "]\n";
     }
-    return std::move(returnStr.str());
+    return returnStr.str();
 }
 
 template<typename T>
diff --git a/services/camera/libcameraservice/utils/Utils.cpp b/services/camera/libcameraservice/utils/Utils.cpp
index c8f5e86..76517dc 100644
--- a/services/camera/libcameraservice/utils/Utils.cpp
+++ b/services/camera/libcameraservice/utils/Utils.cpp
@@ -14,22 +14,27 @@
  * limitations under the License.
  */
 
+#define LOG_TAG "Camera3-Utils"
+
 #include "Utils.h"
 #include <android-base/properties.h>
 #include <com_android_internal_camera_flags.h>
-
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <vendorsupport/api_level.h>
 
 namespace android {
 
-using namespace com::android::internal::camera::flags;
+namespace flags = com::android::internal::camera::flags;
 
-constexpr const char *LEGACY_VNDK_VERSION_PROP = "ro.vndk.version";
-constexpr const char *BOARD_API_LEVEL_PROP = "ro.board.api_level";
+namespace {
+constexpr const char* LEGACY_VNDK_VERSION_PROP = "ro.vndk.version";
+constexpr const char* BOARD_API_LEVEL_PROP = "ro.board.api_level";
 constexpr int MAX_VENDOR_API_LEVEL = 1000000;
 constexpr int FIRST_VNDK_VERSION = 202404;
 
-int getVNDKVersionFromProp(int defaultVersion) {
-    if (!com_android_internal_camera_flags_use_ro_board_api_level_for_vndk_version()) {
+int legacyGetVNDKVersionFromProp(int defaultVersion) {
+    if (!flags::use_ro_board_api_level_for_vndk_version()) {
         return base::GetIntProperty(LEGACY_VNDK_VERSION_PROP, defaultVersion);
     }
 
@@ -51,5 +56,56 @@
     vndkVersion = (vndkVersion - FIRST_VNDK_VERSION) / 100;
     return __ANDROID_API_V__ + vndkVersion;
 }
+}  // anonymous namespace
 
-} // namespace android
+int getVNDKVersionFromProp(int defaultVersion) {
+    if (!flags::use_system_api_for_vndk_version()) {
+        return legacyGetVNDKVersionFromProp(defaultVersion);
+    }
+
+    int vendorApiLevel = AVendorSupport_getVendorApiLevel();
+    if (vendorApiLevel == 0) {
+        // Couldn't find vendor API level, return default
+        return defaultVersion;
+    }
+
+    // Vendor API levels for Android V and above are of the format YYYYMM, starting with 202404.
+    // AVendorSupport_getSdkApiLevelOf maps them back to SDK API levels while leaving older
+    // values unchanged.
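+    // For example, a vendor API level of 202404 (Android V) maps back to __ANDROID_API_V__.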
+    return AVendorSupport_getSdkApiLevelOf(vendorApiLevel);
+}
+
+RunThreadWithRealtimePriority::RunThreadWithRealtimePriority(int tid)
+    : mTid(tid), mPreviousPolicy(sched_getscheduler(tid)) {
+    if (flags::realtime_priority_bump()) {
+        auto res = sched_getparam(mTid, &mPreviousParams);
+        if (res != OK) {
+            ALOGE("Can't retrieve thread scheduler parameters: %s (%d)", strerror(-res), res);
+            return;
+        }
+
+        struct sched_param param = {0};
+        param.sched_priority = kRequestThreadPriority;
+
+        res = sched_setscheduler(mTid, SCHED_FIFO, &param);
+        if (res != OK) {
+            ALOGW("Can't set realtime priority for thread: %s (%d)", strerror(-res), res);
+        } else {
+            ALOGD("Set real time priority for thread (tid %d)", mTid);
+            mPolicyBumped = true;
+        }
+    }
+}
+
+RunThreadWithRealtimePriority::~RunThreadWithRealtimePriority() {
+    if (mPolicyBumped && flags::realtime_priority_bump()) {
+        auto res = sched_setscheduler(mTid, mPreviousPolicy, &mPreviousParams);
+        if (res != OK) {
+            ALOGE("Can't set regular priority for thread: %s (%d)", strerror(-res), res);
+        } else {
+            ALOGD("Set regular priority for thread (tid %d)", mTid);
+        }
+    }
+}
+
+}  // namespace android
diff --git a/services/camera/libcameraservice/utils/Utils.h b/services/camera/libcameraservice/utils/Utils.h
index f8a107d..0eb5e2c 100644
--- a/services/camera/libcameraservice/utils/Utils.h
+++ b/services/camera/libcameraservice/utils/Utils.h
@@ -17,9 +17,23 @@
 #ifndef ANDROID_SERVERS_CAMERA_UTILS_H
 #define ANDROID_SERVERS_CAMERA_UTILS_H
 
+#include <sched.h>
+#include <unistd.h>
+#include <type_traits>
+
 namespace android {
 
 /**
+ * Magically convert an enum to its underlying integer type, mostly so values can be
+ * printed with printf-style formatters without warnings.
+ * Backport of C++23 std::to_underlying()
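+ *
+ * Example:
+ *   ALOGE("Stream use case %" PRId64 " not supported", eToI(stream.useCase));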
+ */
+template<typename Enum>
+constexpr std::underlying_type_t<Enum> eToI(Enum val) {
+    return static_cast<std::underlying_type_t<Enum>>(val);
+}
+
+/**
  * As of Android V, ro.board.api_level returns the year and month of release (ex. 202404)
  * instead of release SDK version. This function maps year/month format back to release
  * SDK version.
@@ -28,6 +42,40 @@
  */
 int getVNDKVersionFromProp(int defaultVersion);
 
+/**
+ * An instance of this class will raise the scheduling policy of a given
+ * thread to real time and keep it this way throughout the lifetime
+ * of the object. The thread scheduling policy will revert to its original
+ * state after the instance is released. By default the implementation will
+ * raise the priority of the current thread unless clients explicitly specify
+ * another thread id.
+ * Clients must avoid:
+ *  - Keeping an instance of this class for extended, long-running operations.
+ *    This is only intended for short/temporary priority bumps that mitigate
+ *    scheduling delays within critical camera paths.
+ *  - Allocating instances of this class on the memory heap unless clients have
+ *    complete control over the object lifetime. It is preferable to allocate
+ *    instances of this class on the stack instead.
+ *  - Nesting multiple instances of this class using the same default or same thread id.
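+ *
+ * Typical usage (illustrative sketch only, not an actual call site):
+ *
+ *   {
+ *     // Bumps the current thread to SCHED_FIFO for the duration of this scope.
+ *     RunThreadWithRealtimePriority realtimeBump;
+ *     doTimeCriticalWork();  // hypothetical time-critical section
+ *   }  // destructor restores the previous scheduling policy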
+ */
+class RunThreadWithRealtimePriority final {
+  public:
+    RunThreadWithRealtimePriority(int tid = gettid());
+    ~RunThreadWithRealtimePriority();
+
+    RunThreadWithRealtimePriority(const RunThreadWithRealtimePriority&) = delete;
+    RunThreadWithRealtimePriority& operator=(const RunThreadWithRealtimePriority&) = delete;
+
+    // SCHED_FIFO priority for request submission thread in HFR mode
+    static const int kRequestThreadPriority = 1;
+
+  private:
+    int mTid;
+    int mPreviousPolicy;
+    bool mPolicyBumped = false;
+    struct sched_param mPreviousParams;
+};
+
 } // namespace android
 
 #endif //ANDROID_SERVERS_CAMERA_UTILS_H
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
new file mode 100644
index 0000000..22dd806
--- /dev/null
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VirtualDeviceCameraIdMapper"
+
+#include <android_companion_virtualdevice_flags.h>
+#include <camera/CameraUtils.h>
+
+#include "VirtualDeviceCameraIdMapper.h"
+
+namespace android {
+
+namespace vd_flags = android::companion::virtualdevice::flags;
+
+void VirtualDeviceCameraIdMapper::addCamera(const std::string& cameraId,
+        int32_t deviceId, const std::string& mappedCameraId) {
+    if (!vd_flags::camera_device_awareness()) {
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
+        return;
+    }
+
+    if (deviceId == kDefaultDeviceId) {
+        ALOGV("%s: Not adding entry for a camera of the default device", __func__);
+        return;
+    }
+
+    ALOGV("%s: Adding camera %s for device %d with mapped id %s", __func__, cameraId.c_str(),
+          deviceId, mappedCameraId.c_str());
+
+    std::scoped_lock lock(mLock);
+    mDeviceIdMappedCameraIdPairToCameraIdMap[{deviceId, mappedCameraId}] = cameraId;
+    mCameraIdToDeviceIdMappedCameraIdPairMap[cameraId] = {deviceId, mappedCameraId};
+}
+
+void VirtualDeviceCameraIdMapper::removeCamera(const std::string& cameraId) {
+    if (!vd_flags::camera_device_awareness()) {
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
+        return;
+    }
+
+    auto deviceIdAndMappedCameraIdPair = getDeviceIdAndMappedCameraIdPair(cameraId);
+
+    std::scoped_lock lock(mLock);
+    mCameraIdToDeviceIdMappedCameraIdPairMap.erase(cameraId);
+    mDeviceIdMappedCameraIdPairToCameraIdMap.erase(deviceIdAndMappedCameraIdPair);
+}
+
+std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
+        int32_t deviceId, const std::string& mappedCameraId) const {
+    if (deviceId == kDefaultDeviceId) {
+        ALOGV("%s: Returning the camera id as the mapped camera id for camera %s, as it "
+              "belongs to the default device", __func__, mappedCameraId.c_str());
+        return mappedCameraId;
+    }
+
+    if (!vd_flags::camera_device_awareness()) {
+        ALOGV("%s: Device-aware camera feature is not enabled, returning the camera id as "
+              "the mapped camera id for camera %s", __func__, mappedCameraId.c_str());
+        return mappedCameraId;
+    }
+
+    std::scoped_lock lock(mLock);
+    auto iterator = mDeviceIdMappedCameraIdPairToCameraIdMap.find(
+            {deviceId, mappedCameraId});
+    if (iterator == mDeviceIdMappedCameraIdPairToCameraIdMap.end()) {
+        ALOGV("%s: No entry found for device id %d and mapped camera id %s", __func__,
+              deviceId, mappedCameraId.c_str());
+        return std::nullopt;
+    }
+    return iterator->second;
+}
+
+std::pair<int32_t, std::string> VirtualDeviceCameraIdMapper::getDeviceIdAndMappedCameraIdPair(
+        const std::string& cameraId) const {
+    if (!vd_flags::camera_device_awareness()) {
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
+        return std::make_pair(kDefaultDeviceId, cameraId);
+    }
+
+    std::scoped_lock lock(mLock);
+    auto iterator = mCameraIdToDeviceIdMappedCameraIdPairMap.find(cameraId);
+    if (iterator != mCameraIdToDeviceIdMappedCameraIdPairMap.end()) {
+        return iterator->second;
+    }
+    ALOGV("%s: No device id and mapped camera id found for camera id %s, so it must belong "
+            "to the default device ?", __func__, cameraId.c_str());
+    return std::make_pair(kDefaultDeviceId, cameraId);
+}
+
+int VirtualDeviceCameraIdMapper::getNumberOfCameras(int32_t deviceId) const {
+    if (!vd_flags::camera_device_awareness()) {
+        return 0;
+    }
+
+    int numOfCameras = 0;
+    std::scoped_lock lock(mLock);
+    for (const auto& [deviceIdMappedCameraIdPair, _]
+            : mDeviceIdMappedCameraIdPairToCameraIdMap) {
+        if (deviceIdMappedCameraIdPair.first == deviceId) {
+            numOfCameras++;
+        }
+    }
+    return numOfCameras;
+}
+
+std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
+        int api1CameraId, int32_t deviceId) const {
+    if (!vd_flags::camera_device_awareness()) {
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
+        return std::nullopt;
+    }
+
+    int matchingCameraIndex = 0;
+    std::scoped_lock lock(mLock);
+    for (const auto& [deviceIdMappedCameraIdPair, actualCameraId]
+            : mDeviceIdMappedCameraIdPairToCameraIdMap) {
+        if (deviceIdMappedCameraIdPair.first == deviceId) {
+            if (matchingCameraIndex == api1CameraId) {
+                return actualCameraId;
+            }
+            matchingCameraIndex++;
+        }
+    }
+    ALOGV("%s: No entry found for device id %d and API 1 camera id %d", __func__,
+          deviceId, api1CameraId);
+    return std::nullopt;
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
new file mode 100644
index 0000000..fdfde23
--- /dev/null
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_VIRTUAL_DEVICE_CAMERA_ID_MAPPER_H
+#define ANDROID_SERVERS_CAMERA_VIRTUAL_DEVICE_CAMERA_ID_MAPPER_H
+
+#include <string>
+#include <map>
+#include <mutex>
+
+#include <utils/Mutex.h>
+
+namespace android {
+
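+/**
+ * Keeps track of the mapping between HAL-visible camera ids and the
+ * (device id, app-visible camera id) pairs used for cameras owned by
+ * virtual devices.
+ *
+ * Illustrative example (the ids below are made up):
+ *
+ *   VirtualDeviceCameraIdMapper mapper;
+ *   mapper.addCamera("103", 42, "0");                // camera "103" belongs to virtual device 42
+ *   mapper.getActualCameraId(42, "0");               // returns "103"
+ *   mapper.getDeviceIdAndMappedCameraIdPair("103");  // returns {42, "0"}
+ */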
+class VirtualDeviceCameraIdMapper {
+public:
+    VirtualDeviceCameraIdMapper() {}
+
+    virtual ~VirtualDeviceCameraIdMapper() {}
+
+    void addCamera(const std::string& cameraId, int32_t deviceId,
+            const std::string& mappedCameraId) EXCLUDES(mLock);
+
+    void removeCamera(const std::string& cameraId) EXCLUDES(mLock);
+
+    /**
+     * Return the actual camera id for a given device id (i.e., the id of the device owning
+     * the camera, for a virtual camera this would be the id of the virtual device, and for
+     * any other cameras this would be the default device id, i.e., 0) and the mapped camera
+     * id (for virtual devices, the back and front virtual cameras of that device would have
+     * 0 and 1 respectively as their mapped camera id, and for any other cameras this
+     * would be their actual camera id). When the camera device awareness flag is disabled,
+     * this will return the given camera id itself.
+     */
+    std::optional<std::string> getActualCameraId(int32_t deviceId,
+            const std::string& mappedCameraId) const EXCLUDES(mLock);
+
+    /**
+     * Return the device id (i.e., the id of the device owning the camera, for a virtual
+     * camera this would be the id of the virtual device, and for any other cameras this
+     * would be the default device id, i.e., 0) and the mapped camera id (for virtual
+     * devices, the back and front virtual cameras of that device would have 0 and 1
+     * respectively as their mapped camera id, and for any other cameras this would
+     * be their actual camera id) for a given camera id. When the camera device awareness flag is
+     * disabled, this will return a pair of kDefaultDeviceId and the given cameraId.
+     */
+    std::pair<int32_t, std::string> getDeviceIdAndMappedCameraIdPair(
+            const std::string& cameraId) const EXCLUDES(mLock);
+
+    /**
+     * Return the number of virtual cameras corresponding to the legacy camera API
+     * getNumberOfCameras. When the camera device awareness flag is disabled, this will return 0.
+     */
+    int getNumberOfCameras(int32_t deviceId) const EXCLUDES(mLock);
+
+    /**
+     * Return the actual camera id corresponding to the virtual camera with the given API 1 camera
+     * id. When the camera device awareness flag is disabled, this will return std::nullopt.
+     */
+    std::optional<std::string> getActualCameraId(int api1CameraId, int32_t deviceId)
+            const EXCLUDES(mLock);
+
+private:
+    mutable std::mutex mLock;
+
+    // Map of (deviceId, app-visible cameraId) -> HAL-visible cameraId
+    std::map<std::pair<int32_t, std::string>, std::string>
+            mDeviceIdMappedCameraIdPairToCameraIdMap GUARDED_BY(mLock);
+    // Map of HAL-visible cameraId -> (deviceId, app-visible cameraId)
+    std::map<std::string, std::pair<int32_t, std::string>>
+            mCameraIdToDeviceIdMappedCameraIdPairMap GUARDED_BY(mLock);
+};
+
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_VIRTUAL_DEVICE_CAMERA_ID_MAPPER_H
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index cb4e10f..7ece0cb 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -11,6 +12,7 @@
         "libbinder",
         "libbinder_ndk",
         "libcamera_metadata",
+        "libexif",
         "liblog",
         "libfmq",
         "libgui",
@@ -46,15 +48,14 @@
     name: "libvirtualcamera_utils",
     srcs: [
         "util/JpegUtil.cc",
-        "util/MetadataBuilder.cc",
+        "util/MetadataUtil.cc",
         "util/Util.cc",
-        "util/TestPatternHelper.cc",
         "util/EglDisplayContext.cc",
         "util/EglFramebuffer.cc",
         "util/EglProgram.cc",
         "util/EglSurfaceTexture.cc",
         "util/EglUtil.cc",
-        "util/Permissions.cc"
+        "util/Permissions.cc",
     ],
     defaults: [
         "libvirtualcamera_defaults",
@@ -64,13 +65,7 @@
 cc_library_static {
     name: "libvirtualcamera",
     srcs: [
-        "VirtualCameraProvider.cc",
-        "VirtualCameraDevice.cc",
-        "VirtualCameraSession.cc",
-        "VirtualCameraStream.cc",
-        "VirtualCameraService.cc",
-        "VirtualCameraSessionContext.cc",
-        "VirtualCameraRenderThread.cc",
+        "*.cc",
     ],
     defaults: [
         "libvirtualcamera_defaults",
diff --git a/services/camera/virtualcamera/README.md b/services/camera/virtualcamera/README.md
new file mode 100644
index 0000000..04b4811
--- /dev/null
+++ b/services/camera/virtualcamera/README.md
@@ -0,0 +1,164 @@
+# Virtual Camera
+
+The virtual camera feature allows 3rd party applications to expose a remote or
+virtual camera to the standard Android camera frameworks (Camera2/CameraX, NDK,
+camera1).
+
+The stack is composed of 4 different parts:
+
+1.  The **Virtual Camera Service** (this directory), which implements the Camera
+    HAL and acts as an interface between the Android Camera Server and the
+    *Virtual Camera Owner* (via the VirtualDeviceManager APIs).
+
+2.  The **VirtualDeviceManager**, running in the system process and handling the
+    communication between the Virtual Camera Service and the Virtual Camera
+    owner.
+
+3.  The **Virtual Camera Owner**, the client application declaring the Virtual
+    Camera and handling the production of image data. We will also refer to this
+    part as the **producer**.
+
+4.  The **Consumer Application**, the client application consuming camera data,
+    which can be any application using the camera APIs.
+
+This document describes the functionalities of the *Virtual Camera Service*.
+
+## Before reading
+
+The service implements the Camera HAL. It's best to have a bit of an
+understanding of how it works by first reading the
+[HAL documentation](https://source.android.com/docs/core/camera).
+
+![](https://source.android.com/static/docs/core/camera/images/ape_fwk_camera2.png)
+
+The HAL implementations are declared in:
+
+-   [VirtualCameraDevice](./VirtualCameraDevice.h)
+-   [VirtualCameraProvider](./VirtualCameraProvider.h)
+-   [VirtualCameraSession](./VirtualCameraSession.h)
+
+## Current supported features
+
+Virtual Cameras report `EXTERNAL`
+[hardware level](https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL)
+but some
+[functionalities of `EXTERNAL`](https://developer.android.com/reference/android/hardware/camera2/CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
+hardware level are not fully supported.
+
+Here is a list of supported features:
+
+-   Single input, multiple output stream and capture
+-   Support for YUV and JPEG
+
+Notable missing features:
+
+-   Support for auto 3A (AWB, AE, AF): the virtual camera will announce convergence
+    of the 3A algorithms even though it can't receive any information about this from
+    the owner.
+
+-   No flash/torch support
+
+## Overview
+
+Graphic data are exchanged using the Surface infrastructure. Like any other
+Camera HAL, the Surfaces to write data into are received from the client.
+Virtual Camera exposes a **different** Surface onto which the owner can write
+data. In the middle, we use an EGL Texture which adapts (if needed) the producer
+data to the required consumer format (scaling only for now, but we might also
+add support for rotation and cropping in the future).
+
+When the client application requires multiple resolutions, the closest one among
+the supported resolutions is used for the input data, and the image data is
+downscaled for the lower resolutions.
+
+Depending on the type of output, the rendering pipelines change. Here is an
+overview of the YUV and JPEG pipelines.
+
+**YUV Rendering:**
+
+```
+Virtual Device Owner Surface[1] (Producer) --{binds to}--> EGL
+Texture[1] --{renders into}--> Client Surface[1-n] (Consumer)
+```
+
+**JPEG Rendering:**
+
+```
+Virtual Device Owner Surface[1] (Producer) --{binds to}--> EGL
+Texture[1] --{compress data into}--> temporary buffer --{renders into}-->
+Client Surface[1-n] (Consumer)
+```
+
+## Life of a capture request
+
+> Before reading the following, you must understand the concepts of
+> [CaptureRequest](https://developer.android.com/reference/android/hardware/camera2/CaptureRequest)
+> and
+> [OutputConfiguration](https://developer.android.com/reference/android/hardware/camera2/OutputConfiguration).
+
+1.  The consumer creates a session with one or more `Surfaces`.
+
+2.  The VirtualCamera owner will receive a call to
+    `VirtualCameraCallback#onStreamConfigured` with a reference to another
+    `Surface` into which it can write.
+
+3.  The consumer will then start sending `CaptureRequests`. The owner will
+    receive a call to `VirtualCameraCallback#onProcessCaptureRequest`, at which
+    point it should write the required data into the surface it previously
+    received. At the same time, a new task will be enqueued in the render thread.
+
+4.  The [VirtualCameraRenderThread](./VirtualCameraRenderThread.cc) will consume
+    the enqueued tasks as they come. It will wait for the producer to write into
+    the input Surface (using `Surface::waitForNextFrame`).
+
+    > **Note:** Since the Surface API allows us to wait for the next frame,
+    > there is no need for the producer to notify when the frame is ready by
+    > calling a `processCaptureResult()` equivalent.
+
+5.  The EGL Texture is updated with the content of the Surface.
+
+6.  The EGL Texture renders into the output Surfaces.
+
+7.  The Camera client is notified of the "shutter" event and the `CaptureResult`
+    is sent to the consumer.
+
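+A condensed, self-contained C++ sketch of the render-thread pattern described in
+steps 3-7 (illustrative pseudocode only; the real `VirtualCameraRenderThread`
+interfaces differ):
+
+```cpp
+#include <condition_variable>
+#include <cstdio>
+#include <mutex>
+#include <queue>
+
+struct CaptureTask { long frameNumber; };
+
+std::mutex gLock;
+std::condition_variable gTaskAvailable;
+std::queue<CaptureTask> gTasks;  // step 3: a task is enqueued per capture request
+
+void renderThreadLoop() {
+  for (;;) {
+    CaptureTask task;
+    {
+      std::unique_lock<std::mutex> lock(gLock);
+      gTaskAvailable.wait(lock, [] { return !gTasks.empty(); });
+      task = gTasks.front();
+      gTasks.pop();
+    }
+    // Steps 4-7: the real implementation waits for the producer frame
+    // (`Surface::waitForNextFrame`), updates the EGL texture, renders into the
+    // consumer Surfaces, then sends the shutter event and the CaptureResult.
+    std::printf("Rendered frame %ld\n", task.frameNumber);
+  }
+}
+```
+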
+## EGL Rendering
+
+### The render thread
+
+The [VirtualCameraRenderThread](./VirtualCameraRenderThread.h) module takes care
+of rendering the input from the owner to the output via the EGL Texture. The
+rendering is done either to a JPEG buffer (the BLOB rendering used to create a
+JPEG) or to a YUV buffer used mainly for preview Surfaces or video.
+Two EGLPrograms (shaders) defined in [EglProgram](./util/EglProgram.cc) handle
+the rendering of the data.
+
+### Initialization
+
+[EGlDisplayContext](./util/EglDisplayContext.h) initializes the whole EGL
+environment (Display, Surface, Context, and Config).
+
+The EGL Rendering is backed by a
+[ANativeWindow](https://developer.android.com/ndk/reference/group/a-native-window)
+which is just the native counterpart of the
+[Surface](https://developer.android.com/reference/android/view/Surface), which
+itself is the producer side of a buffer queue, the consumer being either the
+display (Camera preview) or some encoder (to save the data or send it across the
+network).
+
+### More about OpenGL
+
+To better understand how the EGL rendering works, the following resources can be
+used:
+
+Introduction to OpenGL: https://learnopengl.com/
+
+The official documentation of the EGL API can be queried at:
+https://www.khronos.org/registry/egl/sdk/docs/man/xhtml/
+
+You can also use a Google search with the following query:
+
+```
+[function name] site:https://registry.khronos.org/EGL/sdk/docs/man/html/
+
+// example: eglSwapBuffers site:https://registry.khronos.org/EGL/sdk/docs/man/html/
+```
diff --git a/services/camera/virtualcamera/TEST_MAPPING b/services/camera/virtualcamera/TEST_MAPPING
index 25fca73..e976704 100644
--- a/services/camera/virtualcamera/TEST_MAPPING
+++ b/services/camera/virtualcamera/TEST_MAPPING
@@ -9,7 +9,8 @@
         {
           "exclude-annotation": "androidx.test.filters.FlakyTest"
         }
-      ]
+      ],
+      "keywords": ["primary-device"]
     }
   ]
 }
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureRequest.h b/services/camera/virtualcamera/VirtualCameraCaptureRequest.h
new file mode 100644
index 0000000..cf5402e
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureRequest.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
+
+#include "VirtualCameraDevice.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Struct used to pass request settings in the different part of
+// the virtual camera system.
+struct RequestSettings {
+  // JPEG_QUALITY metadata
+  int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+
+  // JPEG_ORIENTATION metadata
+  int jpegOrientation = VirtualCameraDevice::kDefaultJpegOrientation;
+
+  // JPEG_THUMBNAIL_SIZE metadata
+  Resolution thumbnailResolution = Resolution(0, 0);
+
+  // JPEG_THUMBNAIL_QUALITY metadata
+  int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+
+  // ANDROID_CONTROL_AE_TARGET_FPS_RANGE metadata
+  std::optional<FpsRange> fpsRange;
+
+  // CONTROL_CAPTURE_INTENT metadata
+  camera_metadata_enum_android_control_capture_intent_t captureIntent =
+      VirtualCameraDevice::kDefaultCaptureIntent;
+
+  // JPEG_GPS_LOCATION metadata
+  std::optional<GpsCoordinates> gpsCoordinates;
+
+  // CONTROL_AE_PRECAPTURE_TRIGGER metadata
+  std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+      aePrecaptureTrigger;
+};
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
\ No newline at end of file
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
new file mode 100644
index 0000000..a61f553
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "VirtualCameraCaptureResult.h"
+
+#include <cstdint>
+
+#include "VirtualCameraCaptureRequest.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "util/MetadataUtil.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+using ::aidl::android::hardware::camera::device::CameraMetadata;
+namespace {
+// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+// This roughly corresponds to frame latency, we set to
+// documented minimum of 2.
+static constexpr uint8_t kPipelineDepth = 2;
+
+}  // namespace
+
+CameraMetadata createCaptureResultMetadata(
+    const std::chrono::nanoseconds timestamp,
+    const RequestSettings& requestSettings,
+    const Resolution reportedSensorSize) {
+  // All of the keys used in the response need to be referenced in
+  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
+  // in VirtualCameraDevice.cc).
+  MetadataBuilder builder =
+      MetadataBuilder()
+          .setAberrationCorrectionMode(
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+          .setControlAeAvailableAntibandingModes(
+              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
+          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
+          .setControlAeExposureCompensation(0)
+          .setControlAeLockAvailable(false)
+          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
+          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+          .setControlAePrecaptureTrigger(
+              // Limited devices are expected to have precapture ae enabled and
+              // respond to cancellation requests. Since we don't actually support
+              // AE at all, let's just respect the cancellation expectation in
+              // case it's requested
+              requestSettings.aePrecaptureTrigger ==
+                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
+          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
+          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
+          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
+          .setControlCaptureIntent(requestSettings.captureIntent)
+          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
+          .setControlVideoStabilizationMode(
+              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
+          .setCropRegion(0, 0, reportedSensorSize.width,
+                         reportedSensorSize.height)
+          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+          .setFlashMode(ANDROID_FLASH_MODE_OFF)
+          .setFocalLength(VirtualCameraDevice::kFocalLength)
+          .setJpegQuality(requestSettings.jpegQuality)
+          .setJpegOrientation(requestSettings.jpegOrientation)
+          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
+                                requestSettings.thumbnailResolution.height)
+          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
+          .setLensOpticalStabilizationMode(
+              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
+          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+          .setPipelineDepth(kPipelineDepth)
+          .setSensorTimestamp(timestamp)
+          .setStatisticsHotPixelMapMode(
+              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
+          .setStatisticsLensShadingMapMode(
+              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
+          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
+
+  if (requestSettings.fpsRange.has_value()) {
+    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
+  }
+
+  if (requestSettings.gpsCoordinates.has_value()) {
+    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
+    builder.setJpegGpsCoordinates(coordinates);
+  }
+
+  std::unique_ptr<CameraMetadata> metadata = builder.build();
+
+  if (metadata == nullptr) {
+    ALOGE("%s: Failed to build capture result metadata", __func__);
+    return CameraMetadata();
+  }
+  return std::move(*metadata);
+}
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
\ No newline at end of file
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.h b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
new file mode 100644
index 0000000..9e5b4d7
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
+
+#include <chrono>
+#include <cstdint>
+#include <cstring>
+#include <future>
+#include <memory>
+#include <mutex>
+#include <thread>
+#include <utility>
+#include <vector>
+
+#include "Exif.h"
+#include "GLES/gl.h"
+#include "VirtualCameraCaptureRequest.h"
+#include "VirtualCameraDevice.h"
+#include "VirtualCameraRenderThread.h"
+#include "VirtualCameraSessionContext.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Construct the metadata for the capture result based on the request
+// settings, timestamp and reported sensor size.
+::aidl::android::hardware::camera::device::CameraMetadata
+createCaptureResultMetadata(std::chrono::nanoseconds timestamp,
+                            const RequestSettings& requestSettings,
+                            Resolution reportedSensorSize);
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
\ No newline at end of file
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index 768dffb..c3be62b 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -23,9 +23,12 @@
 #include <chrono>
 #include <cstdint>
 #include <iterator>
+#include <numeric>
 #include <optional>
 #include <string>
+#include <vector>
 
+#include "VirtualCameraService.h"
 #include "VirtualCameraSession.h"
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
 #include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
@@ -36,7 +39,7 @@
 #include "android/binder_status.h"
 #include "log/log.h"
 #include "system/camera_metadata.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 #include "util/Util.h"
 
 namespace android {
@@ -65,34 +68,88 @@
 
 using namespace std::chrono_literals;
 
-// Prefix of camera name - "device@1.1/virtual/{numerical_id}"
+// Prefix of camera name - "device@1.1/virtual/{camera_id}"
 const char* kDevicePathPrefix = "device@1.1/virtual/";
 
 constexpr int32_t kMaxJpegSize = 3 * 1024 * 1024 /*3MiB*/;
 
+constexpr std::chrono::nanoseconds kMaxFrameDuration =
+    std::chrono::duration_cast<std::chrono::nanoseconds>(
+        1e9ns / VirtualCameraDevice::kMinFps);
+
+constexpr uint8_t kPipelineMaxDepth = 2;
+
+constexpr int k30Fps = 30;
+
 constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
 
-const std::array<int32_t, 3> kOutputFormats{
-    ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-    ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
-    ANDROID_SCALER_AVAILABLE_FORMATS_BLOB};
+const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
+    Resolution(176, 144), Resolution(240, 144), Resolution(256, 144),
+    Resolution(240, 160), Resolution(240, 180)};
 
-struct Resolution {
-  Resolution(const int w, const int h) : width(w), height(h) {
-  }
+const std::array<PixelFormat, 3> kOutputFormats{
+    PixelFormat::IMPLEMENTATION_DEFINED, PixelFormat::YCBCR_420_888,
+    PixelFormat::BLOB};
 
-  bool operator<(const Resolution& other) const {
-    return width * height < other.width * other.height;
-  }
-
-  bool operator==(const Resolution& other) const {
-    return width == other.width && height == other.height;
-  }
-
-  const int width;
-  const int height;
+// The resolutions below will be used to extend the set of supported output resolutions.
+// All resolutions with lower pixel count and same aspect ratio as some supported
+// input resolution will be added to the set of supported output resolutions.
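+// For example, if an input configuration supports 1280x720, the lower-pixel-count
+// 16:9 entry 640x360 below will also be advertised as a supported output resolution.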
+const std::array<Resolution, 10> kOutputResolutions{
+    Resolution(320, 240),   Resolution(640, 360),  Resolution(640, 480),
+    Resolution(720, 480),   Resolution(720, 576),  Resolution(800, 600),
+    Resolution(1024, 576),  Resolution(1280, 720), Resolution(1280, 960),
+    Resolution(1280, 1080),
 };
 
+std::vector<Resolution> getSupportedJpegThumbnailSizes(
+    const std::vector<SupportedStreamConfiguration>& configs) {
+  auto isSupportedByAnyInputConfig =
+      [&configs](const Resolution thumbnailResolution) {
+        return std::any_of(
+            configs.begin(), configs.end(),
+            [thumbnailResolution](const SupportedStreamConfiguration& config) {
+              return isApproximatellySameAspectRatio(
+                  thumbnailResolution, Resolution(config.width, config.height));
+            });
+      };
+
+  std::vector<Resolution> supportedThumbnailSizes({Resolution(0, 0)});
+  std::copy_if(kStandardJpegThumbnailSizes.begin(),
+               kStandardJpegThumbnailSizes.end(),
+               std::back_insert_iterator(supportedThumbnailSizes),
+               isSupportedByAnyInputConfig);
+  return supportedThumbnailSizes;
+}
+
+bool isSupportedOutputFormat(const PixelFormat pixelFormat) {
+  return std::find(kOutputFormats.begin(), kOutputFormats.end(), pixelFormat) !=
+         kOutputFormats.end();
+}
+
+std::vector<FpsRange> fpsRangesForInputConfig(
+    const std::vector<SupportedStreamConfiguration>& configs) {
+  std::set<FpsRange> availableRanges;
+
+  for (const SupportedStreamConfiguration& config : configs) {
+    availableRanges.insert(
+        {.minFps = VirtualCameraDevice::kMinFps, .maxFps = config.maxFps});
+    availableRanges.insert({.minFps = config.maxFps, .maxFps = config.maxFps});
+  }
+
+  if (std::any_of(configs.begin(), configs.end(),
+                  [](const SupportedStreamConfiguration& config) {
+                    return config.maxFps >= k30Fps;
+                  })) {
+    // Extend the set of available ranges with (minFps <= 15, 30) & (30, 30) as
+    // required by CDD.
+    availableRanges.insert(
+        {.minFps = VirtualCameraDevice::kMinFps, .maxFps = k30Fps});
+    availableRanges.insert({.minFps = k30Fps, .maxFps = k30Fps});
+  }
+
+  return std::vector<FpsRange>(availableRanges.begin(), availableRanges.end());
+}
+
 std::optional<Resolution> getMaxResolution(
     const std::vector<SupportedStreamConfiguration>& configs) {
   auto itMax = std::max_element(configs.begin(), configs.end(),
@@ -127,13 +184,44 @@
     }
   }
 
+  std::map<Resolution, int> additionalResolutionToMaxFpsMap;
+  // Add additional resolutions we can support by downscaling input streams with
+  // the same aspect ratio.
+  for (const Resolution& outputResolution : kOutputResolutions) {
+    for (const auto& [resolution, maxFps] : resolutionToMaxFpsMap) {
+      if (resolutionToMaxFpsMap.find(outputResolution) !=
+          resolutionToMaxFpsMap.end()) {
+        // Resolution is already in the map, skip it.
+        continue;
+      }
+
+      if (outputResolution < resolution &&
+          isApproximatellySameAspectRatio(outputResolution, resolution)) {
+        // Lower resolution with the same aspect ratio; we can achieve this by
+        // downscaling, so let's add it to the map.
+        ALOGD(
+            "Extending set of output resolutions with %dx%d which has same "
+            "aspect ratio as supported input %dx%d.",
+            outputResolution.width, outputResolution.height, resolution.width,
+            resolution.height);
+        additionalResolutionToMaxFpsMap[outputResolution] = maxFps;
+        break;
+      }
+    }
+  }
+
+  // Add all resolutions we can achieve by downscaling to the map.
+  resolutionToMaxFpsMap.insert(additionalResolutionToMaxFpsMap.begin(),
+                               additionalResolutionToMaxFpsMap.end());
+
   return resolutionToMaxFpsMap;
 }
 
 // TODO(b/301023410) - Populate camera characteristics according to camera configuration.
 std::optional<CameraMetadata> initCameraCharacteristics(
     const std::vector<SupportedStreamConfiguration>& supportedInputConfig,
-    const SensorOrientation sensorOrientation, const LensFacing lensFacing) {
+    const SensorOrientation sensorOrientation, const LensFacing lensFacing,
+    const int32_t deviceId) {
   if (!std::all_of(supportedInputConfig.begin(), supportedInputConfig.end(),
                    [](const SupportedStreamConfiguration& config) {
                      return isFormatSupportedForInput(
@@ -148,28 +236,124 @@
       MetadataBuilder()
           .setSupportedHardwareLevel(
               ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
+          .setDeviceId(deviceId)
           .setFlashAvailable(false)
           .setLensFacing(
               static_cast<camera_metadata_enum_android_lens_facing>(lensFacing))
+          .setAvailableFocalLengths({VirtualCameraDevice::kFocalLength})
           .setSensorOrientation(static_cast<int32_t>(sensorOrientation))
+          .setSensorReadoutTimestamp(
+              ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED)
+          .setSensorTimestampSource(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN)
+          .setSensorPhysicalSize(36.0, 24.0)
+          .setAvailableAberrationCorrectionModes(
+              {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF})
+          .setAvailableNoiseReductionModes({ANDROID_NOISE_REDUCTION_MODE_OFF})
           .setAvailableFaceDetectModes({ANDROID_STATISTICS_FACE_DETECT_MODE_OFF})
+          .setAvailableStreamUseCases(
+              {ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
+               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
+               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
+               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
+               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL})
+          .setAvailableTestPatternModes({ANDROID_SENSOR_TEST_PATTERN_MODE_OFF})
           .setAvailableMaxDigitalZoom(1.0)
           .setControlAvailableModes({ANDROID_CONTROL_MODE_AUTO})
           .setControlAfAvailableModes({ANDROID_CONTROL_AF_MODE_OFF})
-          .setControlAeAvailableFpsRange(10, 30)
+          .setControlAvailableSceneModes({ANDROID_CONTROL_SCENE_MODE_DISABLED})
+          .setControlAvailableEffects({ANDROID_CONTROL_EFFECT_MODE_OFF})
+          .setControlAvailableVideoStabilizationModes(
+              {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF})
+          .setControlAeAvailableModes({ANDROID_CONTROL_AE_MODE_ON})
+          .setControlAeAvailableAntibandingModes(
+              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO})
+          .setControlAeAvailableFpsRanges(
+              fpsRangesForInputConfig(supportedInputConfig))
           .setControlMaxRegions(0, 0, 0)
           .setControlAfRegions({kDefaultEmptyControlRegion})
           .setControlAeRegions({kDefaultEmptyControlRegion})
           .setControlAwbRegions({kDefaultEmptyControlRegion})
-          .setControlAeCompensationRange(0, 1)
+          .setControlAeCompensationRange(0, 0)
           .setControlAeCompensationStep(camera_metadata_rational_t{0, 1})
+          .setControlAwbLockAvailable(false)
+          .setControlAeLockAvailable(false)
+          .setControlAvailableAwbModes({ANDROID_CONTROL_AWB_MODE_AUTO})
           .setControlZoomRatioRange(/*min=*/1.0, /*max=*/1.0)
+          .setCroppingType(ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY)
+          .setJpegAvailableThumbnailSizes(
+              getSupportedJpegThumbnailSizes(supportedInputConfig))
           .setMaxJpegSize(kMaxJpegSize)
-          .setAvailableRequestKeys({ANDROID_CONTROL_AF_MODE})
-          .setAvailableResultKeys({ANDROID_CONTROL_AF_MODE})
+          .setMaxFaceCount(0)
+          .setMaxFrameDuration(kMaxFrameDuration)
+          .setMaxNumberOutputStreams(
+              VirtualCameraDevice::kMaxNumberOfRawStreams,
+              VirtualCameraDevice::kMaxNumberOfProcessedStreams,
+              VirtualCameraDevice::kMaxNumberOfStallStreams)
+          .setRequestPartialResultCount(1)
+          .setPipelineMaxDepth(kPipelineMaxDepth)
+          .setSyncMaxLatency(ANDROID_SYNC_MAX_LATENCY_UNKNOWN)
+          .setAvailableRequestKeys({ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+                                    ANDROID_CONTROL_CAPTURE_INTENT,
+                                    ANDROID_CONTROL_AE_MODE,
+                                    ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+                                    ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                                    ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+                                    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+                                    ANDROID_CONTROL_AF_TRIGGER,
+                                    ANDROID_CONTROL_AF_MODE,
+                                    ANDROID_CONTROL_AWB_MODE,
+                                    ANDROID_SCALER_CROP_REGION,
+                                    ANDROID_CONTROL_EFFECT_MODE,
+                                    ANDROID_CONTROL_MODE,
+                                    ANDROID_CONTROL_SCENE_MODE,
+                                    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+                                    ANDROID_CONTROL_ZOOM_RATIO,
+                                    ANDROID_FLASH_MODE,
+                                    ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+                                    ANDROID_JPEG_ORIENTATION,
+                                    ANDROID_JPEG_QUALITY,
+                                    ANDROID_JPEG_THUMBNAIL_QUALITY,
+                                    ANDROID_JPEG_THUMBNAIL_SIZE,
+                                    ANDROID_NOISE_REDUCTION_MODE,
+                                    ANDROID_STATISTICS_FACE_DETECT_MODE})
+          .setAvailableResultKeys({
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+              ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+              ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+              ANDROID_CONTROL_AE_LOCK,
+              ANDROID_CONTROL_AE_MODE,
+              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+              ANDROID_CONTROL_AE_STATE,
+              ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+              ANDROID_CONTROL_AF_MODE,
+              ANDROID_CONTROL_AF_STATE,
+              ANDROID_CONTROL_AF_TRIGGER,
+              ANDROID_CONTROL_AWB_LOCK,
+              ANDROID_CONTROL_AWB_MODE,
+              ANDROID_CONTROL_AWB_STATE,
+              ANDROID_CONTROL_CAPTURE_INTENT,
+              ANDROID_CONTROL_EFFECT_MODE,
+              ANDROID_CONTROL_MODE,
+              ANDROID_CONTROL_SCENE_MODE,
+              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+              ANDROID_STATISTICS_FACE_DETECT_MODE,
+              ANDROID_FLASH_MODE,
+              ANDROID_FLASH_STATE,
+              ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+              ANDROID_JPEG_QUALITY,
+              ANDROID_JPEG_THUMBNAIL_QUALITY,
+              ANDROID_LENS_FOCAL_LENGTH,
+              ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+              ANDROID_NOISE_REDUCTION_MODE,
+              ANDROID_REQUEST_PIPELINE_DEPTH,
+              ANDROID_SENSOR_TIMESTAMP,
+              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
+              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+              ANDROID_STATISTICS_SCENE_FLICKER,
+          })
           .setAvailableCapabilities(
-              {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE})
-          .setAvailableCharacteristicKeys();
+              {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});
 
   // Active array size must correspond to largest supported input resolution.
   std::optional<Resolution> maxResolution =
@@ -179,6 +363,7 @@
   }
   builder.setSensorActiveArraySize(0, 0, maxResolution->width,
                                    maxResolution->height);
+  builder.setSensorPixelArraySize(maxResolution->width, maxResolution->height);
 
   std::vector<MetadataBuilder::StreamConfiguration> outputConfigurations;
 
@@ -189,7 +374,7 @@
       getResolutionToMaxFpsMap(supportedInputConfig);
 
   // Add configurations for all unique input resolutions and output formats.
-  for (int32_t format : kOutputFormats) {
+  for (const PixelFormat format : kOutputFormats) {
     std::transform(
         resolutionToMaxFpsMap.begin(), resolutionToMaxFpsMap.end(),
         std::back_inserter(outputConfigurations), [format](const auto& entry) {
@@ -198,7 +383,7 @@
           return MetadataBuilder::StreamConfiguration{
               .width = resolution.width,
               .height = resolution.height,
-              .format = format,
+              .format = static_cast<int32_t>(format),
               .minFrameDuration = std::chrono::nanoseconds(1s) / maxFps,
               .minStallDuration = 0s};
         });
@@ -207,7 +392,7 @@
   ALOGV("Adding %zu output configurations", outputConfigurations.size());
   builder.setAvailableOutputStreamConfigurations(outputConfigurations);
 
-  auto metadata = builder.build();
+  auto metadata = builder.setAvailableCharacteristicKeys().build();
   if (metadata == nullptr) {
     ALOGE("Failed to build metadata!");
     return CameraMetadata();
@@ -219,13 +404,14 @@
 }  // namespace
 
 VirtualCameraDevice::VirtualCameraDevice(
-    const uint32_t cameraId, const VirtualCameraConfiguration& configuration)
+    const std::string& cameraId,
+    const VirtualCameraConfiguration& configuration, int32_t deviceId)
     : mCameraId(cameraId),
       mVirtualCameraClientCallback(configuration.virtualCameraCallback),
       mSupportedInputConfigurations(configuration.supportedStreamConfigs) {
   std::optional<CameraMetadata> metadata = initCameraCharacteristics(
       mSupportedInputConfigurations, configuration.sensorOrientation,
-      configuration.lensFacing);
+      configuration.lensFacing, deviceId);
   if (metadata.has_value()) {
     mCameraCharacteristics = *metadata;
   } else {
@@ -288,6 +474,24 @@
     return false;
   }
 
+  const std::vector<Stream>& streams = streamConfiguration.streams;
+
+  Resolution firstStreamResolution(streams[0].width, streams[0].height);
+  auto isSameAspectRatioAsFirst = [firstStreamResolution](const Stream& stream) {
+    return isApproximatellySameAspectRatio(
+        firstStreamResolution, Resolution(stream.width, stream.height));
+  };
+  if (!std::all_of(streams.begin(), streams.end(), isSameAspectRatioAsFirst)) {
+    ALOGW(
+        "%s: Requested streams do not have same aspect ratio. Different aspect "
+        "ratios are currently "
+        "not supported by virtual camera. Stream configuration: %s",
+        __func__, streamConfiguration.toString().c_str());
+    return false;
+  }
+
+  int numberOfProcessedStreams = 0;
+  int numberOfStallStreams = 0;
   for (const Stream& stream : streamConfiguration.streams) {
     ALOGV("%s: Configuration queried: %s", __func__, stream.toString().c_str());
 
@@ -296,18 +500,25 @@
       return false;
     }
 
-    // TODO(b/301023410) remove hardcoded format checks, verify against configuration.
     if (stream.rotation != StreamRotation::ROTATION_0 ||
-        (stream.format != PixelFormat::IMPLEMENTATION_DEFINED &&
-         stream.format != PixelFormat::YCBCR_420_888 &&
-         stream.format != PixelFormat::BLOB)) {
+        !isSupportedOutputFormat(stream.format)) {
       ALOGV("Unsupported output stream type");
       return false;
     }
 
+    if (stream.format == PixelFormat::BLOB) {
+      numberOfStallStreams++;
+    } else {
+      numberOfProcessedStreams++;
+    }
+
+    Resolution requestedResolution(stream.width, stream.height);
     auto matchesSupportedInputConfig =
-        [&stream](const SupportedStreamConfiguration& config) {
-          return stream.width == config.width && stream.height == config.height;
+        [requestedResolution](const SupportedStreamConfiguration& config) {
+          Resolution supportedInputResolution(config.width, config.height);
+          return requestedResolution <= supportedInputResolution &&
+                 isApproximatellySameAspectRatio(requestedResolution,
+                                                 supportedInputResolution);
         };
     if (std::none_of(mSupportedInputConfigurations.begin(),
                      mSupportedInputConfigurations.end(),
@@ -316,6 +527,19 @@
       return false;
     }
   }
+
+  if (numberOfProcessedStreams > kMaxNumberOfProcessedStreams) {
+    ALOGE("%s: %d processed streams exceeds the supported maximum of %d",
+          __func__, numberOfProcessedStreams, kMaxNumberOfProcessedStreams);
+    return false;
+  }
+
+  if (numberOfStallStreams > kMaxNumberOfStallStreams) {
+    ALOGE("%s: %d stall streams exceeds the supported maximum of %d", __func__,
+          numberOfStallStreams, kMaxNumberOfStallStreams);
+    return false;
+  }
+
   return true;
 }
 
@@ -357,17 +581,45 @@
   return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
 }
 
-binder_status_t VirtualCameraDevice::dump(int fd, const char** args,
-                                          uint32_t numArgs) {
-  // TODO(b/301023410) Implement.
-  (void)fd;
-  (void)args;
-  (void)numArgs;
+binder_status_t VirtualCameraDevice::dump(int fd, const char**, uint32_t) {
+  ALOGD("Dumping virtual camera %s", mCameraId.c_str());
+  const char* indent = "  ";
+  const char* doubleIndent = "    ";
+  dprintf(fd, "%svirtual_camera %s belongs to virtual device %d\n", indent,
+          mCameraId.c_str(),
+          getDeviceId(mCameraCharacteristics)
+              .value_or(VirtualCameraService::kDefaultDeviceId));
+  dprintf(fd, "%sSupportedStreamConfiguration:\n", indent);
+  for (auto& config : mSupportedInputConfigurations) {
+    dprintf(fd, "%s%s", doubleIndent, config.toString().c_str());
+  }
   return STATUS_OK;
 }
 
 std::string VirtualCameraDevice::getCameraName() const {
-  return std::string(kDevicePathPrefix) + std::to_string(mCameraId);
+  return std::string(kDevicePathPrefix) + mCameraId;
+}
+
+const std::vector<SupportedStreamConfiguration>&
+VirtualCameraDevice::getInputConfigs() const {
+  return mSupportedInputConfigurations;
+}
+
+Resolution VirtualCameraDevice::getMaxInputResolution() const {
+  std::optional<Resolution> maxResolution =
+      getMaxResolution(mSupportedInputConfigurations);
+  if (!maxResolution.has_value()) {
+    ALOGE(
+        "%s: Cannot determine sensor size for virtual camera - input "
+        "configurations empty?",
+        __func__);
+    return Resolution(0, 0);
+  }
+  return maxResolution.value();
+}
+
+int VirtualCameraDevice::allocateInputStreamId() {
+  return mNextInputStreamId++;
 }
 
 std::shared_ptr<VirtualCameraDevice> VirtualCameraDevice::sharedFromThis() {
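For reference, a minimal standalone sketch of the stream-combination validation added above, using simplified stand-in types (the Stream struct, the aspect-ratio tolerance, and the limit parameters below are illustrative assumptions; the real check uses the AIDL Stream type, isApproximatellySameAspectRatio and the kMaxNumberOf*Streams constants):

  #include <algorithm>
  #include <cmath>
  #include <vector>

  struct Stream { int width; int height; bool isBlob; };

  // Assumption: aspect ratios are treated as equal within a small tolerance.
  bool approxSameAspectRatio(const Stream& a, const Stream& b, double eps = 0.05) {
    return std::abs(static_cast<double>(a.width) / a.height -
                    static_cast<double>(b.width) / b.height) < eps;
  }

  bool isCombinationSupported(const std::vector<Stream>& streams,
                              int maxProcessedStreams, int maxStallStreams) {
    if (streams.empty()) {
      return false;
    }
    // All streams must roughly share the aspect ratio of the first stream.
    const Stream& first = streams[0];
    if (!std::all_of(streams.begin(), streams.end(), [&first](const Stream& s) {
          return approxSameAspectRatio(first, s);
        })) {
      return false;
    }
    // BLOB (JPEG) streams count as stall streams, everything else as processed.
    int processed = 0;
    int stall = 0;
    for (const Stream& s : streams) {
      (s.isBlob ? stall : processed)++;
    }
    return processed <= maxProcessedStreams && stall <= maxStallStreams;
  }
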
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index 10d52af..a33d4cf 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -24,6 +24,8 @@
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
 #include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/device/BnCameraDevice.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
@@ -35,9 +37,10 @@
     : public ::aidl::android::hardware::camera::device::BnCameraDevice {
  public:
   explicit VirtualCameraDevice(
-      uint32_t cameraId,
+      const std::string& cameraId,
       const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
-          configuration);
+          configuration,
+      int32_t deviceId);
 
   virtual ~VirtualCameraDevice() override = default;
 
@@ -89,15 +92,58 @@
   binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
 
   // Returns unique virtual camera name in form
-  // "device@{major}.{minor}/virtual/{numerical_id}"
+  // "device@{major}.{minor}/virtual/{camera_id}"
   std::string getCameraName() const;
 
-  uint32_t getCameraId() const { return mCameraId; }
+  const std::string& getCameraId() const {
+    return mCameraId;
+  }
+
+  const std::vector<
+      aidl::android::companion::virtualcamera::SupportedStreamConfiguration>&
+  getInputConfigs() const;
+
+  // Returns largest supported input resolution.
+  Resolution getMaxInputResolution() const;
+
+  // Allocate and return next id for input stream (input surface).
+  int allocateInputStreamId();
+
+  // Maximal number of RAW streams - virtual camera doesn't support RAW streams.
+  static constexpr int32_t kMaxNumberOfRawStreams = 0;
+
+  // Maximal number of non-jpeg streams configured concurrently in a single
+  // session. This should be at least 3 and can be increased at the potential
+  // cost of more CPU/GPU load if there are many concurrent streams.
+  static constexpr int32_t kMaxNumberOfProcessedStreams = 3;
+
+  // Maximal number of stalling streams (in case of virtual camera, only jpeg
+  // for now). Can be increased at the potential cost of more GPU/CPU
+  // load.
+  static constexpr int32_t kMaxNumberOfStallStreams = 1;
+
+  // Focal length for full frame sensor.
+  static constexpr float kFocalLength = 43.0;
+
+  // Default JPEG compression quality.
+  static constexpr uint8_t kDefaultJpegQuality = 80;
+
+  // Default JPEG orientation.
+  static constexpr uint8_t kDefaultJpegOrientation = 0;
+
+  // Lowest min fps advertised in supported fps ranges.
+  static constexpr int kMinFps = 1;
+
+  // Default Make and Model for Exif
+  static constexpr char kDefaultMakeAndModel[] = "Android Virtual Camera";
+
+  static constexpr camera_metadata_enum_android_control_capture_intent_t
+      kDefaultCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
 
  private:
   std::shared_ptr<VirtualCameraDevice> sharedFromThis();
 
-  const uint32_t mCameraId;
+  const std::string mCameraId;
   const std::shared_ptr<
       ::aidl::android::companion::virtualcamera::IVirtualCameraCallback>
       mVirtualCameraClientCallback;
@@ -107,6 +153,8 @@
   const std::vector<
       aidl::android::companion::virtualcamera::SupportedStreamConfiguration>
       mSupportedInputConfigurations;
+
+  std::atomic_int mNextInputStreamId;
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.cc b/services/camera/virtualcamera/VirtualCameraProvider.cc
index e4a68f5..b2c10f6 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.cc
+++ b/services/camera/virtualcamera/VirtualCameraProvider.cc
@@ -42,10 +42,6 @@
 using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination;
 using ::aidl::android::hardware::camera::provider::ICameraProviderCallback;
 
-// TODO(b/301023410) Make camera id range configurable / dynamic
-// based on already registered devices.
-std::atomic_int VirtualCameraProvider::sNextId{42};
-
 ndk::ScopedAStatus VirtualCameraProvider::setCallback(
     const std::shared_ptr<ICameraProviderCallback>& in_callback) {
   ALOGV("%s", __func__);
@@ -154,9 +150,15 @@
 }
 
 std::shared_ptr<VirtualCameraDevice> VirtualCameraProvider::createCamera(
-    const VirtualCameraConfiguration& configuration) {
-  auto camera =
-      ndk::SharedRefBase::make<VirtualCameraDevice>(sNextId++, configuration);
+    const VirtualCameraConfiguration& configuration,
+    const std::string& cameraId, const int32_t deviceId) {
+  if (cameraId.empty()) {
+    ALOGE("%s: Cannot create camera with empty cameraId", __func__);
+    return nullptr;
+  }
+
+  auto camera = ndk::SharedRefBase::make<VirtualCameraDevice>(
+      cameraId, configuration, deviceId);
   std::shared_ptr<ICameraProviderCallback> callback;
   {
     const std::lock_guard<std::mutex> lock(mLock);
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.h b/services/camera/virtualcamera/VirtualCameraProvider.h
index 11d3123..606b44c 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.h
+++ b/services/camera/virtualcamera/VirtualCameraProvider.h
@@ -76,7 +76,8 @@
   // Returns nullptr if creation was not successful.
   std::shared_ptr<VirtualCameraDevice> createCamera(
       const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
-          configuration);
+          configuration,
+      const std::string& cameraId, int32_t deviceId);
 
   std::shared_ptr<VirtualCameraDevice> getCamera(const std::string& name);
 
@@ -91,9 +92,6 @@
 
   std::map<std::string, std::shared_ptr<VirtualCameraDevice>> mCameras
       GUARDED_BY(mLock);
-
-  // Numerical id to assign to next created camera.
-  static std::atomic_int sNextId;
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 25fe61b..40a96e4 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -18,17 +18,24 @@
 #include "VirtualCameraRenderThread.h"
 
 #include <chrono>
-#include <cstddef>
 #include <cstdint>
+#include <cstring>
 #include <future>
 #include <memory>
 #include <mutex>
 #include <thread>
+#include <utility>
+#include <vector>
 
+#include "Exif.h"
 #include "GLES/gl.h"
+#include "VirtualCameraCaptureResult.h"
+#include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraBlob.h"
+#include "aidl/android/hardware/camera/device/CameraBlobId.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/CaptureResult.h"
 #include "aidl/android/hardware/camera/device/ErrorCode.h"
@@ -39,11 +46,13 @@
 #include "android-base/thread_annotations.h"
 #include "android/binder_auto_utils.h"
 #include "android/hardware_buffer.h"
+#include "hardware/gralloc.h"
+#include "system/camera_metadata.h"
 #include "ui/GraphicBuffer.h"
+#include "ui/Rect.h"
 #include "util/EglFramebuffer.h"
 #include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
-#include "util/TestPatternHelper.h"
+#include "util/MetadataUtil.h"
 #include "util/Util.h"
 #include "utils/Errors.h"
 
@@ -53,6 +62,8 @@
 
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::BufferStatus;
+using ::aidl::android::hardware::camera::device::CameraBlob;
+using ::aidl::android::hardware::camera::device::CameraBlobId;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
 using ::aidl::android::hardware::camera::device::CaptureResult;
 using ::aidl::android::hardware::camera::device::ErrorCode;
@@ -65,22 +76,26 @@
 using ::aidl::android::hardware::graphics::common::PixelFormat;
 using ::android::base::ScopedLockAssertion;
 
+using ::android::hardware::camera::common::helper::ExifUtils;
+
 namespace {
 
+// helper type for the visitor
+template <class... Ts>
+struct overloaded : Ts... {
+  using Ts::operator()...;
+};
+// explicit deduction guide (not needed as of C++20)
+template <class... Ts>
+overloaded(Ts...) -> overloaded<Ts...>;
+
 using namespace std::chrono_literals;
 
 static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
 
-CameraMetadata createCaptureResultMetadata(
-    const std::chrono::nanoseconds timestamp) {
-  std::unique_ptr<CameraMetadata> metadata =
-      MetadataBuilder().setSensorTimestamp(timestamp).build();
-  if (metadata == nullptr) {
-    ALOGE("%s: Failed to build capture result metadata", __func__);
-    return CameraMetadata();
-  }
-  return std::move(*metadata);
-}
+static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
+
+static constexpr UpdateTextureTask kUpdateTextureTask;
 
 NotifyMsg createShutterNotifyMsg(int frameNumber,
                                  std::chrono::nanoseconds timestamp) {
@@ -150,6 +165,71 @@
   }
 }
 
+std::vector<uint8_t> createExif(
+    Resolution imageSize, const CameraMetadata resultMetadata,
+    const std::vector<uint8_t>& compressedThumbnail = {}) {
+  std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
+  exifUtils->initialize();
+
+  // Make a copy of the metadata in order to convert it to the HAL metadata
+  // format (as opposed to the AIDL class) and use the setFromMetadata method
+  // from ExifUtils.
+  camera_metadata_t* rawSettings =
+      clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
+  if (rawSettings != nullptr) {
+    android::hardware::camera::common::helper::CameraMetadata halMetadata(
+        rawSettings);
+    exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
+  }
+  exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
+  exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
+  exifUtils->setFlash(0);
+
+  std::vector<uint8_t> app1Data;
+
+  size_t thumbnailDataSize = compressedThumbnail.size();
+  const void* thumbnailData =
+      thumbnailDataSize > 0
+          ? reinterpret_cast<const void*>(compressedThumbnail.data())
+          : nullptr;
+
+  if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
+    ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
+    return app1Data;
+  }
+
+  const uint8_t* data = exifUtils->getApp1Buffer();
+  const size_t size = exifUtils->getApp1Length();
+
+  app1Data.insert(app1Data.end(), data, data + size);
+  return app1Data;
+}
+
+std::chrono::nanoseconds getMaxFrameDuration(
+    const RequestSettings& requestSettings) {
+  if (requestSettings.fpsRange.has_value()) {
+    return std::chrono::nanoseconds(static_cast<uint64_t>(
+        1e9 / std::max(1, requestSettings.fpsRange->minFps)));
+  }
+  return std::chrono::nanoseconds(
+      static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
+}
+
+class FrameAvailableListenerProxy : public ConsumerBase::FrameAvailableListener {
+ public:
+  FrameAvailableListenerProxy(std::function<void()> callback)
+      : mOnFrameAvailableCallback(callback) {
+  }
+
+  virtual void onFrameAvailable(const BufferItem&) override {
+    ALOGV("%s: onFrameAvailable", __func__);
+    mOnFrameAvailableCallback();
+  }
+
+ private:
+  std::function<void()> mOnFrameAvailableCallback;
+};
+
 }  // namespace
 
 CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -170,14 +250,14 @@
 }
 
 VirtualCameraRenderThread::VirtualCameraRenderThread(
-    VirtualCameraSessionContext& sessionContext, const int mWidth,
-    const int mHeight,
-    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
+    VirtualCameraSessionContext& sessionContext,
+    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
+    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
     : mCameraDeviceCallback(cameraDeviceCallback),
-      mInputSurfaceWidth(mWidth),
-      mInputSurfaceHeight(mHeight),
-      mTestMode(testMode),
-      mSessionContext(sessionContext) {
+      mInputSurfaceSize(inputSurfaceSize),
+      mReportedSensorSize(reportedSensorSize),
+      mSessionContext(sessionContext),
+      mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
 }
 
 VirtualCameraRenderThread::~VirtualCameraRenderThread() {
@@ -188,8 +268,11 @@
 }
 
 ProcessCaptureRequestTask::ProcessCaptureRequestTask(
-    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
-    : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
+    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+    const RequestSettings& requestSettings)
+    : mFrameNumber(frameNumber),
+      mBuffers(requestBuffers),
+      mRequestSettings(requestSettings) {
 }
 
 int ProcessCaptureRequestTask::getFrameNumber() const {
@@ -201,9 +284,29 @@
   return mBuffers;
 }
 
+const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
+  return mRequestSettings;
+}
+
+void VirtualCameraRenderThread::requestTextureUpdate() {
+  std::lock_guard<std::mutex> lock(mLock);
+  // If the queue is not empty, we don't need to set the mTextureUpdateRequested
+  // flag, since the texture will be updated during ProcessCaptureRequestTask
+  // processing anyway.
+  if (mQueue.empty()) {
+    mTextureUpdateRequested = true;
+    mCondVar.notify_one();
+  }
+}
+
 void VirtualCameraRenderThread::enqueueTask(
     std::unique_ptr<ProcessCaptureRequestTask> task) {
   std::lock_guard<std::mutex> lock(mLock);
+  // When enqueuing a ProcessCaptureRequestTask, clear the
+  // mTextureUpdateRequested flag. If this flag is set, the texture was not yet
+  // updated and it will be updated when processing the
+  // ProcessCaptureRequestTask anyway.
+  mTextureUpdateRequested = false;
   mQueue.emplace_back(std::move(task));
   mCondVar.notify_one();
 }
@@ -230,11 +333,10 @@
 }
 
 sp<Surface> VirtualCameraRenderThread::getInputSurface() {
-  return mInputSurfacePromise.get_future().get();
+  return mInputSurfaceFuture.get();
 }
 
-std::unique_ptr<ProcessCaptureRequestTask>
-VirtualCameraRenderThread::dequeueTask() {
+RenderThreadTask VirtualCameraRenderThread::dequeueTask() {
   std::unique_lock<std::mutex> lock(mLock);
   // Clang's thread safety analysis doesn't perform alias analysis,
   // so it doesn't support moveable std::unique_lock.
@@ -245,12 +347,20 @@
   ScopedLockAssertion lockAssertion(mLock);
 
   mCondVar.wait(lock, [this]() REQUIRES(mLock) {
-    return mPendingExit || !mQueue.empty();
+    return mPendingExit || mTextureUpdateRequested || !mQueue.empty();
   });
   if (mPendingExit) {
-    return nullptr;
+    // Render thread task with null task signals render thread to terminate.
+    return RenderThreadTask(nullptr);
   }
-  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
+  if (mTextureUpdateRequested) {
+    // If mTextureUpdateRequested, it's guaranteed the queue is empty, return
+    // kUpdateTextureTask to signal we want render thread to update the texture
+    // (consume buffer from the queue).
+    mTextureUpdateRequested = false;
+    return RenderThreadTask(kUpdateTextureTask);
+  }
+  RenderThreadTask task(std::move(mQueue.front()));
   mQueue.pop_front();
   return task;
 }
@@ -263,22 +373,96 @@
       std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
   mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
       EglTextureProgram::TextureFormat::RGBA);
-  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
-                                                           mInputSurfaceHeight);
+  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
+      mInputSurfaceSize.width, mInputSurfaceSize.height);
+  sp<FrameAvailableListenerProxy> frameAvailableListener =
+      sp<FrameAvailableListenerProxy>::make(
+          [this]() { requestTextureUpdate(); });
+  mEglSurfaceTexture->setFrameAvailableListener(frameAvailableListener);
+
   mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
 
-  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
-    processCaptureRequest(*task);
+  while (RenderThreadTask task = dequeueTask()) {
+    std::visit(
+        overloaded{[this](const std::unique_ptr<ProcessCaptureRequestTask>& t) {
+                     processTask(*t);
+                   },
+                   [this](const UpdateTextureTask&) {
+                     ALOGV("Idle update of the texture");
+                     mEglSurfaceTexture->updateTexture();
+                   }},
+        task);
   }
 
+  // Destroy EGL utilities still on the render thread.
+  mEglSurfaceTexture.reset();
+  mEglTextureRgbProgram.reset();
+  mEglTextureYuvProgram.reset();
+  mEglDisplayContext.reset();
+
   ALOGV("Render thread exiting");
 }
 
-void VirtualCameraRenderThread::processCaptureRequest(
+void VirtualCameraRenderThread::processTask(
     const ProcessCaptureRequestTask& request) {
-  const std::chrono::nanoseconds timestamp =
+  std::chrono::nanoseconds timestamp =
       std::chrono::duration_cast<std::chrono::nanoseconds>(
           std::chrono::steady_clock::now().time_since_epoch());
+  const std::chrono::nanoseconds lastAcquisitionTimestamp(
+      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
+                                                    std::memory_order_relaxed));
+
+  if (request.getRequestSettings().fpsRange) {
+    const int maxFps =
+        std::max(1, request.getRequestSettings().fpsRange->maxFps);
+    const std::chrono::nanoseconds minFrameDuration(
+        static_cast<uint64_t>(1e9 / maxFps));
+    const std::chrono::nanoseconds frameDuration =
+        timestamp - lastAcquisitionTimestamp;
+    if (frameDuration < minFrameDuration) {
+      // We're too fast for the configured maxFps, let's wait a bit.
+      const std::chrono::nanoseconds sleepTime =
+          minFrameDuration - frameDuration;
+      ALOGV("Current frame duration would  be %" PRIu64
+            " ns corresponding to, "
+            "sleeping for %" PRIu64
+            " ns before updating texture to match maxFps %d",
+            static_cast<uint64_t>(frameDuration.count()),
+            static_cast<uint64_t>(sleepTime.count()), maxFps);
+
+      std::this_thread::sleep_for(sleepTime);
+      timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
+          std::chrono::steady_clock::now().time_since_epoch());
+      mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
+                                                 std::memory_order_relaxed);
+    }
+  }
+
+  // Calculate the maximal amount of time we can afford to wait for next frame.
+  const std::chrono::nanoseconds maxFrameDuration =
+      getMaxFrameDuration(request.getRequestSettings());
+  const std::chrono::nanoseconds elapsedDuration =
+      timestamp - lastAcquisitionTimestamp;
+  if (elapsedDuration < maxFrameDuration) {
+    // We can afford to wait for next frame.
+    // Note that if there's already a new frame in the input Surface, the call
+    // below returns immediately.
+    bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
+                                                            elapsedDuration);
+    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
+        std::chrono::steady_clock::now().time_since_epoch());
+    if (!gotNewFrame) {
+      ALOGV(
+          "%s: No new frame received on input surface after waiting for "
+          "%" PRIu64 "ns, repeating last frame.",
+          __func__,
+          static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
+    }
+    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
+                                               std::memory_order_relaxed);
+  }
+  // Acquire new (most recent) image from the Surface.
+  mEglSurfaceTexture->updateTexture();
 
   CaptureResult captureResult;
   captureResult.fmqResultSize = 0;
@@ -287,19 +471,12 @@
   captureResult.partialResult = 1;
   captureResult.inputBuffer.streamId = -1;
   captureResult.physicalCameraMetadata.resize(0);
-  captureResult.result = createCaptureResultMetadata(timestamp);
+  captureResult.result = createCaptureResultMetadata(
+      timestamp, request.getRequestSettings(), mReportedSensorSize);
 
   const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
   captureResult.outputBuffers.resize(buffers.size());
 
-  if (mTestMode) {
-    // In test mode let's just render something to the Surface ourselves.
-    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
-                              request.getFrameNumber());
-  }
-
-  mEglSurfaceTexture->updateTexture();
-
   for (int i = 0; i < buffers.size(); ++i) {
     const CaptureRequestBuffer& reqBuffer = buffers[i];
     StreamBuffer& resBuffer = captureResult.outputBuffers[i];
@@ -316,9 +493,10 @@
     }
 
     auto status = streamConfig->format == PixelFormat::BLOB
-                      ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
-                                                   reqBuffer.getBufferId(),
-                                                   reqBuffer.getFence())
+                      ? renderIntoBlobStreamBuffer(
+                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+                            captureResult.result, request.getRequestSettings(),
+                            reqBuffer.getFence())
                       : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                     reqBuffer.getBufferId(),
                                                     reqBuffer.getFence());
@@ -354,7 +532,7 @@
     return;
   }
 
-  ALOGD("%s: Successfully called processCaptureResult", __func__);
+  ALOGV("%s: Successfully called processCaptureResult", __func__);
 }
 
 void VirtualCameraRenderThread::flushCaptureRequest(
@@ -398,9 +576,56 @@
   }
 }
 
+std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
+    const Resolution resolution, const int quality) {
+  if (resolution.width == 0 || resolution.height == 0) {
+    ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
+    return {};
+  }
+
+  ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
+        resolution.width, resolution.height, quality);
+  Resolution bufferSize = roundTo2DctSize(resolution);
+  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
+      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
+  if (framebuffer == nullptr) {
+    ALOGE(
+        "Failed to allocate temporary framebuffer for JPEG thumbnail "
+        "compression");
+    return {};
+  }
+
+  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
+  // doesn't correspond
+  // to the input texture aspect ratio.
+  if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
+                                Rect(resolution.width, resolution.height))
+           .isOk()) {
+    ALOGE(
+        "Failed to render input texture into temporary framebuffer for JPEG "
+        "thumbnail");
+    return {};
+  }
+
+  std::vector<uint8_t> compressedThumbnail;
+  compressedThumbnail.resize(kJpegThumbnailBufferSize);
+  ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
+        resolution.height);
+  std::optional<size_t> compressedSize =
+      compressJpeg(resolution.width, resolution.height, quality,
+                   framebuffer->getHardwareBuffer(), {},
+                   compressedThumbnail.size(), compressedThumbnail.data());
+  if (!compressedSize.has_value()) {
+    ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
+    return {};
+  }
+  compressedThumbnail.resize(compressedSize.value());
+  return compressedThumbnail;
+}
+
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
-    const int streamId, const int bufferId, sp<Fence> fence) {
-  ALOGV("%s", __func__);
+    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
+    const RequestSettings& requestSettings, sp<Fence> fence) {
   std::shared_ptr<AHardwareBuffer> hwBuffer =
       mSessionContext.fetchHardwareBuffer(streamId, bufferId);
   if (hwBuffer == nullptr) {
@@ -415,74 +640,77 @@
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
+  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
+        stream->width, stream->height, requestSettings.jpegQuality);
+
   // Let's create YUV framebuffer and render the surface into this.
   // This will take care about rescaling as well as potential format conversion.
+  // The buffer dimensions need to be rounded to the nearest multiple of the
+  // JPEG DCT size, however we pass the viewport corresponding to the size of
+  // the stream so the image will only be rendered to the area corresponding
+  // to the stream size.
+  Resolution bufferSize =
+      roundTo2DctSize(Resolution(stream->width, stream->height));
   std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
-      mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
+      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
   if (framebuffer == nullptr) {
     ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
   // Render into temporary framebuffer.
-  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
+  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
+      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
   if (!status.isOk()) {
     ALOGE("Failed to render input texture into temporary framebuffer");
     return status;
   }
 
-  AHardwareBuffer_Planes planes_info;
-
-  int32_t rawFence = fence != nullptr ? fence->get() : -1;
-  int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
-                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
-                                          rawFence, nullptr, &planes_info);
-  if (result != OK) {
-    ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
+  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+                             fence);
+  if (planesLock.getStatus() != OK) {
+    ALOGE("Failed to lock hwBuffer planes");
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
-  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
-  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
+  std::vector<uint8_t> app1ExifData =
+      createExif(Resolution(stream->width, stream->height), resultMetadata,
+                 createThumbnail(requestSettings.thumbnailResolution,
+                                 requestSettings.thumbnailJpegQuality));
 
-  bool compressionSuccess = true;
-  if (gBuffer != nullptr) {
-    android_ycbcr ycbcr;
-    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
-      // This should never happen since we're allocating the temporary buffer
-      // with YUV420 layout above.
-      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
-            gBuffer->getPixelFormat());
-      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-      return cameraStatus(Status::INTERNAL_ERROR);
-    }
+  unsigned long outBufferSize = stream->bufferSize - sizeof(CameraBlob);
+  void* outBuffer = (*planesLock).planes[0].data;
+  std::optional<size_t> compressedSize = compressJpeg(
+      stream->width, stream->height, requestSettings.jpegQuality,
+      framebuffer->getHardwareBuffer(), app1ExifData, outBufferSize, outBuffer);
 
-    status_t status =
-        gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
-    ALOGV("Locked buffers");
-    if (status != NO_ERROR) {
-      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-      ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
-      return cameraStatus(Status::INTERNAL_ERROR);
-    }
-
-    compressionSuccess =
-        compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
-                     stream->bufferSize, planes_info.planes[0].data);
-
-    status_t res = gBuffer->unlock();
-    if (res != NO_ERROR) {
-      ALOGE("Failed to unlock graphic buffer: %d", res);
-    }
-  } else {
-    compressionSuccess =
-        compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
-                          planes_info.planes[0].data);
+  if (!compressedSize.has_value()) {
+    ALOGE("%s: Failed to compress JPEG image", __func__);
+    return cameraStatus(Status::INTERNAL_ERROR);
   }
-  AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-  ALOGV("Unlocked buffers");
-  return compressionSuccess ? ndk::ScopedAStatus::ok()
-                            : cameraStatus(Status::INTERNAL_ERROR);
+
+  // Add the transport header at the end of the JPEG output buffer.
+  //
+  // jpegBlobId must start at byte[buffer_size - sizeof(CameraBlob)],
+  // where the buffer_size is the size of gralloc buffer.
+  //
+  // See
+  // hardware/interfaces/camera/device/aidl/android/hardware/camera/device/CameraBlobId.aidl
+  // for the full explanation of the following code.
+  CameraBlob cameraBlob{
+      .blobId = CameraBlobId::JPEG,
+      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
+
+  // Copy the cameraBlob to the end of the JPEG buffer.
+  uint8_t* jpegStreamEndAddress =
+      reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
+      (stream->bufferSize - sizeof(cameraBlob));
+  memcpy(jpegStreamEndAddress, &cameraBlob, sizeof(cameraBlob));
+
+  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
+        __func__, compressedSize.value());
+
+  return ndk::ScopedAStatus::ok();
 }
 
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
@@ -518,7 +746,7 @@
 }
 
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
-    EglFrameBuffer& framebuffer, sp<Fence> fence) {
+    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
   ALOGV("%s", __func__);
   // Wait for fence to clear.
   if (fence != nullptr && fence->isValid()) {
@@ -532,6 +760,11 @@
   mEglDisplayContext->makeCurrent();
   framebuffer.beforeDraw();
 
+  Rect viewportRect =
+      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
+  glViewport(viewportRect.left, viewportRect.top, viewportRect.getWidth(),
+             viewportRect.getHeight());
+
   sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
   if (textureBuffer == nullptr) {
     // If there's no current buffer, nothing was written to the surface and
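The frame pacing added to processTask() above reduces to two durations derived from the requested AE target FPS range: the thread sleeps if the previous acquisition happened less than 1/maxFps ago, and waits at most 1/minFps (falling back to kMinFps) for a fresh input frame before repeating the last one. A standalone sketch of that arithmetic follows; FpsRange is a simplified stand-in for the parsed request settings, not the patch's own type:

  #include <algorithm>
  #include <chrono>
  #include <cstdint>

  struct FpsRange { int minFps; int maxFps; };

  // Shortest allowed time between two acquisitions: never exceed maxFps
  // (e.g. ~33.3 ms at 30 fps).
  std::chrono::nanoseconds minFrameDuration(const FpsRange& range) {
    return std::chrono::nanoseconds(
        static_cast<uint64_t>(1e9 / std::max(1, range.maxFps)));
  }

  // Longest time to wait for a fresh input frame: never drop below minFps;
  // after this the previously acquired frame is repeated.
  std::chrono::nanoseconds maxFrameDuration(const FpsRange& range) {
    return std::chrono::nanoseconds(
        static_cast<uint64_t>(1e9 / std::max(1, range.minFps)));
  }
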
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index b3aaed8..aafed44 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -17,18 +17,26 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 
+#include <atomic>
+#include <cstdint>
 #include <deque>
 #include <future>
 #include <memory>
 #include <thread>
+#include <variant>
+#include <vector>
 
+#include "VirtualCameraCaptureRequest.h"
+#include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
 #include "android/binder_auto_utils.h"
 #include "util/EglDisplayContext.h"
 #include "util/EglFramebuffer.h"
 #include "util/EglProgram.h"
 #include "util/EglSurfaceTexture.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
@@ -53,7 +61,8 @@
 class ProcessCaptureRequestTask {
  public:
   ProcessCaptureRequestTask(
-      int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers);
+      int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+      const RequestSettings& requestSettings = {});
 
   // Returns frame number corresponding to the request.
   int getFrameNumber() const;
@@ -65,9 +74,30 @@
   // so it cannot be access outside of its lifetime.
   const std::vector<CaptureRequestBuffer>& getBuffers() const;
 
+  const RequestSettings& getRequestSettings() const;
+
  private:
   const int mFrameNumber;
   const std::vector<CaptureRequestBuffer> mBuffers;
+  const RequestSettings mRequestSettings;
+};
+
+struct UpdateTextureTask {};
+
+struct RenderThreadTask
+    : public std::variant<std::unique_ptr<ProcessCaptureRequestTask>,
+                          UpdateTextureTask> {
+  // Allow implicit conversion to bool.
+  //
+  // Returns false if the RenderThreadTask consists of a null
+  // ProcessCaptureRequestTask, which signals that the thread should terminate.
+  operator bool() const {
+    const bool isExitSignal =
+        std::holds_alternative<std::unique_ptr<ProcessCaptureRequestTask>>(
+            *this) &&
+        std::get<std::unique_ptr<ProcessCaptureRequestTask>>(*this) == nullptr;
+    return !isExitSignal;
+  }
 };
 
 // Wraps dedicated rendering thread and rendering business with corresponding
@@ -77,18 +107,17 @@
   // Create VirtualCameraRenderThread instance:
   // * sessionContext - VirtualCameraSessionContext reference for shared access
   // to mapped buffers.
-  // * inputWidth - requested width of input surface ("virtual camera sensor")
-  // * inputHeight - requested height of input surface ("virtual camera sensor")
+  // * inputSurfaceSize - requested size of input surface.
+  // * reportedSensorSize - reported static sensor size of virtual camera.
   // * cameraDeviceCallback - callback for corresponding camera instance
   // * testMode - when set to true, test pattern is rendered to input surface
   // before each capture request is processed to simulate client input.
   VirtualCameraRenderThread(
-      VirtualCameraSessionContext& sessionContext, int inputWidth,
-      int inputHeight,
+      VirtualCameraSessionContext& sessionContext, Resolution inputSurfaceSize,
+      Resolution reportedSensorSize,
       std::shared_ptr<
           ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
-          cameraDeviceCallback,
-      bool testMode = false);
+          cameraDeviceCallback);
 
   ~VirtualCameraRenderThread();
 
@@ -97,6 +126,12 @@
   // Stop rendering thread.
   void stop();
 
+  // Send a request to the render thread to update the texture.
+  // Currently queued buffers in the input surface will be consumed and the most
+  // recent buffer in the input surface will be attached to the texture; all
+  // other buffers will be returned to the buffer queue.
+  void requestTextureUpdate() EXCLUDES(mLock);
+
   // Equeue capture task for processing on render thread.
   void enqueueTask(std::unique_ptr<ProcessCaptureRequestTask> task)
       EXCLUDES(mLock);
@@ -108,13 +143,13 @@
   sp<Surface> getInputSurface();
 
  private:
-  std::unique_ptr<ProcessCaptureRequestTask> dequeueTask() EXCLUDES(mLock);
+  RenderThreadTask dequeueTask() EXCLUDES(mLock);
 
   // Rendering thread entry point.
   void threadLoop();
 
   // Process single capture request task (always called on render thread).
-  void processCaptureRequest(const ProcessCaptureRequestTask& captureRequestTask);
+  void processTask(const ProcessCaptureRequestTask& captureRequestTask);
 
   // Flush single capture request task returning the error status immediately.
   void flushCaptureRequest(const ProcessCaptureRequestTask& captureRequestTask);
@@ -122,13 +157,21 @@
   // TODO(b/301023410) - Refactor the actual rendering logic off this class for
   // easier testability.
 
+  // Create a thumbnail with the specified size for the current image.
+  // The compressed image size is limited to 32 KiB.
+  // Returns a vector with the compressed thumbnail if successful,
+  // an empty vector otherwise.
+  std::vector<uint8_t> createThumbnail(Resolution resolution, int quality);
+
   // Render current image to the BLOB buffer.
   // If fence is specified, this function will block until the fence is cleared
   // before writing to the buffer.
   // Always called on render thread.
-  ndk::ScopedAStatus renderIntoBlobStreamBuffer(const int streamId,
-                                                const int bufferId,
-                                                sp<Fence> fence = nullptr);
+  ndk::ScopedAStatus renderIntoBlobStreamBuffer(
+      const int streamId, const int bufferId,
+      const ::aidl::android::hardware::camera::device::CameraMetadata&
+          resultMetadata,
+      const RequestSettings& requestSettings, sp<Fence> fence = nullptr);
 
   // Render current image to the YCbCr buffer.
   // If fence is specified, this function will block until the fence is cleared
@@ -141,17 +184,17 @@
   // If fence is specified, this function will block until the fence is cleared
   // before writing to the buffer.
   // Always called on the render thread.
-  ndk::ScopedAStatus renderIntoEglFramebuffer(EglFrameBuffer& framebuffer,
-                                              sp<Fence> fence = nullptr);
+  ndk::ScopedAStatus renderIntoEglFramebuffer(
+      EglFrameBuffer& framebuffer, sp<Fence> fence = nullptr,
+      std::optional<Rect> viewport = std::nullopt);
 
   // Camera callback
   const std::shared_ptr<
       ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
       mCameraDeviceCallback;
 
-  const int mInputSurfaceWidth;
-  const int mInputSurfaceHeight;
-  const int mTestMode;
+  const Resolution mInputSurfaceSize;
+  const Resolution mReportedSensorSize;
 
   VirtualCameraSessionContext& mSessionContext;
 
@@ -161,8 +204,12 @@
   std::mutex mLock;
   std::deque<std::unique_ptr<ProcessCaptureRequestTask>> mQueue GUARDED_BY(mLock);
   std::condition_variable mCondVar;
+  volatile bool mTextureUpdateRequested GUARDED_BY(mLock);
   volatile bool mPendingExit GUARDED_BY(mLock);
 
+  // Acquisition timestamp of last frame.
+  std::atomic<uint64_t> mLastAcquisitionTimestampNanoseconds;
+
   // EGL helpers - constructed and accessed only from rendering thread.
   std::unique_ptr<EglDisplayContext> mEglDisplayContext;
   std::unique_ptr<EglTextureProgram> mEglTextureYuvProgram;
@@ -170,6 +217,7 @@
   std::unique_ptr<EglSurfaceTexture> mEglSurfaceTexture;
 
   std::promise<sp<Surface>> mInputSurfacePromise;
+  std::shared_future<sp<Surface>> mInputSurfaceFuture;
 };
 
 }  // namespace virtualcamera
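RenderThreadTask above is a std::variant: a null ProcessCaptureRequestTask pointer signals the render thread to exit, a non-null pointer carries a capture request, and UpdateTextureTask asks the thread to consume the latest input buffer. A small sketch of the same pattern with simplified stand-in types (CaptureTask below is an illustrative assumption, not the patch's type):

  #include <memory>
  #include <variant>

  struct CaptureTask {};
  struct UpdateTextureTask {};

  using Task = std::variant<std::unique_ptr<CaptureTask>, UpdateTextureTask>;

  // Mirrors RenderThreadTask::operator bool above: only a null CaptureTask
  // pointer terminates the loop; every other alternative keeps it running.
  bool keepRunning(const Task& task) {
    const auto* capture = std::get_if<std::unique_ptr<CaptureTask>>(&task);
    return !(capture != nullptr && *capture == nullptr);
  }
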
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index 1144997..7466089 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -18,19 +18,30 @@
 #define LOG_TAG "VirtualCameraService"
 #include "VirtualCameraService.h"
 
+#include <algorithm>
+#include <array>
 #include <cinttypes>
 #include <cstdint>
-#include <cstdio>
 #include <memory>
 #include <mutex>
+#include <optional>
+#include <regex>
+#include <variant>
 
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraProvider.h"
+#include "VirtualCameraTestInstance.h"
 #include "aidl/android/companion/virtualcamera/Format.h"
+#include "aidl/android/companion/virtualcamera/LensFacing.h"
 #include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "android/binder_auto_utils.h"
+#include "android/binder_interface_utils.h"
 #include "android/binder_libbinder.h"
+#include "android/binder_status.h"
 #include "binder/Status.h"
+#include "fmt/format.h"
+#include "util/EglDisplayContext.h"
+#include "util/EglUtil.h"
 #include "util/Permissions.h"
 #include "util/Util.h"
 
@@ -48,19 +59,39 @@
 
 namespace {
 
+constexpr char kCameraIdPrefix[] = "v";
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kMaxFps = 60;
+constexpr int kTestCameraDefaultInputFps = 30;
 constexpr char kEnableTestCameraCmd[] = "enable_test_camera";
 constexpr char kDisableTestCameraCmd[] = "disable_test_camera";
+constexpr char kHelp[] = "help";
 constexpr char kShellCmdHelp[] = R"(
+Usage:
+   cmd virtual_camera command [--option=value]
 Available commands:
  * enable_test_camera
+     Options:
+       --camera_id=(ID) - override the numerical ID for the test camera instance
+       --lens_facing=(front|back|external) - specify lens facing for the test camera instance
+       --input_fps=(fps) - specify input fps for the test camera, valid values are from 1 to 1000
+       --sensor_orientation=(0|90|180|270) - clockwise angle through which the output image
+           needs to be rotated to be upright on the device screen in its native orientation
  * disable_test_camera
 )";
 constexpr char kCreateVirtualDevicePermission[] =
     "android.permission.CREATE_VIRTUAL_DEVICE";
 
+constexpr std::array<const char*, 3> kRequiredEglExtensions = {
+    "GL_OES_EGL_image_external",
+    "GL_OES_EGL_image_external_essl3",
+    "GL_EXT_YUV_target",
+};
+
+// Numerical portion of the id to assign to the next created camera.
+static std::atomic_int sNextIdNumericalPortion{1000};
+
 ndk::ScopedAStatus validateConfiguration(
     const VirtualCameraConfiguration& configuration) {
   if (configuration.supportedStreamConfigs.empty()) {
@@ -69,6 +100,13 @@
         Status::EX_ILLEGAL_ARGUMENT);
   }
 
+  if (configuration.virtualCameraCallback == nullptr) {
+    ALOGE("%s: Input configuration is missing virtual camera callback",
+          __func__);
+    return ndk::ScopedAStatus::fromServiceSpecificError(
+        Status::EX_ILLEGAL_ARGUMENT);
+  }
+
   for (const SupportedStreamConfiguration& config :
        configuration.supportedStreamConfigs) {
     if (!isFormatSupportedForInput(config.width, config.height,
@@ -98,6 +136,88 @@
   return ndk::ScopedAStatus::ok();
 }
 
+enum class Command {
+  ENABLE_TEST_CAMERA,
+  DISABLE_TEST_CAMERA,
+  HELP,
+};
+
+struct CommandWithOptions {
+  Command command;
+  std::map<std::string, std::string> optionToValueMap;
+};
+
+std::optional<int> parseInt(const std::string& s) {
+  if (!std::all_of(s.begin(), s.end(), [](char c) { return std::isdigit(c); })) {
+    return std::nullopt;
+  }
+  int ret = atoi(s.c_str());
+  return ret > 0 ? std::optional(ret) : std::nullopt;
+}
+
+std::optional<LensFacing> parseLensFacing(const std::string& s) {
+  static const std::map<std::string, LensFacing> strToLensFacing{
+      {"front", LensFacing::FRONT},
+      {"back", LensFacing::BACK},
+      {"external", LensFacing::EXTERNAL}};
+  auto it = strToLensFacing.find(s);
+  return it == strToLensFacing.end() ? std::nullopt : std::optional(it->second);
+}
+
+std::variant<CommandWithOptions, std::string> parseCommand(
+    const char** args, const uint32_t numArgs) {
+  static const std::regex optionRegex("^--(\\w+)(?:=(.+))?$");
+  static const std::map<std::string, Command> strToCommand{
+      {kHelp, Command::HELP},
+      {kEnableTestCameraCmd, Command::ENABLE_TEST_CAMERA},
+      {kDisableTestCameraCmd, Command::DISABLE_TEST_CAMERA}};
+
+  if (numArgs < 1) {
+    return CommandWithOptions{.command = Command::HELP};
+  }
+
+  // We interpret the first argument as the command.
+  auto it = strToCommand.find(args[0]);
+  if (it == strToCommand.end()) {
+    return "Unknown command: " + std::string(args[0]);
+  }
+
+  CommandWithOptions cmd{.command = it->second};
+
+  for (int i = 1; i < numArgs; i++) {
+    std::cmatch cm;
+    if (!std::regex_match(args[i], cm, optionRegex)) {
+      return "Not an option: " + std::string(args[i]);
+    }
+
+    cmd.optionToValueMap[cm[1]] = cm[2];
+  }
+
+  return cmd;
+}
+
+ndk::ScopedAStatus verifyRequiredEglExtensions() {
+  EglDisplayContext context;
+  for (const char* eglExtension : kRequiredEglExtensions) {
+    if (!isGlExtensionSupported(eglExtension)) {
+      ALOGE("%s not supported", eglExtension);
+      return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+          EX_UNSUPPORTED_OPERATION,
+          fmt::format(
+              "Cannot create virtual camera, because required EGL extension {} "
+              "is not supported on this system",
+              eglExtension)
+              .c_str());
+    }
+  }
+  return ndk::ScopedAStatus::ok();
+}
+
+std::string createCameraId(const int32_t deviceId) {
+  return kCameraIdPrefix + std::to_string(deviceId) + "_" +
+         std::to_string(sNextIdNumericalPortion++);
+}
+
 }  // namespace
 
 VirtualCameraService::VirtualCameraService(
@@ -109,7 +229,16 @@
 
 ndk::ScopedAStatus VirtualCameraService::registerCamera(
     const ::ndk::SpAIBinder& token,
-    const VirtualCameraConfiguration& configuration, bool* _aidl_return) {
+    const VirtualCameraConfiguration& configuration, const int32_t deviceId,
+    bool* _aidl_return) {
+  return registerCamera(token, configuration, createCameraId(deviceId),
+                        deviceId, _aidl_return);
+}
+
+ndk::ScopedAStatus VirtualCameraService::registerCamera(
+    const ::ndk::SpAIBinder& token,
+    const VirtualCameraConfiguration& configuration,
+    const std::string& cameraId, const int32_t deviceId, bool* _aidl_return) {
   if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
     ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
           getpid(), getuid(), kCreateVirtualDevicePermission);
@@ -121,7 +250,13 @@
         Status::EX_ILLEGAL_ARGUMENT);
   }
 
-  *_aidl_return = true;
+  if (mVerifyEglExtensions) {
+    auto status = verifyRequiredEglExtensions();
+    if (!status.isOk()) {
+      *_aidl_return = false;
+      return status;
+    }
+  }
 
   auto status = validateConfiguration(configuration);
   if (!status.isOk()) {
@@ -141,7 +276,7 @@
   }
 
   std::shared_ptr<VirtualCameraDevice> camera =
-      mVirtualCameraProvider->createCamera(configuration);
+      mVirtualCameraProvider->createCamera(configuration, cameraId, deviceId);
   if (camera == nullptr) {
     ALOGE("Failed to create camera for binder token 0x%" PRIxPTR,
           reinterpret_cast<uintptr_t>(token.get()));
@@ -151,6 +286,7 @@
   }
 
   mTokenToCameraName[token] = camera->getCameraName();
+  *_aidl_return = true;
   return ndk::ScopedAStatus::ok();
 }
 
@@ -175,11 +311,12 @@
 
   mVirtualCameraProvider->removeCamera(it->second);
 
+  mTokenToCameraName.erase(it);
   return ndk::ScopedAStatus::ok();
 }
 
 ndk::ScopedAStatus VirtualCameraService::getCameraId(
-        const ::ndk::SpAIBinder& token, int32_t* _aidl_return) {
+    const ::ndk::SpAIBinder& token, std::string* _aidl_return) {
   if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
     ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
           getpid(), getuid(), kCreateVirtualDevicePermission);
@@ -188,7 +325,7 @@
 
   if (_aidl_return == nullptr) {
     return ndk::ScopedAStatus::fromServiceSpecificError(
-            Status::EX_ILLEGAL_ARGUMENT);
+        Status::EX_ILLEGAL_ARGUMENT);
   }
 
   auto camera = getCamera(token);
@@ -220,8 +357,7 @@
   return mVirtualCameraProvider->getCamera(it->second);
 }
 
-binder_status_t VirtualCameraService::handleShellCommand(int in, int out,
-                                                         int err,
+binder_status_t VirtualCameraService::handleShellCommand(int, int out, int err,
                                                          const char** args,
                                                          uint32_t numArgs) {
   if (numArgs <= 0) {
@@ -230,27 +366,104 @@
     return STATUS_OK;
   }
 
-  if (args == nullptr || args[0] == nullptr) {
+  auto isNullptr = [](const char* ptr) { return ptr == nullptr; };
+  if (args == nullptr || std::any_of(args, args + numArgs, isNullptr)) {
     return STATUS_BAD_VALUE;
   }
-  const char* const cmd = args[0];
-  if (strcmp(kEnableTestCameraCmd, cmd) == 0) {
-    enableTestCameraCmd(in, err);
-  } else if (strcmp(kDisableTestCameraCmd, cmd) == 0) {
-    disableTestCameraCmd(in);
-  } else {
-    dprintf(out, kShellCmdHelp);
+
+  std::variant<CommandWithOptions, std::string> cmdOrErrorMessage =
+      parseCommand(args, numArgs);
+  if (std::holds_alternative<std::string>(cmdOrErrorMessage)) {
+    dprintf(err, "Error: %s\n",
+            std::get<std::string>(cmdOrErrorMessage).c_str());
+    return STATUS_BAD_VALUE;
   }
 
+  const CommandWithOptions& cmd =
+      std::get<CommandWithOptions>(cmdOrErrorMessage);
+  binder_status_t status = STATUS_OK;
+  switch (cmd.command) {
+    case Command::HELP:
+      dprintf(out, kShellCmdHelp);
+      break;
+    case Command::ENABLE_TEST_CAMERA:
+      status = enableTestCameraCmd(out, err, cmd.optionToValueMap);
+      break;
+    case Command::DISABLE_TEST_CAMERA:
+      disableTestCameraCmd(out);
+      break;
+  }
+
+  fsync(err);
   fsync(out);
-  return STATUS_OK;
+  return status;
 }
 
-void VirtualCameraService::enableTestCameraCmd(const int out, const int err) {
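+// Supported options (all optional): camera_id, lens_facing
+// (front|back|external), input_fps (1-1000) and sensor_orientation
+// (0|90|180|270).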
+binder_status_t VirtualCameraService::enableTestCameraCmd(
+    const int out, const int err,
+    const std::map<std::string, std::string>& options) {
   if (mTestCameraToken != nullptr) {
-    dprintf(out, "Test camera is already enabled (%s).",
+    dprintf(out, "Test camera is already enabled (%s).\n",
             getCamera(mTestCameraToken)->getCameraName().c_str());
-    return;
+    return STATUS_OK;
+  }
+
+  std::optional<std::string> cameraId;
+  auto it = options.find("camera_id");
+  if (it != options.end()) {
+    cameraId = it->second;
+    if (cameraId->empty()) {
+      dprintf(err, "Invalid camera_id: %s\n", it->second.c_str());
+      return STATUS_BAD_VALUE;
+    }
+  }
+
+  std::optional<LensFacing> lensFacing;
+  it = options.find("lens_facing");
+  if (it != options.end()) {
+    lensFacing = parseLensFacing(it->second);
+    if (!lensFacing.has_value()) {
+      dprintf(err, "Invalid lens_facing: %s\n, must be front|back|external",
+              it->second.c_str());
+      return STATUS_BAD_VALUE;
+    }
+  }
+
+  std::optional<int> inputFps;
+  it = options.find("input_fps");
+  if (it != options.end()) {
+    inputFps = parseInt(it->second);
+    if (!inputFps.has_value() || inputFps.value() < 1 ||
+        inputFps.value() > 1000) {
+      dprintf(err, "Invalid input fps: %s\n, must be integer in <1,1000> range.",
+              it->second.c_str());
+      return STATUS_BAD_VALUE;
+    }
+  }
+
+  std::optional<SensorOrientation> sensorOrientation;
+  std::optional<int> sensorOrientationInt;
+  it = options.find("sensor_orientation");
+  if (it != options.end()) {
+    sensorOrientationInt = parseInt(it->second);
+    switch (sensorOrientationInt.value_or(-1)) {
+      case 0:
+        sensorOrientation = SensorOrientation::ORIENTATION_0;
+        break;
+      case 90:
+        sensorOrientation = SensorOrientation::ORIENTATION_90;
+        break;
+      case 180:
+        sensorOrientation = SensorOrientation::ORIENTATION_180;
+        break;
+      case 270:
+        sensorOrientation = SensorOrientation::ORIENTATION_270;
+        break;
+      default:
+        dprintf(err, "Invalid sensor rotation: %s\n, must be 0, 90, 180 or 270.",
+                it->second.c_str());
+        return STATUS_BAD_VALUE;
+    }
   }
 
   sp<BBinder> token = sp<BBinder>::make();
@@ -260,16 +473,24 @@
   VirtualCameraConfiguration configuration;
   configuration.supportedStreamConfigs.push_back({.width = kVgaWidth,
                                                   .height = kVgaHeight,
-                                                  Format::YUV_420_888,
+                                                  .pixelFormat = Format::RGBA_8888,
                                                   .maxFps = kMaxFps});
-  configuration.lensFacing = LensFacing::EXTERNAL;
-  registerCamera(mTestCameraToken, configuration, &ret);
+  configuration.lensFacing = lensFacing.value_or(LensFacing::EXTERNAL);
+  configuration.sensorOrientation =
+      sensorOrientation.value_or(SensorOrientation::ORIENTATION_0);
+  configuration.virtualCameraCallback =
+      ndk::SharedRefBase::make<VirtualCameraTestInstance>(
+          inputFps.value_or(kTestCameraDefaultInputFps));
+  registerCamera(mTestCameraToken, configuration,
+                 cameraId.value_or(std::to_string(sNextIdNumericalPortion++)),
+                 kDefaultDeviceId, &ret);
   if (ret) {
-    dprintf(out, "Successfully registered test camera %s",
+    dprintf(out, "Successfully registered test camera %s\n",
             getCamera(mTestCameraToken)->getCameraName().c_str());
   } else {
-    dprintf(err, "Failed to create test camera");
+    dprintf(err, "Failed to create test camera\n");
   }
+  return STATUS_OK;
 }
 
 void VirtualCameraService::disableTestCameraCmd(const int out) {
diff --git a/services/camera/virtualcamera/VirtualCameraService.h b/services/camera/virtualcamera/VirtualCameraService.h
index d573986..4ef01c7 100644
--- a/services/camera/virtualcamera/VirtualCameraService.h
+++ b/services/camera/virtualcamera/VirtualCameraService.h
@@ -43,15 +43,24 @@
       const ::ndk::SpAIBinder& token,
       const ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
           configuration,
-      bool* _aidl_return) override EXCLUDES(mLock);
+      int32_t deviceId, bool* _aidl_return) override EXCLUDES(mLock);
+
+  // Register camera corresponding to the binder token.
+  ndk::ScopedAStatus registerCamera(
+      const ::ndk::SpAIBinder& token,
+      const ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
+          configuration,
+      const std::string& cameraId, int32_t deviceId, bool* _aidl_return)
+      EXCLUDES(mLock);
 
   // Unregisters camera corresponding to the binder token.
   ndk::ScopedAStatus unregisterCamera(const ::ndk::SpAIBinder& token) override
       EXCLUDES(mLock);
 
   // Returns the camera id corresponding to the binder token.
-  ndk::ScopedAStatus getCameraId(
-      const ::ndk::SpAIBinder& token, int32_t* _aidl_return) override EXCLUDES(mLock);
+  ndk::ScopedAStatus getCameraId(const ::ndk::SpAIBinder& token,
+                                 std::string* _aidl_return) override
+      EXCLUDES(mLock);
 
   // Returns VirtualCameraDevice corresponding to binder token or nullptr if
   // there's no camera asociated with the token.
@@ -62,14 +71,24 @@
   binder_status_t handleShellCommand(int in, int out, int err, const char** args,
                                      uint32_t numArgs) override;
 
+  // Do not verify the presence of required EGL extensions when registering a
+  // virtual camera. Only to be used by unit tests.
+  void disableEglVerificationForTest() {
+    mVerifyEglExtensions = false;
+  }
+
+  // Default virtual device id (the host device id)
+  static constexpr int kDefaultDeviceId = 0;
+
  private:
   // Create and enable test camera instance if there's none.
-  void enableTestCameraCmd(int out, int err);
+  binder_status_t enableTestCameraCmd(
+      int out, int err, const std::map<std::string, std::string>& options);
   // Disable and destroy test camera instance if there's one.
   void disableTestCameraCmd(int out);
 
   std::shared_ptr<VirtualCameraProvider> mVirtualCameraProvider;
-
+  bool mVerifyEglExtensions = true;
   const PermissionsProxy& mPermissionProxy;
 
   std::mutex mLock;
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index 03d63b8..88929cc 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -21,12 +21,14 @@
 #include <algorithm>
 #include <atomic>
 #include <chrono>
+#include <cmath>
 #include <cstddef>
 #include <cstdint>
 #include <cstring>
 #include <map>
 #include <memory>
 #include <mutex>
+#include <numeric>
 #include <optional>
 #include <tuple>
 #include <unordered_set>
@@ -38,13 +40,17 @@
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraRenderThread.h"
 #include "VirtualCameraStream.h"
+#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BufferCache.h"
 #include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/CaptureRequest.h"
 #include "aidl/android/hardware/camera/device/HalStream.h"
 #include "aidl/android/hardware/camera/device/NotifyMsg.h"
+#include "aidl/android/hardware/camera/device/RequestTemplate.h"
 #include "aidl/android/hardware/camera/device/ShutterMsg.h"
+#include "aidl/android/hardware/camera/device/Stream.h"
 #include "aidl/android/hardware/camera/device/StreamBuffer.h"
 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
 #include "aidl/android/hardware/camera/device/StreamRotation.h"
@@ -59,16 +65,15 @@
 #include "util/EglFramebuffer.h"
 #include "util/EglProgram.h"
 #include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
-#include "util/TestPatternHelper.h"
+#include "util/MetadataUtil.h"
 #include "util/Util.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 
-using ::aidl::android::companion::virtualcamera::Format;
 using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
+using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::BufferCache;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
@@ -81,7 +86,6 @@
 using ::aidl::android::hardware::camera::device::Stream;
 using ::aidl::android::hardware::camera::device::StreamBuffer;
 using ::aidl::android::hardware::camera::device::StreamConfiguration;
-using ::aidl::android::hardware::camera::device::StreamRotation;
 using ::aidl::android::hardware::common::fmq::MQDescriptor;
 using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
 using ::aidl::android::hardware::graphics::common::BufferUsage;
@@ -97,36 +101,75 @@
 
 // Size of request/result metadata fast message queue.
 // Setting to 0 to always disables FMQ.
-static constexpr size_t kMetadataMsgQueueSize = 0;
+constexpr size_t kMetadataMsgQueueSize = 0;
 
 // Maximum number of buffers to use per single stream.
-static constexpr size_t kMaxStreamBuffers = 2;
+constexpr size_t kMaxStreamBuffers = 2;
 
-CameraMetadata createDefaultRequestSettings(RequestTemplate type) {
-  hardware::camera::common::V1_0::helper::CameraMetadata metadataHelper;
+// Thumbnail size (0,0) corresponds to disabling the thumbnail.
+const Resolution kDefaultJpegThumbnailSize(0, 0);
 
-  camera_metadata_enum_android_control_capture_intent_t intent =
-      ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
+    const RequestTemplate type) {
   switch (type) {
     case RequestTemplate::PREVIEW:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
     case RequestTemplate::STILL_CAPTURE:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
     case RequestTemplate::VIDEO_RECORD:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
     case RequestTemplate::VIDEO_SNAPSHOT:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
     default:
-      // Leave default.
-      break;
+      // Return PREVIEW by default
+      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   }
+}
 
-  auto metadata = MetadataBuilder().setControlCaptureIntent(intent).build();
-  return (metadata != nullptr) ? std::move(*metadata) : CameraMetadata();
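+// Returns the maximum maxFps value across all supported input configurations.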
+int getMaxFps(const std::vector<SupportedStreamConfiguration>& configs) {
+  return std::transform_reduce(
+      configs.begin(), configs.end(), 0,
+      [](const int a, const int b) { return std::max(a, b); },
+      [](const SupportedStreamConfiguration& config) { return config.maxFps; });
+}
+
+CameraMetadata createDefaultRequestSettings(
+    const RequestTemplate type,
+    const std::vector<SupportedStreamConfiguration>& inputConfigs) {
+  int maxFps = getMaxFps(inputConfigs);
+  auto metadata =
+      MetadataBuilder()
+          .setAberrationCorrectionMode(
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+          .setControlCaptureIntent(requestTemplateToIntent(type))
+          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+          .setControlAeExposureCompensation(0)
+          .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
+          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
+          .setControlAePrecaptureTrigger(
+              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+          .setFlashMode(ANDROID_FLASH_MODE_OFF)
+          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+          .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
+          .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
+          .setJpegThumbnailSize(0, 0)
+          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+          .build();
+  if (metadata == nullptr) {
+    ALOGE("%s: Failed to construct metadata for default request type %s",
+          __func__, toString(type).c_str());
+    return CameraMetadata();
+  } else {
+    ALOGV("%s: Successfully created metadata for request type %s", __func__,
+          toString(type).c_str());
+  }
+  return *metadata;
 }
 
 HalStream getHalStream(const Stream& stream) {
@@ -158,6 +201,92 @@
                             }));
 }
 
+Resolution resolutionFromStream(const Stream& stream) {
+  return Resolution(stream.width, stream.height);
+}
+
+Resolution resolutionFromInputConfig(
+    const SupportedStreamConfiguration& inputConfig) {
+  return Resolution(inputConfig.width, inputConfig.height);
+}
+
+std::optional<Resolution> resolutionFromSurface(const sp<Surface> surface) {
+  Resolution res{0, 0};
+  if (surface == nullptr) {
+    ALOGE("%s: Cannot get resolution from null surface", __func__);
+    return std::nullopt;
+  }
+
+  int status = surface->query(NATIVE_WINDOW_WIDTH, &res.width);
+  if (status != NO_ERROR) {
+    ALOGE("%s: Failed to get width from surface", __func__);
+    return std::nullopt;
+  }
+
+  status = surface->query(NATIVE_WINDOW_HEIGHT, &res.height);
+  if (status != NO_ERROR) {
+    ALOGE("%s: Failed to get height from surface", __func__);
+    return std::nullopt;
+  }
+  return res;
+}
+
+std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
+    const std::vector<Stream>& requestedStreams,
+    const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
+  Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
+  Resolution maxResolution = resolutionFromStream(maxResolutionStream);
+
+  // Find best fitting stream to satisfy all requested streams:
+  // Best fitting => same or higher resolution as input with lowest pixel count
+  // difference and same aspect ratio.
+  auto isBetterInputConfig = [maxResolution](
+                                 const SupportedStreamConfiguration& configA,
+                                 const SupportedStreamConfiguration& configB) {
+    int maxResPixelCount = maxResolution.width * maxResolution.height;
+    int pixelCountDiffA =
+        std::abs((configA.width * configA.height) - maxResPixelCount);
+    int pixelCountDiffB =
+        std::abs((configB.width * configB.height) - maxResPixelCount);
+
+    return pixelCountDiffA < pixelCountDiffB;
+  };
+
+  std::optional<SupportedStreamConfiguration> bestConfig;
+  for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
+    Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
+    if (inputConfigResolution < maxResolution ||
+        !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
+      // We don't want to upscale from lower resolution, or use different aspect
+      // ratio, skip.
+      continue;
+    }
+
+    if (!bestConfig.has_value() ||
+        isBetterInputConfig(inputConfig, bestConfig.value())) {
+      bestConfig = inputConfig;
+    }
+  }
+
+  return bestConfig;
+}
+
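+// Builds per-request settings from the capture request metadata, falling back
+// to the device defaults for keys that are not present.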
+RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
+  return RequestSettings{
+      .jpegQuality = getJpegQuality(metadata).value_or(
+          VirtualCameraDevice::kDefaultJpegQuality),
+      .jpegOrientation = getJpegOrientation(metadata),
+      .thumbnailResolution =
+          getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
+      .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
+          VirtualCameraDevice::kDefaultJpegQuality),
+      .fpsRange = getFpsRange(metadata),
+      .captureIntent = getCaptureIntent(metadata).value_or(
+          ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
+      .gpsCoordinates = getGpsCoordinates(metadata),
+      .aePrecaptureTrigger = getPrecaptureTrigger(metadata)};
+}
+
 }  // namespace
 
 VirtualCameraSession::VirtualCameraSession(
@@ -182,13 +311,13 @@
 
 ndk::ScopedAStatus VirtualCameraSession::close() {
   ALOGV("%s", __func__);
-
-  if (mVirtualCameraClientCallback != nullptr) {
-    mVirtualCameraClientCallback->onStreamClosed(/*streamId=*/0);
-  }
-
   {
     std::lock_guard<std::mutex> lock(mLock);
+
+    if (mVirtualCameraClientCallback != nullptr) {
+      mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
+    }
+
     if (mRenderThread != nullptr) {
       mRenderThread->stop();
       mRenderThread = nullptr;
@@ -223,15 +352,14 @@
   halStreams.clear();
   halStreams.resize(in_requestedConfiguration.streams.size());
 
-  sp<Surface> inputSurface = nullptr;
-  int inputWidth;
-  int inputHeight;
-
   if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
     ALOGE("%s: Requested stream configuration is not supported", __func__);
     return cameraStatus(Status::ILLEGAL_ARGUMENT);
   }
 
+  sp<Surface> inputSurface = nullptr;
+  int inputStreamId = -1;
+  std::optional<SupportedStreamConfiguration> inputConfig;
   {
     std::lock_guard<std::mutex> lock(mLock);
     for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
@@ -241,18 +369,58 @@
       }
     }
 
-    Stream maxResStream = getHighestResolutionStream(streams);
-    inputWidth = maxResStream.width;
-    inputHeight = maxResStream.height;
-    if (mRenderThread == nullptr) {
-      // If there's no client callback, start camera in test mode.
-      const bool testMode = mVirtualCameraClientCallback == nullptr;
-      mRenderThread = std::make_unique<VirtualCameraRenderThread>(
-          mSessionContext, inputWidth, inputHeight, mCameraDeviceCallback,
-          testMode);
-      mRenderThread->start();
-      inputSurface = mRenderThread->getInputSurface();
+    inputConfig = pickInputConfigurationForStreams(
+        streams, virtualCamera->getInputConfigs());
+    if (!inputConfig.has_value()) {
+      ALOGE(
+          "%s: Failed to pick any input configuration for stream configuration "
+          "request: %s",
+          __func__, in_requestedConfiguration.toString().c_str());
+      return cameraStatus(Status::ILLEGAL_ARGUMENT);
     }
+
+    if (mRenderThread != nullptr) {
+      // If there's already a render thread, this is not the first
+      // configuration call. If the existing surface has the same resolution
+      // and pixel format as the picked config, there's nothing to do; the
+      // current render thread can serve the new set of streams. If it
+      // differs, we need to discard the current surface and reinitialize the
+      // render thread.
+
+      std::optional<Resolution> currentInputResolution =
+          resolutionFromSurface(mRenderThread->getInputSurface());
+      if (currentInputResolution.has_value() &&
+          *currentInputResolution == resolutionFromInputConfig(*inputConfig)) {
+        ALOGI(
+            "%s: Newly configured set of streams matches existing client "
+            "surface (%dx%d)",
+            __func__, currentInputResolution->width,
+            currentInputResolution->height);
+        return ndk::ScopedAStatus::ok();
+      }
+
+      if (mVirtualCameraClientCallback != nullptr) {
+        mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
+      }
+
+      ALOGV(
+          "%s: Newly requested output streams are not suitable for "
+          "pre-existing surface (%dx%d), creating new surface (%dx%d)",
+          __func__, currentInputResolution->width,
+          currentInputResolution->height, inputConfig->width,
+          inputConfig->height);
+
+      mRenderThread->flush();
+      mRenderThread->stop();
+    }
+
+    mRenderThread = std::make_unique<VirtualCameraRenderThread>(
+        mSessionContext, resolutionFromInputConfig(*inputConfig),
+        virtualCamera->getMaxInputResolution(), mCameraDeviceCallback);
+    mRenderThread->start();
+    inputSurface = mRenderThread->getInputSurface();
+    inputStreamId = mCurrentInputStreamId =
+        virtualCamera->allocateInputStreamId();
   }
 
   if (mVirtualCameraClientCallback != nullptr && inputSurface != nullptr) {
@@ -260,11 +428,10 @@
     // support for multiple input streams is implemented. For now we always
     // create single texture.
     mVirtualCameraClientCallback->onStreamConfigured(
-        /*streamId=*/0, aidl::android::view::Surface(inputSurface.get()),
-        inputWidth, inputHeight, Format::YUV_420_888);
+        inputStreamId, aidl::android::view::Surface(inputSurface.get()),
+        inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
   }
 
-  mFirstRequest.store(true);
   return ndk::ScopedAStatus::ok();
 }
 
@@ -272,12 +439,22 @@
     RequestTemplate in_type, CameraMetadata* _aidl_return) {
   ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));
 
+  std::shared_ptr<VirtualCameraDevice> camera = mCameraDevice.lock();
+  if (camera == nullptr) {
+    ALOGW(
+        "%s: constructDefaultRequestSettings called on already unregistered "
+        "camera",
+        __func__);
+    return cameraStatus(Status::CAMERA_DISCONNECTED);
+  }
+
   switch (in_type) {
     case RequestTemplate::PREVIEW:
     case RequestTemplate::STILL_CAPTURE:
     case RequestTemplate::VIDEO_RECORD:
     case RequestTemplate::VIDEO_SNAPSHOT: {
-      *_aidl_return = createDefaultRequestSettings(in_type);
+      *_aidl_return =
+          createDefaultRequestSettings(in_type, camera->getInputConfigs());
       return ndk::ScopedAStatus::ok();
     }
     case RequestTemplate::MANUAL:
@@ -394,16 +571,30 @@
 
 ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
     const CaptureRequest& request) {
-  ALOGD("%s: request: %s", __func__, request.toString().c_str());
-
-  if (mFirstRequest.exchange(false) && request.settings.metadata.empty()) {
-    return cameraStatus(Status::ILLEGAL_ARGUMENT);
-  }
+  ALOGV("%s: request: %s", __func__, request.toString().c_str());
 
   std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
+  RequestSettings requestSettings;
+  int currentInputStreamId;
   {
     std::lock_guard<std::mutex> lock(mLock);
+
+    // If the metadata is empty, the last received metadata applies; if it's
+    // non-empty, update it.
+    if (!request.settings.metadata.empty()) {
+      mCurrentRequestMetadata = request.settings;
+    }
+
+    // We don't have any metadata for this request, which means none was
+    // received in the first request; this is an error state.
+    if (mCurrentRequestMetadata.metadata.empty()) {
+      return cameraStatus(Status::ILLEGAL_ARGUMENT);
+    }
+
+    requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);
+
     cameraCallback = mCameraDeviceCallback;
+    currentInputStreamId = mCurrentInputStreamId;
   }
 
   if (cameraCallback == nullptr) {
@@ -436,12 +627,12 @@
       return cameraStatus(Status::INTERNAL_ERROR);
     }
     mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
-        request.frameNumber, taskBuffers));
+        request.frameNumber, taskBuffers, requestSettings));
   }
 
   if (mVirtualCameraClientCallback != nullptr) {
     auto status = mVirtualCameraClientCallback->onProcessCaptureRequest(
-        /*streamId=*/0, request.frameNumber);
+        currentInputStreamId, request.frameNumber);
     if (!status.isOk()) {
       ALOGE(
           "Failed to invoke onProcessCaptureRequest client callback for frame "
diff --git a/services/camera/virtualcamera/VirtualCameraSession.h b/services/camera/virtualcamera/VirtualCameraSession.h
index 82a7a34..c2044b9 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.h
+++ b/services/camera/virtualcamera/VirtualCameraSession.h
@@ -25,6 +25,7 @@
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/companion/virtualcamera/IVirtualCameraCallback.h"
 #include "aidl/android/hardware/camera/device/BnCameraDeviceSession.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
 #include "utils/Mutex.h"
 
@@ -138,9 +139,12 @@
       int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
   std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;
 
-  std::atomic_bool mFirstRequest{true};
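+  // Last non-empty request settings received from the framework; requests
+  // arriving with empty metadata reuse these settings (see
+  // processCaptureRequest).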
+  aidl::android::hardware::camera::device::CameraMetadata mCurrentRequestMetadata
+      GUARDED_BY(mLock);
 
   std::unique_ptr<VirtualCameraRenderThread> mRenderThread GUARDED_BY(mLock);
+
+  int mCurrentInputStreamId GUARDED_BY(mLock);
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraSessionContext.cc b/services/camera/virtualcamera/VirtualCameraSessionContext.cc
index 284ad05..aab2d0d 100644
--- a/services/camera/virtualcamera/VirtualCameraSessionContext.cc
+++ b/services/camera/virtualcamera/VirtualCameraSessionContext.cc
@@ -129,7 +129,8 @@
           streamId);
     return std::optional<Stream>();
   }
-  return {it->second->getStreamConfig()};
+  VirtualCameraStream& stream = *it->second;
+  return {stream.getStreamConfig()};
 }
 
 std::shared_ptr<AHardwareBuffer> VirtualCameraSessionContext::fetchHardwareBuffer(
@@ -141,7 +142,8 @@
           streamId);
     return nullptr;
   }
-  return it->second->getHardwareBuffer(bufferId);
+  VirtualCameraStream& stream = *it->second;
+  return stream.getHardwareBuffer(bufferId);
 }
 
 std::shared_ptr<EglFrameBuffer>
@@ -154,7 +156,8 @@
           streamId);
     return nullptr;
   }
-  return it->second->getEglFrameBuffer(eglDisplay, bufferId);
+  VirtualCameraStream& stream = *it->second;
+  return stream.getEglFrameBuffer(eglDisplay, bufferId);
 }
 
 std::set<int> VirtualCameraSessionContext::getStreamIds() const {
diff --git a/services/camera/virtualcamera/VirtualCameraStream.cc b/services/camera/virtualcamera/VirtualCameraStream.cc
index 03da171..fad6cac 100644
--- a/services/camera/virtualcamera/VirtualCameraStream.cc
+++ b/services/camera/virtualcamera/VirtualCameraStream.cc
@@ -26,8 +26,6 @@
 
 #include "EGL/egl.h"
 #include "aidl/android/hardware/camera/device/Stream.h"
-#include "aidl/android/hardware/camera/device/StreamBuffer.h"
-#include "aidl/android/hardware/graphics/common/PixelFormat.h"
 #include "aidlcommonsupport/NativeHandle.h"
 #include "android/hardware_buffer.h"
 #include "cutils/native_handle.h"
@@ -39,52 +37,33 @@
 namespace virtualcamera {
 
 using ::aidl::android::hardware::camera::device::Stream;
-using ::aidl::android::hardware::camera::device::StreamBuffer;
 using ::aidl::android::hardware::common::NativeHandle;
-using ::aidl::android::hardware::graphics::common::PixelFormat;
 
 namespace {
 
-sp<GraphicBuffer> createBlobGraphicBuffer(GraphicBufferMapper& mapper,
-                                          buffer_handle_t bufferHandle) {
-  uint64_t allocationSize;
-  uint64_t usage;
-  uint64_t layerCount;
-  if (mapper.getAllocationSize(bufferHandle, &allocationSize) != NO_ERROR ||
-      mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
-      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
-    ALOGE("Error fetching metadata for the imported BLOB buffer handle.");
-    return nullptr;
-  }
-
-  return sp<GraphicBuffer>::make(
-      bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE,
-      allocationSize, /*height=*/1, static_cast<int>(ui::PixelFormat::BLOB),
-      layerCount, usage, 0);
-}
-
-sp<GraphicBuffer> createYCbCr420GraphicBuffer(GraphicBufferMapper& mapper,
-                                              buffer_handle_t bufferHandle) {
+sp<GraphicBuffer> createGraphicBuffer(GraphicBufferMapper& mapper,
+                                      const buffer_handle_t bufferHandle) {
   uint64_t width;
   uint64_t height;
   uint64_t usage;
   uint64_t layerCount;
+  ui::PixelFormat pixelFormat;
   if (mapper.getWidth(bufferHandle, &width) != NO_ERROR ||
       mapper.getHeight(bufferHandle, &height) != NO_ERROR ||
       mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
-      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
+      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR ||
+      mapper.getPixelFormatRequested(bufferHandle, &pixelFormat) != NO_ERROR) {
     ALOGE("Error fetching metadata for the imported YCbCr420 buffer handle.");
     return nullptr;
   }
 
   return sp<GraphicBuffer>::make(
       bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE, width, height,
-      static_cast<int>(ui::PixelFormat::YCBCR_420_888), /*layers=*/1, usage,
-      width);
+      static_cast<int>(pixelFormat), layerCount, usage, width);
 }
 
 std::shared_ptr<AHardwareBuffer> importBufferInternal(
-    const NativeHandle& aidlHandle, const Stream& streamConfig) {
+    const NativeHandle& aidlHandle) {
   if (aidlHandle.fds.empty()) {
     ALOGE("Empty handle - nothing to import");
     return nullptr;
@@ -103,12 +82,9 @@
     return nullptr;
   }
 
-  sp<GraphicBuffer> buf =
-      streamConfig.format == PixelFormat::BLOB
-          ? createBlobGraphicBuffer(mapper, bufferHandle)
-          : createYCbCr420GraphicBuffer(mapper, bufferHandle);
+  sp<GraphicBuffer> buf = createGraphicBuffer(mapper, bufferHandle);
 
-  if (buf->initCheck() != NO_ERROR) {
+  if (buf == nullptr || buf->initCheck() != NO_ERROR) {
     ALOGE("Imported graphic buffer is not correcly initialized.");
     return nullptr;
   }
@@ -128,7 +104,7 @@
 
 std::shared_ptr<AHardwareBuffer> VirtualCameraStream::importBuffer(
     const ::aidl::android::hardware::camera::device::StreamBuffer& buffer) {
-  auto hwBufferPtr = importBufferInternal(buffer.buffer, mStreamConfig);
+  auto hwBufferPtr = importBufferInternal(buffer.buffer);
   if (hwBufferPtr != nullptr) {
     std::lock_guard<std::mutex> lock(mLock);
     mBuffers.emplace(std::piecewise_construct,
diff --git a/services/camera/virtualcamera/VirtualCameraTestInstance.cc b/services/camera/virtualcamera/VirtualCameraTestInstance.cc
new file mode 100644
index 0000000..ff4a2d8
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraTestInstance.cc
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VirtualCameraTestInstance"
+
+#include "VirtualCameraTestInstance.h"
+
+#include <atomic>
+#include <chrono>
+#include <memory>
+#include <mutex>
+#include <ratio>
+#include <thread>
+
+#include "GLES/gl.h"
+#include "android/binder_auto_utils.h"
+#include "android/native_window.h"
+#include "log/log.h"
+#include "util/EglDisplayContext.h"
+#include "util/EglProgram.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::view::Surface;
+using ::ndk::ScopedAStatus;
+
+namespace {
+
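+// Acquires a reference to the ANativeWindow backing the Surface and wraps it
+// in a shared_ptr that releases the reference once the last owner goes away.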
+std::shared_ptr<ANativeWindow> nativeWindowFromSurface(const Surface& surface) {
+  ANativeWindow* nativeWindow = surface.get();
+  if (nativeWindow != nullptr) {
+    ANativeWindow_acquire(nativeWindow);
+  }
+  return std::shared_ptr<ANativeWindow>(nativeWindow, ANativeWindow_release);
+}
+
+std::chrono::nanoseconds getCurrentTimestamp() {
+  return std::chrono::duration_cast<std::chrono::nanoseconds>(
+      std::chrono::steady_clock::now().time_since_epoch());
+}
+
+}  // namespace
+
+TestPatternRenderer::TestPatternRenderer(
+    std::shared_ptr<ANativeWindow> nativeWindow, int fps)
+    : mFps(fps), mNativeWindow(nativeWindow) {
+}
+
+void TestPatternRenderer::start() {
+  std::lock_guard<std::mutex> lock(mLock);
+  if (mRunning.exchange(true, std::memory_order_relaxed)) {
+    ALOGW("Render thread already started.");
+    return;
+  }
+  mThread =
+      std::thread(&TestPatternRenderer::renderThreadLoop, this, mNativeWindow);
+}
+
+void TestPatternRenderer::stop() {
+  std::lock_guard<std::mutex> lock(mLock);
+  if (!mRunning.exchange(false, std::memory_order_relaxed)) {
+    ALOGW("Render thread already stopped.");
+    return;
+  }
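+  // Detach rather than join: renderThreadLoop() keeps this instance alive via
+  // shared_from_this() until it observes mRunning == false and exits.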
+  mThread.detach();
+  mRunning = false;
+}
+
+void TestPatternRenderer::renderThreadLoop(
+    std::shared_ptr<ANativeWindow> nativeWindow) {
+  // Prevent destruction of this instance until the thread terminates.
+  std::shared_ptr<TestPatternRenderer> thiz = shared_from_this();
+
+  ALOGV("Starting test client render loop");
+
+  EglDisplayContext eglDisplayContext(nativeWindow);
+  EglTestPatternProgram testPatternProgram;
+
+  const std::chrono::nanoseconds frameDuration(
+      static_cast<uint64_t>(1e9 / mFps));
+
+  std::chrono::nanoseconds lastFrameTs(0);
+  int frameNumber = 0;
+  while (mRunning) {
+    // Wait for appropriate amount of time to meet configured FPS.
+    std::chrono::nanoseconds ts = getCurrentTimestamp();
+    std::chrono::nanoseconds currentDuration = ts - lastFrameTs;
+    if (currentDuration < frameDuration) {
+      std::this_thread::sleep_for(frameDuration - currentDuration);
+    }
+
+    // Render the test pattern and update timestamp.
+    testPatternProgram.draw(ts);
+    eglDisplayContext.swapBuffers();
+    lastFrameTs = getCurrentTimestamp();
+  }
+
+  ALOGV("Terminating test client render loop");
+}
+
+VirtualCameraTestInstance::VirtualCameraTestInstance(const int fps)
+    : mFps(fps) {
+}
+
+ScopedAStatus VirtualCameraTestInstance::onStreamConfigured(
+    const int32_t streamId, const Surface& surface, const int32_t width,
+    const int32_t height, const Format pixelFormat) {
+  ALOGV("%s: streamId %d, %dx%d pixFmt=%s", __func__, streamId, width, height,
+        toString(pixelFormat).c_str());
+
+  auto renderer = std::make_shared<TestPatternRenderer>(
+      nativeWindowFromSurface(surface), mFps);
+
+  std::lock_guard<std::mutex> lock(mLock);
+  if (mInputRenderers.try_emplace(streamId, renderer).second) {
+    renderer->start();
+  } else {
+    ALOGE(
+        "%s: Input stream with id %d is already active, ignoring "
+        "onStreamConfigured call",
+        __func__, streamId);
+  }
+
+  return ScopedAStatus::ok();
+}
+
+ScopedAStatus VirtualCameraTestInstance::onProcessCaptureRequest(
+    const int32_t /*in_streamId*/, const int32_t /*in_frameId*/) {
+  return ScopedAStatus::ok();
+}
+
+ScopedAStatus VirtualCameraTestInstance::onStreamClosed(const int32_t streamId) {
+  ALOGV("%s: streamId %d", __func__, streamId);
+
+  std::shared_ptr<TestPatternRenderer> renderer;
+  {
+    std::lock_guard<std::mutex> lock(mLock);
+    auto it = mInputRenderers.find(streamId);
+    if (it != mInputRenderers.end()) {
+      renderer = std::move(it->second);
+      mInputRenderers.erase(it);
+    }
+  }
+  if (renderer != nullptr) {
+    renderer->stop();
+  }
+  return ScopedAStatus::ok();
+}
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/VirtualCameraTestInstance.h b/services/camera/virtualcamera/VirtualCameraTestInstance.h
new file mode 100644
index 0000000..c130645
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraTestInstance.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
+
+#include <atomic>
+#include <map>
+#include <memory>
+#include <thread>
+
+#include "aidl/android/companion/virtualcamera/BnVirtualCameraCallback.h"
+#include "android/native_window.h"
+#include "utils/Mutex.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Wraps a render loop running in a dedicated thread, rendering a test pattern
+// to the provided Surface (a.k.a. native window) at the configured FPS.
+class TestPatternRenderer
+    : public std::enable_shared_from_this<TestPatternRenderer> {
+ public:
+  TestPatternRenderer(std::shared_ptr<ANativeWindow> nativeWindow, int fps);
+
+  // Start rendering.
+  void start() EXCLUDES(mLock);
+
+  // Stop rendering.
+  // The call returns immediately; the render thread might take some time
+  // (up to one frame) to finish rendering and terminate.
+  void stop() EXCLUDES(mLock);
+
+ private:
+  // Render thread entry point.
+  void renderThreadLoop(std::shared_ptr<ANativeWindow> nativeWindow);
+
+  const int mFps;
+
+  std::shared_ptr<ANativeWindow> mNativeWindow;
+
+  std::mutex mLock;
+  std::atomic_bool mRunning;
+  std::thread mThread GUARDED_BY(mLock);
+};
+
+// VirtualCamera callback implementation for test camera.
+//
+// For every stream configuration call, starts rendering a test pattern to the
+// provided surface.
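+//
+// Used by VirtualCameraService::enableTestCameraCmd, which attaches it to the
+// test camera configuration, e.g.:
+//   configuration.virtualCameraCallback =
+//       ndk::SharedRefBase::make<VirtualCameraTestInstance>(/*fps=*/30);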
+class VirtualCameraTestInstance
+    : public aidl::android::companion::virtualcamera::BnVirtualCameraCallback {
+ public:
+  explicit VirtualCameraTestInstance(int fps = 30);
+
+  ::ndk::ScopedAStatus onStreamConfigured(
+      int32_t streamId, const ::aidl::android::view::Surface& surface,
+      int32_t width, int32_t height,
+      ::aidl::android::companion::virtualcamera::Format pixelFormat) override
+      EXCLUDES(mLock);
+
+  ::ndk::ScopedAStatus onProcessCaptureRequest(int32_t in_streamId,
+                                               int32_t in_frameId) override;
+
+  ::ndk::ScopedAStatus onStreamClosed(int32_t streamId) override EXCLUDES(mLock);
+
+ private:
+  const int mFps;
+
+  std::mutex mLock;
+  // Map maintaining streamId -> TestPatternRenderer mapping for active
+  // input streams.
+  std::map<int, std::shared_ptr<TestPatternRenderer>> mInputRenderers
+      GUARDED_BY(mLock);
+};
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
diff --git a/services/camera/virtualcamera/aidl/Android.bp b/services/camera/virtualcamera/aidl/Android.bp
index a9c2195..b3fe3ad 100644
--- a/services/camera/virtualcamera/aidl/Android.bp
+++ b/services/camera/virtualcamera/aidl/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -23,6 +24,9 @@
         cpp: {
             enabled: false,
         },
+        rust: {
+            enabled: false,
+        },
         ndk: {
             enabled: true,
             additional_shared_libraries: [
@@ -33,6 +37,6 @@
         java: {
             enabled: true,
             platform_apis: true,
-        }
+        },
     },
 }
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
index bb74f5c..2f1e2a9 100644
--- a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
@@ -28,7 +28,8 @@
      * Registers a new camera with the virtual camera hal.
      * @return true if the camera was successfully registered
      */
-    boolean registerCamera(in IBinder token, in VirtualCameraConfiguration configuration);
+    boolean registerCamera(in IBinder token, in VirtualCameraConfiguration configuration,
+            int deviceId);
 
     /**
      * Unregisters the camera from the virtual camera hal. After this call the virtual camera won't
@@ -40,5 +41,5 @@
      * Returns the camera id for a given binder token. Note that this id corresponds to the id of
      * the camera device in the camera framework.
      */
-    int getCameraId(in IBinder token);
+    @utf8InCpp String getCameraId(in IBinder token);
 }
diff --git a/services/camera/virtualcamera/flags/Android.bp b/services/camera/virtualcamera/flags/Android.bp
new file mode 100644
index 0000000..5fa8852
--- /dev/null
+++ b/services/camera/virtualcamera/flags/Android.bp
@@ -0,0 +1,37 @@
+package {
+    default_team: "trendy_team_xr_framework",
+}
+
+soong_config_module_type {
+    name: "virtual_device_build_flags_cc_defaults",
+    module_type: "cc_defaults",
+    config_namespace: "vdm",
+    bool_variables: [
+        "virtual_camera_service_enabled",
+    ],
+    properties: [
+        "cflags",
+    ],
+}
+
+soong_config_bool_variable {
+    name: "virtual_camera_service_enabled",
+}
+
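+// Products opt in by setting the namespaced Soong config variable, e.g. from a
+// product makefile (illustrative):
+//   $(call soong_config_set,vdm,virtual_camera_service_enabled,true)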
+virtual_device_build_flags_cc_defaults {
+    name: "virtual_device_build_flags_defaults",
+    soong_config_variables: {
+        virtual_camera_service_enabled: {
+            cflags: ["-DVIRTUAL_CAMERA_SERVICE_ENABLED=1"],
+        },
+    },
+}
+
+cc_library_static {
+    name: "libvirtualdevicebuildflags",
+    srcs: [
+        "android_companion_virtualdevice_build_flags.cc",
+    ],
+    export_include_dirs: ["."],
+    defaults: ["virtual_device_build_flags_defaults"],
+}
diff --git a/services/camera/libcameraservice/utils/CameraThreadState.h b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc
similarity index 62%
copy from services/camera/libcameraservice/utils/CameraThreadState.h
copy to services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc
index e1a70de..5525bc9 100644
--- a/services/camera/libcameraservice/utils/CameraThreadState.h
+++ b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright 2024 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,18 +14,20 @@
  * limitations under the License.
  */
 
-#include <stdint.h>
-
 namespace android {
-class CameraThreadState {
-public:
-  static int64_t clearCallingIdentity();
+namespace companion {
+namespace virtualdevice {
+namespace flags {
 
-  static void restoreCallingIdentity(int64_t token);
+bool virtual_camera_service_build_flag() {
+#if VIRTUAL_CAMERA_SERVICE_ENABLED
+  return true;
+#else
+  return false;
+#endif
+}
 
-  static int getCallingUid();
-
-  static int getCallingPid();
-};
-
-} // android
+}  // namespace flags
+}  // namespace virtualdevice
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h
new file mode 100644
index 0000000..718ce9b
--- /dev/null
+++ b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace android {
+namespace companion {
+namespace virtualdevice {
+namespace flags {
+
+// Returns true if the virtual camera service is enabled
+// in the build.
+//
+// TODO(b/309090563) - Deprecate in favor of an auto-generated library to
+// query build flags once available.
+bool virtual_camera_service_build_flag();
+
+}  // namespace flags
+}  // namespace virtualdevice
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/fuzzer/Android.bp b/services/camera/virtualcamera/fuzzer/Android.bp
index 71e8f50..6a72167 100644
--- a/services/camera/virtualcamera/fuzzer/Android.bp
+++ b/services/camera/virtualcamera/fuzzer/Android.bp
@@ -15,7 +15,8 @@
  * limitations under the License.
  *
  *****************************************************************************/
- package {
+package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/services/camera/virtualcamera/main.cc b/services/camera/virtualcamera/main.cc
index 43b0219..3db9d9c 100644
--- a/services/camera/virtualcamera/main.cc
+++ b/services/camera/virtualcamera/main.cc
@@ -38,7 +38,7 @@
 }  // namespace
 
 int main() {
-  ALOGI("CameraProvider: virtual webcam service is starting.");
+  ALOGI("virtual_camera service is starting.");
 
   ABinderProcess_setThreadPoolMaxThreadCount(HWBINDER_THREAD_COUNT);
 
diff --git a/services/camera/virtualcamera/tests/Android.bp b/services/camera/virtualcamera/tests/Android.bp
index bc46ba0..543cc10 100644
--- a/services/camera/virtualcamera/tests/Android.bp
+++ b/services/camera/virtualcamera/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -14,11 +15,14 @@
         "libgtest",
         "libgmock",
     ],
-    srcs: ["EglUtilTest.cc",
-           "VirtualCameraDeviceTest.cc",
-           "VirtualCameraProviderTest.cc",
-           "VirtualCameraRenderThreadTest.cc",
-           "VirtualCameraServiceTest.cc",
-           "VirtualCameraSessionTest.cc"],
+    srcs: [
+        "EglUtilTest.cc",
+        "JpegUtilTest.cc",
+        "VirtualCameraDeviceTest.cc",
+        "VirtualCameraProviderTest.cc",
+        "VirtualCameraRenderThreadTest.cc",
+        "VirtualCameraServiceTest.cc",
+        "VirtualCameraSessionTest.cc",
+    ],
     test_suites: ["device-tests"],
 }
diff --git a/services/camera/virtualcamera/tests/EglUtilTest.cc b/services/camera/virtualcamera/tests/EglUtilTest.cc
index 589e312..813be75 100644
--- a/services/camera/virtualcamera/tests/EglUtilTest.cc
+++ b/services/camera/virtualcamera/tests/EglUtilTest.cc
@@ -55,6 +55,11 @@
 };
 
 TEST_F(EglTest, EglTestPatternProgramSuccessfulInit) {
+  if (!isGlExtensionSupported(kGlExtYuvTarget)) {
+    GTEST_SKIP() << "Skipping test because of missing required GL extension "
+                 << kGlExtYuvTarget;
+  }
+
   EglTestPatternProgram eglTestPatternProgram;
 
   // Verify the shaders compiled and linked successfully.
diff --git a/services/camera/virtualcamera/tests/JpegUtilTest.cc b/services/camera/virtualcamera/tests/JpegUtilTest.cc
new file mode 100644
index 0000000..e6481f0
--- /dev/null
+++ b/services/camera/virtualcamera/tests/JpegUtilTest.cc
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sys/types.h>
+
+#include "system/graphics.h"
+#define LOG_TAG "JpegUtilTest"
+
+#include <array>
+#include <cstdint>
+#include <cstring>
+
+#include "android/hardware_buffer.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "jpeglib.h"
+#include "util/JpegUtil.h"
+#include "util/Util.h"
+#include "utils/Errors.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+namespace {
+
+using testing::Eq;
+using testing::Gt;
+using testing::Optional;
+using testing::VariantWith;
+
+constexpr int kOutputBufferSize = 1024 * 1024;  // 1 MiB.
+constexpr int kJpegQuality = 80;
+
+// Create black YUV420 buffer for testing purposes.
+std::shared_ptr<AHardwareBuffer> createHardwareBufferForTest(const int width,
+                                                             const int height) {
+  const AHardwareBuffer_Desc desc{.width = static_cast<uint32_t>(width),
+                                  .height = static_cast<uint32_t>(height),
+                                  .layers = 1,
+                                  .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+                                  .usage = AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+                                  .stride = 0,
+                                  .rfu0 = 0,
+                                  .rfu1 = 0};
+
+  AHardwareBuffer* hwBufferPtr;
+  int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
+  if (status != NO_ERROR) {
+    ALOGE(
+        "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
+        __func__, status);
+    return nullptr;
+  }
+
+  std::shared_ptr<AHardwareBuffer> hwBuffer(hwBufferPtr,
+                                            AHardwareBuffer_release);
+
+  YCbCrLockGuard yCbCrLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN);
+  const android_ycbcr& ycbr = (*yCbCrLock);
+
+  uint8_t* y = reinterpret_cast<uint8_t*>(ycbr.y);
+  for (int r = 0; r < height; r++) {
+    memset(y + r * ycbr.ystride, 0x00, width);
+  }
+
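+  // 0xff / 2 (~0x7f) is neutral chroma, so with zero luma the image is solid
+  // black.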
+  uint8_t* cb = reinterpret_cast<uint8_t*>(ycbr.cb);
+  uint8_t* cr = reinterpret_cast<uint8_t*>(ycbr.cr);
+  for (int r = 0; r < height / 2; r++) {
+    for (int c = 0; c < width / 2; c++) {
+      cb[r * ycbr.cstride + c * ycbr.chroma_step] = 0xff / 2;
+      cr[r * ycbr.cstride + c * ycbr.chroma_step] = 0xff / 2;
+    }
+  }
+
+  return hwBuffer;
+}
+
+// Decodes the JPEG header and returns the image resolution on success, or an
+// error message on failure.
+std::variant<std::string, Resolution> verifyHeaderAndGetResolution(
+    const uint8_t* data, int size) {
+  struct jpeg_decompress_struct ctx;
+  struct jpeg_error_mgr jerr;
+
+  struct DecompressionError {
+    bool success = true;
+    std::string error;
+  } result;
+
+  ctx.client_data = &result;
+
+  ctx.err = jpeg_std_error(&jerr);
+  ctx.err->error_exit = [](j_common_ptr cinfo) {
+    reinterpret_cast<DecompressionError*>(cinfo->client_data)->success = false;
+  };
+  ctx.err->output_message = [](j_common_ptr cinfo) {
+    char buffer[JMSG_LENGTH_MAX];
+    (*cinfo->err->format_message)(cinfo, buffer);
+    reinterpret_cast<DecompressionError*>(cinfo->client_data)->error = buffer;
+    ALOGE("libjpeg error: %s", buffer);
+  };
+
+  jpeg_create_decompress(&ctx);
+  jpeg_mem_src(&ctx, data, size);
+  jpeg_read_header(&ctx, /*require_image=*/true);
+
+  if (!result.success) {
+    jpeg_destroy_decompress(&ctx);
+    return result.error;
+  }
+
+  Resolution resolution(ctx.image_width, ctx.image_height);
+  jpeg_destroy_decompress(&ctx);
+  return resolution;
+}
+
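+// The tests below assume roundTo2DctSize rounds each dimension up to a
+// multiple of 16 (2x the 8x8 DCT block), i.e. the JPEG MCU size for 4:2:0
+// chroma subsampling.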
+TEST(JpegUtil, roundToDctSize) {
+  EXPECT_THAT(roundTo2DctSize(Resolution(640, 480)), Eq(Resolution(640, 480)));
+  EXPECT_THAT(roundTo2DctSize(Resolution(5, 5)), Eq(Resolution(16, 16)));
+  EXPECT_THAT(roundTo2DctSize(Resolution(32, 32)), Eq(Resolution(32, 32)));
+  EXPECT_THAT(roundTo2DctSize(Resolution(33, 32)), Eq(Resolution(48, 32)));
+  EXPECT_THAT(roundTo2DctSize(Resolution(32, 33)), Eq(Resolution(32, 48)));
+}
+
+class JpegUtilTest : public ::testing::Test {
+ public:
+  void SetUp() override {
+    std::fill(mOutputBuffer.begin(), mOutputBuffer.end(), 0);
+  }
+
+ protected:
+  std::optional<size_t> compress(int imageWidth, int imageHeight,
+                                 std::shared_ptr<AHardwareBuffer> inBuffer) {
+    return compressJpeg(imageWidth, imageHeight, kJpegQuality, inBuffer,
+                        /*app1ExifData=*/{}, mOutputBuffer.size(),
+                        mOutputBuffer.data());
+  }
+
+  std::array<uint8_t, kOutputBufferSize> mOutputBuffer;
+};
+
+TEST_F(JpegUtilTest, compressImageSizeAlignedWithDctSucceeds) {
+  std::shared_ptr<AHardwareBuffer> inBuffer =
+      createHardwareBufferForTest(640, 480);
+
+  std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+  EXPECT_THAT(compressedSize, Optional(Gt(0)));
+  EXPECT_THAT(verifyHeaderAndGetResolution(mOutputBuffer.data(),
+                                           compressedSize.value()),
+              VariantWith<Resolution>(Resolution(640, 480)));
+}
+
+TEST_F(JpegUtilTest, compressImageSizeNotAlignedWithDctSucceeds) {
+  std::shared_ptr<AHardwareBuffer> inBuffer =
+      createHardwareBufferForTest(640, 480);
+
+  std::optional<size_t> compressedSize = compress(630, 470, inBuffer);
+
+  EXPECT_THAT(compressedSize, Optional(Gt(0)));
+  EXPECT_THAT(verifyHeaderAndGetResolution(mOutputBuffer.data(),
+                                           compressedSize.value()),
+              VariantWith<Resolution>(Resolution(630, 470)));
+}
+
+TEST_F(JpegUtilTest, compressImageWithBufferNotAlignedWithDctFails) {
+  std::shared_ptr<AHardwareBuffer> inBuffer =
+      createHardwareBufferForTest(641, 480);
+
+  std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+  EXPECT_THAT(compressedSize, Eq(std::nullopt));
+}
+
+TEST_F(JpegUtilTest, compressImageWithBufferTooSmallFails) {
+  std::shared_ptr<AHardwareBuffer> inBuffer =
+      createHardwareBufferForTest(634, 464);
+
+  std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+  EXPECT_THAT(compressedSize, Eq(std::nullopt));
+}
+
+}  // namespace
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
index 027ecb7..32cd23f 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include <algorithm>
+#include <iterator>
 #include <memory>
 
 #include "VirtualCameraDevice.h"
@@ -22,11 +24,14 @@
 #include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
+#include "aidl/android/hardware/graphics/common/PixelFormat.h"
 #include "android/binder_interface_utils.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
 #include "log/log_main.h"
 #include "system/camera_metadata.h"
+#include "util/MetadataUtil.h"
+#include "util/Util.h"
 #include "utils/Errors.h"
 
 namespace android {
@@ -44,22 +49,41 @@
 using ::aidl::android::hardware::camera::device::StreamConfiguration;
 using ::aidl::android::hardware::camera::device::StreamType;
 using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::testing::ElementsAre;
 using ::testing::UnorderedElementsAreArray;
 using metadata_stream_t =
     camera_metadata_enum_android_scaler_available_stream_configurations_t;
 
-constexpr int kCameraId = 42;
+constexpr char kCameraId[] = "42";
+constexpr int kQvgaWidth = 320;
+constexpr int kQvgaHeight = 240;
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kHdWidth = 1280;
 constexpr int kHdHeight = 720;
 constexpr int kMaxFps = 30;
+constexpr int kDefaultDeviceId = 0;
+
+const Stream kVgaYUV420Stream = Stream{
+    .streamType = StreamType::OUTPUT,
+    .width = kVgaWidth,
+    .height = kVgaHeight,
+    .format = PixelFormat::YCBCR_420_888,
+};
+
+const Stream kVgaJpegStream = Stream{
+    .streamType = StreamType::OUTPUT,
+    .width = kVgaWidth,
+    .height = kVgaHeight,
+    .format = PixelFormat::BLOB,
+};
 
 struct AvailableStreamConfiguration {
   const int width;
   const int height;
   const int pixelFormat;
-  const metadata_stream_t streamConfiguration;
+  const metadata_stream_t streamConfiguration =
+      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
 };
 
 bool operator==(const AvailableStreamConfiguration& a,
@@ -105,14 +129,15 @@
   std::vector<AvailableStreamConfiguration> expectedAvailableStreamConfigs;
 };
 
-class VirtualCameraDeviceTest
+class VirtualCameraDeviceCharacteristicsTest
     : public testing::TestWithParam<VirtualCameraConfigTestParam> {};
 
-TEST_P(VirtualCameraDeviceTest, cameraCharacteristicsForInputFormat) {
+TEST_P(VirtualCameraDeviceCharacteristicsTest,
+       cameraCharacteristicsForInputFormat) {
   const VirtualCameraConfigTestParam& param = GetParam();
   std::shared_ptr<VirtualCameraDevice> camera =
-      ndk::SharedRefBase::make<VirtualCameraDevice>(kCameraId,
-                                                    param.inputConfig);
+      ndk::SharedRefBase::make<VirtualCameraDevice>(
+          kCameraId, param.inputConfig, kDefaultDeviceId);
 
   CameraMetadata metadata;
   ASSERT_TRUE(camera->getCameraCharacteristics(&metadata).isOk());
@@ -137,7 +162,7 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(
-    cameraCharacteristicsForInputFormat, VirtualCameraDeviceTest,
+    cameraCharacteristicsForInputFormat, VirtualCameraDeviceCharacteristicsTest,
     testing::Values(
         VirtualCameraConfigTestParam{
             .inputConfig =
@@ -152,24 +177,33 @@
                     .lensFacing = LensFacing::FRONT},
             .expectedAvailableStreamConfigs =
                 {AvailableStreamConfiguration{
-                     .width = kVgaWidth,
-                     .height = kVgaHeight,
-                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
-                     .streamConfiguration =
-                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                     .width = kQvgaWidth,
+                     .height = kQvgaHeight,
+                     .pixelFormat =
+                         ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                 AvailableStreamConfiguration{
+                     .width = kQvgaWidth,
+                     .height = kQvgaHeight,
+                     .pixelFormat =
+                         ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                 AvailableStreamConfiguration{
+                     .width = kQvgaWidth,
+                     .height = kQvgaHeight,
+                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
                  AvailableStreamConfiguration{
                      .width = kVgaWidth,
                      .height = kVgaHeight,
                      .pixelFormat =
-                         ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-                     .streamConfiguration =
-                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                         ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
                  AvailableStreamConfiguration{
                      .width = kVgaWidth,
                      .height = kVgaHeight,
-                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                     .streamConfiguration =
-                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}},
+                     .pixelFormat =
+                         ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                 AvailableStreamConfiguration{
+                     .width = kVgaWidth,
+                     .height = kVgaHeight,
+                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB}}},
         VirtualCameraConfigTestParam{
             .inputConfig =
                 VirtualCameraConfiguration{
@@ -189,43 +223,170 @@
                     .lensFacing = LensFacing::BACK},
             .expectedAvailableStreamConfigs = {
                 AvailableStreamConfiguration{
+                    .width = kQvgaWidth,
+                    .height = kQvgaHeight,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                AvailableStreamConfiguration{
+                    .width = kQvgaWidth,
+                    .height = kQvgaHeight,
+                    .pixelFormat =
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                AvailableStreamConfiguration{
+                    .width = kQvgaWidth,
+                    .height = kQvgaHeight,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+                AvailableStreamConfiguration{
+                    .width = 640,
+                    .height = 360,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                AvailableStreamConfiguration{
+                    .width = 640,
+                    .height = 360,
+                    .pixelFormat =
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                AvailableStreamConfiguration{
+                    .width = 640,
+                    .height = 360,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+                AvailableStreamConfiguration{
                     .width = kVgaWidth,
                     .height = kVgaHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
                 AvailableStreamConfiguration{
                     .width = kVgaWidth,
                     .height = kVgaHeight,
                     .pixelFormat =
-                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
                 AvailableStreamConfiguration{
                     .width = kVgaWidth,
                     .height = kVgaHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+                AvailableStreamConfiguration{
+                    .width = 1024,
+                    .height = 576,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                AvailableStreamConfiguration{
+                    .width = 1024,
+                    .height = 576,
+                    .pixelFormat =
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                AvailableStreamConfiguration{
+                    .width = 1024,
+                    .height = 576,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
                 AvailableStreamConfiguration{
                     .width = kHdWidth,
                     .height = kHdHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
                 AvailableStreamConfiguration{
                     .width = kHdWidth,
                     .height = kHdHeight,
                     .pixelFormat =
-                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
                 AvailableStreamConfiguration{
                     .width = kHdWidth,
                     .height = kHdHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}}));
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB}}}));
+
+class VirtualCameraDeviceTest : public ::testing::Test {
+ public:
+  void SetUp() override {
+    mCamera = ndk::SharedRefBase::make<VirtualCameraDevice>(
+        kCameraId,
+        VirtualCameraConfiguration{
+            .supportedStreamConfigs = {SupportedStreamConfiguration{
+                .width = kVgaWidth,
+                .height = kVgaHeight,
+                .pixelFormat = Format::YUV_420_888,
+                .maxFps = kMaxFps}},
+            .virtualCameraCallback = nullptr,
+            .sensorOrientation = SensorOrientation::ORIENTATION_0,
+            .lensFacing = LensFacing::FRONT},
+        kDefaultDeviceId);
+  }
+
+ protected:
+  std::shared_ptr<VirtualCameraDevice> mCamera;
+};
+
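+// kMaxNumberOfProcessedStreams bounds the non-stalling (YUV / implementation-
+// defined) outputs and kMaxNumberOfStallStreams bounds the stalling BLOB (JPEG)
+// outputs; the tests below exercise both limits.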
+TEST_F(VirtualCameraDeviceTest, configureMaximalNumberOfNonStallStreamsSucceeds) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfProcessedStreams,
+              kVgaYUV420Stream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_TRUE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureTooManyNonStallStreamsFails) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfProcessedStreams + 1,
+              kVgaYUV420Stream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_FALSE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureMaximalNumberOfStallStreamsSucceeds) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfStallStreams, kVgaJpegStream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_TRUE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureTooManyStallStreamsFails) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfStallStreams + 1, kVgaJpegStream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_FALSE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, thumbnailSizeWithCompatibleAspectRatio) {
+  CameraMetadata metadata;
+  ASSERT_TRUE(mCamera->getCameraCharacteristics(&metadata).isOk());
+
+  // The camera is configured with VGA input, so we expect a 240 x 180 thumbnail
+  // size in the characteristics, since it has the same aspect ratio.
+  EXPECT_THAT(getJpegAvailableThumbnailSizes(metadata),
+              ElementsAre(Resolution(0, 0), Resolution(240, 180)));
+}
+
+TEST_F(VirtualCameraDeviceTest, dump) {
+  std::string expected = R"(  virtual_camera 42 belongs to virtual device 0
+  SupportedStreamConfiguration:
+    SupportedStreamConfiguration{width: 640, height: 480, pixelFormat: YUV_420_888, maxFps: 30})";
+  int expectedSize = expected.size() * sizeof(char);
+  char buffer[expectedSize];
+
+  // Create an in-memory fd to capture the dump output.
+  int fd = memfd_create("tmpFile", 0);
+  mCamera->dump(fd, {}, 0);
+
+  // Check that we wrote the expected size
+  int dumpSize = lseek(fd, 0, SEEK_END);
+
+  // Rewind and read from the fd
+  lseek(fd, 0, SEEK_SET);
+  read(fd, buffer, expectedSize);
+  close(fd);
+
+  // Check the content of the dump
+  std::string name = std::string(buffer, expectedSize);
+  ASSERT_EQ(expected, name);
+  // Check the size after the content so that a content mismatch is reported
+  // first when a failure occurs.
+  ASSERT_EQ(expectedSize, dumpSize);
+}
 
 }  // namespace
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
index ab647a4..d4bc6de 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
@@ -50,11 +50,12 @@
 using ::testing::Not;
 using ::testing::Return;
 
+constexpr char kCameraId[] = "42";
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kMaxFps = 30;
-constexpr char kVirtualCameraNameRegex[] =
-    "device@[0-9]+\\.[0-9]+/virtual/[0-9]+";
+constexpr int kDefaultDeviceId = 0;
+constexpr char kVirtualCameraNameRegex[] = "device@[0-9]+\\.[0-9]+/virtual/.+";
 
 class MockCameraProviderCallback : public BnCameraProviderCallback {
  public:
@@ -118,7 +119,7 @@
 
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfig);
+      mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
   EXPECT_THAT(camera, Not(IsNull()));
   EXPECT_THAT(camera->getCameraName(), MatchesRegex(kVirtualCameraNameRegex));
 
@@ -136,7 +137,7 @@
       .WillOnce(Return(ndk::ScopedAStatus::ok()));
 
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfig);
+      mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
 
   // Created camera should be in the list of cameras.
@@ -148,7 +149,7 @@
 TEST_F(VirtualCameraProviderTest, RemoveCamera) {
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfig);
+      mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
 
   EXPECT_CALL(*mMockCameraProviderCallback,
               cameraDeviceStatusChange(Eq(camera->getCameraName()),
@@ -165,7 +166,7 @@
 TEST_F(VirtualCameraProviderTest, RemoveNonExistingCamera) {
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfig);
+      mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
 
   // Removing non-existing camera should fail.
   const std::string cameraName = "DefinitelyNoTCamera";
diff --git a/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc b/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
index 5f899b8..ddcb789 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
@@ -33,6 +33,7 @@
 #include "android/binder_auto_utils.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
@@ -62,6 +63,7 @@
 
 constexpr int kInputWidth = 640;
 constexpr int kInputHeight = 480;
+const Resolution kInputResolution(kInputWidth, kInputHeight);
 
 Matcher<StreamBuffer> IsStreamBufferWithStatus(const int streamId,
                                                const int bufferId,
@@ -102,7 +104,8 @@
     mMockCameraDeviceCallback =
         ndk::SharedRefBase::make<MockCameraDeviceCallback>();
     mRenderThread = std::make_unique<VirtualCameraRenderThread>(
-        *mSessionContext, kInputWidth, kInputHeight, mMockCameraDeviceCallback);
+        *mSessionContext, kInputResolution,
+        /*reportedSensorSize*/ kInputResolution, mMockCameraDeviceCallback);
   }
 
  protected:
diff --git a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
index d4d00a2..719f64d 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
@@ -14,8 +14,13 @@
  * limitations under the License.
  */
 
+#include <algorithm>
+#include <cstdint>
 #include <cstdio>
+#include <iterator>
 #include <memory>
+#include <optional>
+#include <regex>
 
 #include "VirtualCameraService.h"
 #include "aidl/android/companion/virtualcamera/BnVirtualCameraCallback.h"
@@ -29,6 +34,7 @@
 #include "binder/Binder.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
+#include "util/MetadataUtil.h"
 #include "util/Permissions.h"
 #include "utils/Errors.h"
 
@@ -44,15 +50,18 @@
 using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
 using ::aidl::android::hardware::camera::common::TorchModeStatus;
+using ::aidl::android::hardware::camera::device::CameraMetadata;
 using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
 using ::aidl::android::hardware::graphics::common::PixelFormat;
 using ::aidl::android::view::Surface;
 using ::testing::_;
+using ::testing::ElementsAre;
 using ::testing::Eq;
 using ::testing::Ge;
 using ::testing::IsEmpty;
 using ::testing::IsNull;
 using ::testing::Not;
+using ::testing::Optional;
 using ::testing::Return;
 using ::testing::SizeIs;
 
@@ -62,11 +71,23 @@
 constexpr SensorOrientation kSensorOrientation =
     SensorOrientation::ORIENTATION_0;
 constexpr LensFacing kLensFacing = LensFacing::FRONT;
+constexpr int kDefaultDeviceId = 0;
 constexpr char kCreateVirtualDevicePermissions[] =
     "android.permission.CREATE_VIRTUAL_DEVICE";
 
 const VirtualCameraConfiguration kEmptyVirtualCameraConfiguration;
 
+class MockVirtualCameraCallback : public BnVirtualCameraCallback {
+ public:
+  MOCK_METHOD(ndk::ScopedAStatus, onStreamConfigured,
+              (int32_t, const ::aidl::android::view::Surface&, int, int,
+               ::aidl::android::companion::virtualcamera::Format pixelFormat),
+              (override));
+  MOCK_METHOD(ndk::ScopedAStatus, onProcessCaptureRequest, (int32_t, int32_t),
+              (override));
+  MOCK_METHOD(ndk::ScopedAStatus, onStreamClosed, (int32_t), (override));
+};
+
 VirtualCameraConfiguration createConfiguration(const int width, const int height,
                                                const Format format,
                                                const int maxFps) {
@@ -77,6 +98,8 @@
                                                   .maxFps = maxFps});
   configuration.sensorOrientation = kSensorOrientation;
   configuration.lensFacing = kLensFacing;
+  configuration.virtualCameraCallback =
+      ndk::SharedRefBase::make<MockVirtualCameraCallback>();
   return configuration;
 }
 
@@ -110,6 +133,7 @@
     mCameraProvider->setCallback(mMockCameraProviderCallback);
     mCameraService = ndk::SharedRefBase::make<VirtualCameraService>(
         mCameraProvider, mMockPermissionsProxy);
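+    // Presumably skips EGL capability verification so camera registration in
+    // these tests does not require a functional EGL/GL stack.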
+    mCameraService->disableEglVerificationForTest();
 
     ON_CALL(mMockPermissionsProxy, checkCallingPermission)
         .WillByDefault(Return(true));
@@ -124,8 +148,8 @@
     bool aidlRet;
 
     ASSERT_TRUE(mCameraService
-                    ->registerCamera(mNdkOwnerToken,
-                                     mVgaYUV420OnlyConfiguration, &aidlRet)
+                    ->registerCamera(mNdkOwnerToken, mVgaYUV420OnlyConfiguration,
+                                     kDefaultDeviceId, &aidlRet)
                     .isOk());
     ASSERT_TRUE(aidlRet);
   }
@@ -134,12 +158,21 @@
     close(mDevNullFd);
   }
 
-  void execute_shell_command(const std::string cmd) {
-    std::array<const char*, 1> args{cmd.data()};
-    ASSERT_THAT(
-        mCameraService->handleShellCommand(mDevNullFd, mDevNullFd, mDevNullFd,
-                                           args.data(), args.size()),
-        Eq(NO_ERROR));
+  binder_status_t execute_shell_command(const std::string& cmd) {
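+    // Split the command string on whitespace into an argv-style array so a test
+    // can pass a full command line as one string to handleShellCommand.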
+    const static std::regex whitespaceRegex("\\s+");
+    std::vector<std::string> tokens;
+    std::copy_if(
+        std::sregex_token_iterator(cmd.begin(), cmd.end(), whitespaceRegex, -1),
+        std::sregex_token_iterator(), std::back_inserter(tokens),
+        [](const std::string& token) { return !token.empty(); });
+
+    std::vector<const char*> argv;
+    argv.reserve(tokens.size());
+    std::transform(tokens.begin(), tokens.end(), std::back_inserter(argv),
+                   [](const std::string& str) { return str.c_str(); });
+
+    return mCameraService->handleShellCommand(
+        mDevNullFd, mDevNullFd, mDevNullFd, argv.data(), argv.size());
   }
 
   std::vector<std::string> getCameraIds() {
@@ -148,6 +181,27 @@
     return cameraIds;
   }
 
+  std::optional<camera_metadata_enum_android_lens_facing> getCameraLensFacing(
+      const std::string& id) {
+    std::shared_ptr<VirtualCameraDevice> camera = mCameraProvider->getCamera(id);
+    if (camera == nullptr) {
+      return std::nullopt;
+    }
+    CameraMetadata metadata;
+    camera->getCameraCharacteristics(&metadata);
+    return getLensFacing(metadata);
+  }
+
+  std::optional<int32_t> getCameraSensorOrientation(const std::string& id) {
+    std::shared_ptr<VirtualCameraDevice> camera = mCameraProvider->getCamera(id);
+    if (camera == nullptr) {
+      return std::nullopt;
+    }
+    CameraMetadata metadata;
+    camera->getCameraCharacteristics(&metadata);
+    return getSensorOrientation(metadata);
+  }
+
  protected:
   std::shared_ptr<VirtualCameraService> mCameraService;
   std::shared_ptr<VirtualCameraProvider> mCameraProvider;
@@ -169,10 +223,10 @@
   ndk::SpAIBinder ndkToken(AIBinder_fromPlatformBinder(token));
   bool aidlRet;
 
-  ASSERT_TRUE(
-      mCameraService
-          ->registerCamera(ndkToken, mVgaYUV420OnlyConfiguration, &aidlRet)
-          .isOk());
+  ASSERT_TRUE(mCameraService
+                  ->registerCamera(ndkToken, mVgaYUV420OnlyConfiguration,
+                                   kDefaultDeviceId, &aidlRet)
+                  .isOk());
 
   EXPECT_TRUE(aidlRet);
   EXPECT_THAT(getCameraIds(), SizeIs(1));
@@ -186,7 +240,9 @@
   VirtualCameraConfiguration config =
       createConfiguration(kVgaWidth, kVgaHeight, Format::RGBA_8888, kMaxFps);
 
-  ASSERT_TRUE(mCameraService->registerCamera(ndkToken, config, &aidlRet).isOk());
+  ASSERT_TRUE(mCameraService
+                  ->registerCamera(ndkToken, config, kDefaultDeviceId, &aidlRet)
+                  .isOk());
 
   EXPECT_TRUE(aidlRet);
   EXPECT_THAT(getCameraIds(), SizeIs(1));
@@ -198,7 +254,7 @@
 
   ASSERT_TRUE(mCameraService
                   ->registerCamera(mNdkOwnerToken, mVgaYUV420OnlyConfiguration,
-                                   &aidlRet)
+                                   kDefaultDeviceId, &aidlRet)
                   .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), SizeIs(1));
@@ -209,12 +265,31 @@
 
   ASSERT_FALSE(mCameraService
                    ->registerCamera(mNdkOwnerToken,
-                                    kEmptyVirtualCameraConfiguration, &aidlRet)
+                                    kEmptyVirtualCameraConfiguration,
+                                    kDefaultDeviceId, &aidlRet)
                    .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
 
+TEST_F(VirtualCameraServiceTest,
+       ConfigurationWithoutVirtualCameraCallbackFails) {
+  sp<BBinder> token = sp<BBinder>::make();
+  ndk::SpAIBinder ndkToken(AIBinder_fromPlatformBinder(token));
+  bool aidlRet;
+
+  VirtualCameraConfiguration config =
+      createConfiguration(kVgaWidth, kVgaHeight, Format::RGBA_8888, kMaxFps);
+  config.virtualCameraCallback = nullptr;
+
+  ASSERT_FALSE(mCameraService
+                   ->registerCamera(ndkToken, config, kDefaultDeviceId, &aidlRet)
+                   .isOk());
+
+  EXPECT_FALSE(aidlRet);
+  EXPECT_THAT(getCameraIds(), IsEmpty());
+}
+
 TEST_F(VirtualCameraServiceTest, ConfigurationWithUnsupportedPixelFormatFails) {
   bool aidlRet;
 
@@ -222,7 +297,9 @@
       createConfiguration(kVgaWidth, kVgaHeight, Format::UNKNOWN, kMaxFps);
 
   ASSERT_FALSE(
-      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+      mCameraService
+          ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+          .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
@@ -233,18 +310,9 @@
       createConfiguration(1000000, 1000000, Format::YUV_420_888, kMaxFps);
 
   ASSERT_FALSE(
-      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
-  EXPECT_FALSE(aidlRet);
-  EXPECT_THAT(getCameraIds(), IsEmpty());
-}
-
-TEST_F(VirtualCameraServiceTest, ConfigurationWithUnalignedResolutionFails) {
-  bool aidlRet;
-  VirtualCameraConfiguration config =
-      createConfiguration(641, 481, Format::YUV_420_888, kMaxFps);
-
-  ASSERT_FALSE(
-      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+      mCameraService
+          ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+          .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
@@ -255,7 +323,9 @@
       createConfiguration(-1, kVgaHeight, Format::YUV_420_888, kMaxFps);
 
   ASSERT_FALSE(
-      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+      mCameraService
+          ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+          .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
@@ -266,7 +336,9 @@
       createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888, 0);
 
   ASSERT_FALSE(
-      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+      mCameraService
+          ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+          .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
@@ -277,7 +349,9 @@
       createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888, 90);
 
   ASSERT_FALSE(
-      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+      mCameraService
+          ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+          .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
@@ -311,7 +385,7 @@
 
   EXPECT_THAT(mCameraService
                   ->registerCamera(mNdkOwnerToken, mVgaYUV420OnlyConfiguration,
-                                   &aidlRet)
+                                   kDefaultDeviceId, &aidlRet)
                   .getExceptionCode(),
               Eq(EX_SECURITY));
 }
@@ -327,7 +401,7 @@
 }
 
 TEST_F(VirtualCameraServiceTest, GetIdWithoutPermissionFails) {
-  int32_t aidlRet;
+  std::string aidlRet;
   EXPECT_CALL(mMockPermissionsProxy,
               checkCallingPermission(kCreateVirtualDevicePermissions))
       .WillOnce(Return(false));
@@ -370,17 +444,97 @@
 }
 
 TEST_F(VirtualCameraServiceTest, TestCameraShellCmd) {
-  execute_shell_command("enable_test_camera");
+  EXPECT_THAT(execute_shell_command("enable_test_camera"), Eq(NO_ERROR));
 
   std::vector<std::string> cameraIdsAfterEnable = getCameraIds();
   EXPECT_THAT(cameraIdsAfterEnable, SizeIs(1));
 
-  execute_shell_command("disable_test_camera");
+  EXPECT_THAT(execute_shell_command("disable_test_camera"), Eq(NO_ERROR));
 
   std::vector<std::string> cameraIdsAfterDisable = getCameraIds();
   EXPECT_THAT(cameraIdsAfterDisable, IsEmpty());
 }
 
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithId) {
+  EXPECT_THAT(
+      execute_shell_command("enable_test_camera --camera_id=hello12345"),
+      Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIdsAfterEnable = getCameraIds();
+  EXPECT_THAT(cameraIdsAfterEnable,
+              ElementsAre("device@1.1/virtual/hello12345"));
+
+  EXPECT_THAT(execute_shell_command("disable_test_camera"), Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIdsAfterDisable = getCameraIds();
+  EXPECT_THAT(cameraIdsAfterDisable, IsEmpty());
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidId) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --camera_id="),
+              Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithUnknownCommand) {
+  EXPECT_THAT(execute_shell_command("brew_coffee --flavor=vanilla"),
+              Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithMalformedOption) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera **camera_id=12345"),
+              Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithLensFacing) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --lens_facing=front"),
+              Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIds = getCameraIds();
+  ASSERT_THAT(cameraIds, SizeIs(1));
+  EXPECT_THAT(getCameraLensFacing(cameraIds[0]),
+              Optional(Eq(ANDROID_LENS_FACING_FRONT)));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidLensFacing) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --lens_facing=west"),
+              Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInputFps) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=15"),
+              Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIds = getCameraIds();
+  ASSERT_THAT(cameraIds, SizeIs(1));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidInputFps) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=1001"),
+              Eq(STATUS_BAD_VALUE));
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=0"),
+              Eq(STATUS_BAD_VALUE));
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=foo"),
+              Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithSensorOrientation90) {
+  EXPECT_THAT(
+      execute_shell_command("enable_test_camera --sensor_orientation=90"),
+      Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIds = getCameraIds();
+  ASSERT_THAT(cameraIds, SizeIs(1));
+  EXPECT_THAT(getCameraSensorOrientation(cameraIds[0]), Optional(Eq(90)));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithSensorOrientationNoArgs) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera"), Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIds = getCameraIds();
+  ASSERT_THAT(cameraIds, SizeIs(1));
+  EXPECT_THAT(getCameraSensorOrientation(cameraIds[0]), Optional(Eq(0)));
+}
+
 }  // namespace
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
index 446c679..a9eb413 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
@@ -30,13 +30,16 @@
 #include "android/binder_interface_utils.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 namespace {
 
+constexpr char kCameraId[] = "42";
+constexpr int kQvgaWidth = 320;
+constexpr int kQvgaHeight = 240;
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kSvgaWidth = 800;
@@ -44,7 +47,7 @@
 constexpr int kMaxFps = 30;
 constexpr int kStreamId = 0;
 constexpr int kSecondStreamId = 1;
-constexpr int kCameraId = 42;
+constexpr int kDefaultDeviceId = 0;
 
 using ::aidl::android::companion::virtualcamera::BnVirtualCameraCallback;
 using ::aidl::android::companion::virtualcamera::Format;
@@ -104,32 +107,13 @@
   MOCK_METHOD(ndk::ScopedAStatus, onStreamClosed, (int), (override));
 };
 
-class VirtualCameraSessionTest : public ::testing::Test {
+class VirtualCameraSessionTestBase : public ::testing::Test {
  public:
-  void SetUp() override {
+  virtual void SetUp() override {
     mMockCameraDeviceCallback =
         ndk::SharedRefBase::make<MockCameraDeviceCallback>();
     mMockVirtualCameraClientCallback =
         ndk::SharedRefBase::make<MockVirtualCameraCallback>();
-    mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
-        kCameraId,
-        VirtualCameraConfiguration{
-            .supportedStreamConfigs = {SupportedStreamConfiguration{
-                                           .width = kVgaWidth,
-                                           .height = kVgaHeight,
-                                           .pixelFormat = Format::YUV_420_888,
-                                           .maxFps = kMaxFps},
-                                       SupportedStreamConfiguration{
-                                           .width = kSvgaWidth,
-                                           .height = kSvgaHeight,
-                                           .pixelFormat = Format::YUV_420_888,
-                                           .maxFps = kMaxFps}},
-            .virtualCameraCallback = nullptr,
-            .sensorOrientation = SensorOrientation::ORIENTATION_0,
-            .lensFacing = LensFacing::FRONT});
-    mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
-        mVirtualCameraDevice, mMockCameraDeviceCallback,
-        mMockVirtualCameraClientCallback);
 
     // Explicitly defining default actions below to prevent gmock from
     // default-constructing ndk::ScopedAStatus, because default-constructed
@@ -155,6 +139,36 @@
  protected:
   std::shared_ptr<MockCameraDeviceCallback> mMockCameraDeviceCallback;
   std::shared_ptr<MockVirtualCameraCallback> mMockVirtualCameraClientCallback;
+};
+
+class VirtualCameraSessionTest : public VirtualCameraSessionTestBase {
+ public:
+  void SetUp() override {
+    VirtualCameraSessionTestBase::SetUp();
+
+    mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
+        kCameraId,
+        VirtualCameraConfiguration{
+            .supportedStreamConfigs = {SupportedStreamConfiguration{
+                                           .width = kVgaWidth,
+                                           .height = kVgaHeight,
+                                           .pixelFormat = Format::YUV_420_888,
+                                           .maxFps = kMaxFps},
+                                       SupportedStreamConfiguration{
+                                           .width = kSvgaWidth,
+                                           .height = kSvgaHeight,
+                                           .pixelFormat = Format::YUV_420_888,
+                                           .maxFps = kMaxFps}},
+            .virtualCameraCallback = mMockVirtualCameraClientCallback,
+            .sensorOrientation = SensorOrientation::ORIENTATION_0,
+            .lensFacing = LensFacing::FRONT},
+        kDefaultDeviceId);
+    mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
+        mVirtualCameraDevice, mMockCameraDeviceCallback,
+        mMockVirtualCameraClientCallback);
+  }
+
+ protected:
   std::shared_ptr<VirtualCameraDevice> mVirtualCameraDevice;
   std::shared_ptr<VirtualCameraSession> mVirtualCameraSession;
 };
@@ -272,6 +286,185 @@
       Eq(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT)));
 }
 
+TEST_F(VirtualCameraSessionTest, ConfigureWithDifferentAspectRatioFails) {
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {
+      createStream(kStreamId, kVgaWidth, kVgaHeight, PixelFormat::YCBCR_420_888),
+      createStream(kSecondStreamId, kVgaHeight, kVgaWidth,
+                   PixelFormat::YCBCR_420_888)};
+
+  std::vector<HalStream> halStreams;
+
+  // Expect the configuration attempt to return the ILLEGAL_ARGUMENT
+  // service-specific code.
+  EXPECT_THAT(
+      mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .getServiceSpecificError(),
+      Eq(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT)));
+}
+
+class VirtualCameraSessionInputChoiceTest : public VirtualCameraSessionTestBase {
+ public:
+  std::shared_ptr<VirtualCameraSession> createSession(
+      const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
+    mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
+        kCameraId,
+        VirtualCameraConfiguration{
+            .supportedStreamConfigs = supportedInputConfigs,
+            .virtualCameraCallback = mMockVirtualCameraClientCallback,
+            .sensorOrientation = SensorOrientation::ORIENTATION_0,
+            .lensFacing = LensFacing::FRONT},
+        kDefaultDeviceId);
+    return ndk::SharedRefBase::make<VirtualCameraSession>(
+        mVirtualCameraDevice, mMockCameraDeviceCallback,
+        mMockVirtualCameraClientCallback);
+  }
+
+ protected:
+  std::shared_ptr<VirtualCameraDevice> mVirtualCameraDevice;
+};
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+       configureChoosesCorrectInputStreamForDownsampledOutput) {
+  // Create camera configured to support SVGA YUV input and RGB QVGA input.
+  auto virtualCameraSession = createSession(
+      {SupportedStreamConfiguration{.width = kSvgaWidth,
+                                    .height = kSvgaHeight,
+                                    .pixelFormat = Format::YUV_420_888,
+                                    .maxFps = kMaxFps},
+       SupportedStreamConfiguration{.width = kQvgaWidth,
+                                    .height = kQvgaHeight,
+                                    .pixelFormat = Format::RGBA_8888,
+                                    .maxFps = kMaxFps}});
+
+  // Configure VGA stream. Expect SVGA input to be chosen to downscale from.
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {createStream(
+      kStreamId, kVgaWidth, kVgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+  std::vector<HalStream> halStreams;
+
+  // Expect onStreamConfigured to report the SVGA YUV input configuration.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kSvgaWidth, kSvgaHeight,
+                                 Format::YUV_420_888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+}
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+       configureChoosesCorrectInputStreamForMatchingResolution) {
+  // Create camera configured to support SVGA YUV input and RGB QVGA input.
+  auto virtualCameraSession = createSession(
+      {SupportedStreamConfiguration{.width = kSvgaWidth,
+                                    .height = kSvgaHeight,
+                                    .pixelFormat = Format::YUV_420_888,
+                                    .maxFps = kMaxFps},
+       SupportedStreamConfiguration{.width = kQvgaWidth,
+                                    .height = kQvgaHeight,
+                                    .pixelFormat = Format::RGBA_8888,
+                                    .maxFps = kMaxFps}});
+
+  // Configure QVGA stream. Expect the QVGA input with matching resolution to be chosen.
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {createStream(
+      kStreamId, kQvgaWidth, kQvgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+  std::vector<HalStream> halStreams;
+
+  // Expect onStreamConfigured to report the QVGA RGBA input configuration.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kQvgaWidth, kQvgaHeight,
+                                 Format::RGBA_8888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+}
+
+TEST_F(VirtualCameraSessionInputChoiceTest, reconfigureSwitchesInputStream) {
+  // Create camera configured to support SVGA YUV input and RGB QVGA input.
+  auto virtualCameraSession = createSession(
+      {SupportedStreamConfiguration{.width = kSvgaWidth,
+                                    .height = kSvgaHeight,
+                                    .pixelFormat = Format::YUV_420_888,
+                                    .maxFps = kMaxFps},
+       SupportedStreamConfiguration{.width = kQvgaWidth,
+                                    .height = kQvgaHeight,
+                                    .pixelFormat = Format::RGBA_8888,
+                                    .maxFps = kMaxFps}});
+
+  // First configure QVGA stream.
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {createStream(
+      kStreamId, kQvgaWidth, kQvgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+  std::vector<HalStream> halStreams;
+
+  // Expect the QVGA input configuration to be chosen.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kQvgaWidth, kQvgaHeight,
+                                 Format::RGBA_8888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+
+  // Reconfigure with an additional VGA stream.
+  streamConfiguration.streams.push_back(
+      createStream(kStreamId + 1, kVgaWidth, kVgaHeight,
+                   PixelFormat::IMPLEMENTATION_DEFINED));
+
+  // Expect original surface to be discarded.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback, onStreamClosed(kStreamId));
+
+  // Expect the SVGA input configuration to be chosen.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId + 1, _, kSvgaWidth, kSvgaHeight,
+                                 Format::YUV_420_888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+}
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+       reconfigureKeepsInputStreamIfUnchanged) {
+  // Create camera configured to support SVGA YUV input and RGB QVGA input.
+  auto virtualCameraSession = createSession(
+      {SupportedStreamConfiguration{.width = kSvgaWidth,
+                                    .height = kSvgaHeight,
+                                    .pixelFormat = Format::YUV_420_888,
+                                    .maxFps = kMaxFps},
+       SupportedStreamConfiguration{.width = kQvgaWidth,
+                                    .height = kQvgaHeight,
+                                    .pixelFormat = Format::RGBA_8888,
+                                    .maxFps = kMaxFps}});
+
+  // First configure SVGA stream.
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {createStream(
+      kStreamId, kSvgaWidth, kSvgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+  std::vector<HalStream> halStreams;
+
+  // Expect the SVGA input configuration to be chosen.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kSvgaWidth, kSvgaHeight,
+                                 Format::YUV_420_888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+
+  // Reconfigure with two VGA streams. Because we only allow downscaling,
+  // these will still be matched to the SVGA input resolution.
+  streamConfiguration.streams = {
+      createStream(kStreamId + 1, kVgaWidth, kVgaHeight,
+                   PixelFormat::IMPLEMENTATION_DEFINED),
+      createStream(kStreamId + 2, kVgaWidth, kVgaHeight,
+                   PixelFormat::IMPLEMENTATION_DEFINED)};
+
+  // Expect the onStreamConfigured callback not to be invoked, since the
+  // original Surface is still best fit for current output streams.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback, onStreamConfigured).Times(0);
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+}
+
 }  // namespace
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/util/EglDisplayContext.cc b/services/camera/virtualcamera/util/EglDisplayContext.cc
index 6d343a2..ccd0d71 100644
--- a/services/camera/virtualcamera/util/EglDisplayContext.cc
+++ b/services/camera/virtualcamera/util/EglDisplayContext.cc
@@ -30,8 +30,9 @@
 namespace companion {
 namespace virtualcamera {
 
-EglDisplayContext::EglDisplayContext()
+EglDisplayContext::EglDisplayContext(std::shared_ptr<ANativeWindow> nativeWindow)
     : mEglDisplay(EGL_NO_DISPLAY),
+      mEglSurface(EGL_NO_SURFACE),
       mEglContext(EGL_NO_CONTEXT),
       mEglConfig(nullptr) {
   EGLBoolean result;
@@ -52,8 +53,12 @@
 
   EGLint numConfigs = 0;
   EGLint configAttribs[] = {
-      EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, EGL_RENDERABLE_TYPE,
-      EGL_OPENGL_ES2_BIT, EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8,
+      EGL_SURFACE_TYPE,
+      nativeWindow == nullptr
+          ? EGL_PBUFFER_BIT  // Render into individual AHardwareBuffer
+          : EGL_WINDOW_BIT,  // Render into Surface (ANativeWindow)
+      EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_RED_SIZE, 8, EGL_GREEN_SIZE,
+      8, EGL_BLUE_SIZE, 8,
       // no alpha
       EGL_NONE};
 
@@ -72,6 +77,17 @@
     return;
   }
 
+  if (nativeWindow != nullptr) {
+    mEglSurface = eglCreateWindowSurface(mEglDisplay, mEglConfig,
+                                         nativeWindow.get(), NULL);
+    if (mEglSurface == EGL_NO_SURFACE) {
+      ALOGE("eglCreateWindowSurface error: %#x", eglGetError());
+    }
+  }
+
+  // EGL is a state machine. Now that the context (and optional window surface)
+  // is ready, bind it as the "current" configuration for this thread.
   if (!makeCurrent()) {
     ALOGE(
         "Failed to set newly initialized EGLContext and EGLDisplay connection "
@@ -82,13 +98,16 @@
 }
 
 EglDisplayContext::~EglDisplayContext() {
-  if (mEglDisplay != EGL_NO_DISPLAY) {
-    eglTerminate(mEglDisplay);
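+  // Unbind the context from this thread before destroying the surface and
+  // context; the display is terminated last.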
+  eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
+  if (mEglSurface != EGL_NO_SURFACE) {
+    eglDestroySurface(mEglDisplay, mEglSurface);
   }
   if (mEglContext != EGL_NO_CONTEXT) {
     eglDestroyContext(mEglDisplay, mEglContext);
   }
-  eglReleaseThread();
+  if (mEglDisplay != EGL_NO_DISPLAY) {
+    eglTerminate(mEglDisplay);
+  }
 }
 
 EGLDisplay EglDisplayContext::getEglDisplay() const {
@@ -99,8 +118,14 @@
   return mEglContext != EGL_NO_CONTEXT && mEglDisplay != EGL_NO_DISPLAY;
 }
 
+void EglDisplayContext::swapBuffers() const {
+  if (mEglSurface != EGL_NO_SURFACE) {
+    eglSwapBuffers(mEglDisplay, mEglSurface);
+  }
+}
+
 bool EglDisplayContext::makeCurrent() {
-  if (!eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, mEglContext)) {
+  if (!eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
     ALOGE("eglMakeCurrent failed: %#x", eglGetError());
     return false;
   }
diff --git a/services/camera/virtualcamera/util/EglDisplayContext.h b/services/camera/virtualcamera/util/EglDisplayContext.h
index 402ca3c..6dc3080 100644
--- a/services/camera/virtualcamera/util/EglDisplayContext.h
+++ b/services/camera/virtualcamera/util/EglDisplayContext.h
@@ -17,7 +17,10 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLDISPLAYCONTEXT_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLDISPLAYCONTEXT_H
 
+#include <memory>
+
 #include "EGL/egl.h"
+#include "system/window.h"
 
 namespace android {
 namespace companion {
@@ -30,7 +33,7 @@
 // out of scope.
 class EglDisplayContext {
  public:
-  EglDisplayContext();
+  EglDisplayContext(std::shared_ptr<ANativeWindow> nativeWindow = nullptr);
   ~EglDisplayContext();
 
   // Sets EGLDisplay & EGLContext for current thread.
@@ -44,8 +47,13 @@
   // EGLDisplay & EGLContext.
   bool isInitialized() const;
 
+  void swapBuffers() const;
+
  private:
+  std::shared_ptr<ANativeWindow> mNativeWindow;
+
   EGLDisplay mEglDisplay;
+  EGLSurface mEglSurface;
   EGLContext mEglContext;
   EGLConfig mEglConfig;
 };
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 7554a67..eda4169 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -35,19 +35,28 @@
 
 constexpr char kGlExtYuvTarget[] = "GL_EXT_YUV_target";
 
-constexpr char kIdentityVertexShader[] = R"(
-    attribute vec4 vPosition;
+constexpr char kJuliaFractalVertexShader[] = R"(#version 300 es
+    in vec4 aPosition;
+    in vec2 aTextureCoord;
+    out vec2 vFractalCoord;
+    out vec2 vUVCoord;
     void main() {
-      gl_Position = vPosition;
+      gl_Position = aPosition;
+      vUVCoord = aTextureCoord;
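+      // Map texture coordinates from [0, 1] to [-2, 2] so the fragment shader
+      // samples the interesting region of the complex plane.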
+      vFractalCoord = vec2(aTextureCoord.x - 0.5, aTextureCoord.y - 0.5) * 4.0;
     })";
 
-constexpr char kJuliaFractalFragmentShader[] = R"(
+constexpr char kJuliaFractalFragmentShader[] = R"(#version 300 es
+    #extension GL_EXT_YUV_target : require
     precision mediump float;
-    uniform vec2 uResolution;
-    uniform vec2 uC;
-    uniform vec2 uUV;
+
     const float kIter = 64.0;
 
+    in vec2 vFractalCoord;
+    in vec2 vUVCoord;
+    out vec4 fragColor;
+    uniform vec2 uC;
+
     vec2 imSq(vec2 n){
       return vec2(pow(n.x,2.0)-pow(n.y,2.0), 2.0*n.x*n.y);
     }
@@ -62,9 +71,8 @@
     }
 
     void main() {
-      vec2 uv = vec2(gl_FragCoord.x / uResolution.x - 0.5, gl_FragCoord.y / uResolution.y - 0.5);
-      float juliaVal = julia(uv * 4.0, uC);
-      gl_FragColor = vec4( juliaVal,uUV.x,uUV.y,0.0);
+      float juliaVal = julia(vFractalCoord, uC);
+      fragColor = vec4(yuv_2_rgb(vec3(juliaVal, vUVCoord.x, vUVCoord.y), itu_601_full_range), 0.0);
     })";
 
 constexpr char kExternalTextureVertexShader[] = R"(#version 300 es
@@ -88,8 +96,9 @@
       fragColor = texture(uTexture, vTextureCoord);
     })";
 
+// Shader to render an RGBA texture into a YUV buffer.
 constexpr char kExternalRgbaTextureFragmentShader[] = R"(#version 300 es
-    #extension GL_OES_EGL_image_external : require
+    #extension GL_OES_EGL_image_external_essl3 : require
     #extension GL_EXT_YUV_target : require
     precision mediump float;
     in vec2 vTextureCoord;
@@ -200,47 +209,46 @@
 }
 
 EglTestPatternProgram::EglTestPatternProgram() {
-  if (initialize(kIdentityVertexShader, kJuliaFractalFragmentShader)) {
+  if (initialize(kJuliaFractalVertexShader, kJuliaFractalFragmentShader)) {
     ALOGV("Successfully initialized EGL shaders for test pattern program.");
   } else {
     ALOGE("Test pattern EGL shader program initialization failed.");
   }
+
+  mCHandle = glGetUniformLocation(mProgram, "uC");
+  mPositionHandle = glGetAttribLocation(mProgram, "aPosition");
+  mTextureCoordHandle = glGetAttribLocation(mProgram, "aTextureCoord");
+
+  // Pass vertex array to draw.
+  glEnableVertexAttribArray(mPositionHandle);
+  // Prepare the triangle coordinate data.
+  glVertexAttribPointer(mPositionHandle, kCoordsPerVertex, GL_FLOAT, false,
+                        kSquareCoords.size(), kSquareCoords.data());
+
+  glEnableVertexAttribArray(mTextureCoordHandle);
+  glVertexAttribPointer(mTextureCoordHandle, 2, GL_FLOAT, false,
+                        kTextureCoords.size(), kTextureCoords.data());
 }
 
-bool EglTestPatternProgram::draw(int width, int height, int frameNumber) {
-  glViewport(0, 0, static_cast<GLsizei>(width), static_cast<GLsizei>(height));
-  checkEglError("glViewport");
+EglTestPatternProgram::~EglTestPatternProgram() {
+  if (mPositionHandle != -1) {
+    glDisableVertexAttribArray(mPositionHandle);
+  }
+  if (mTextureCoordHandle != -1) {
+    glDisableVertexAttribArray(mTextureCoordHandle);
+  }
+}
 
+bool EglTestPatternProgram::draw(const std::chrono::nanoseconds timestamp) {
   // Load compiled shader.
   glUseProgram(mProgram);
   checkEglError("glUseProgram");
 
-  // Compute point in complex plane corresponding to fractal for this frame number.
-  float time = float(frameNumber) / 120.0f;
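+  // Derive the animation phase from the buffer timestamp (seconds / 10) rather
+  // than the frame number, so the pattern animates in wall-clock time.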
+  float time = float(timestamp.count() / 1e9) / 10;
   const std::complex<float> c(std::sin(time) * 0.78f, std::cos(time) * 0.78f);
 
-  // Pass uniform values to the shader.
-  int resolutionHandle = glGetUniformLocation(mProgram, "uResolution");
-  checkEglError("glGetUniformLocation -> uResolution");
-  glUniform2f(resolutionHandle, static_cast<float>(width),
-              static_cast<float>(height));
-  checkEglError("glUniform2f -> uResolution");
-
   // Pass "C" constant value determining the Julia set to the shader.
-  int cHandle = glGetUniformLocation(mProgram, "uC");
-  glUniform2f(cHandle, c.imag(), c.real());
-
-  // Pass chroma value to the shader.
-  int uvHandle = glGetUniformLocation(mProgram, "uUV");
-  glUniform2f(uvHandle, (c.imag() + 1.f) / 2.f, (c.real() + 1.f) / 2.f);
-
-  // Pass vertex array to draw.
-  int positionHandle = glGetAttribLocation(mProgram, "vPosition");
-  glEnableVertexAttribArray(positionHandle);
-
-  // Prepare the triangle coordinate data.
-  glVertexAttribPointer(positionHandle, kCoordsPerVertex, GL_FLOAT, false,
-                        kSquareCoords.size(), kSquareCoords.data());
+  glUniform2f(mCHandle, c.imag(), c.real());
 
   // Draw triangle strip forming a square filling the viewport.
   glDrawElements(GL_TRIANGLES, kDrawOrder.size(), GL_UNSIGNED_BYTE,
diff --git a/services/camera/virtualcamera/util/EglProgram.h b/services/camera/virtualcamera/util/EglProgram.h
index c695cbb..cf93157 100644
--- a/services/camera/virtualcamera/util/EglProgram.h
+++ b/services/camera/virtualcamera/util/EglProgram.h
@@ -18,6 +18,7 @@
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLPROGRAM_H
 
 #include <array>
+#include <chrono>
 
 #include "GLES/gl.h"
 
@@ -45,8 +46,14 @@
 class EglTestPatternProgram : public EglProgram {
  public:
   EglTestPatternProgram();
+  virtual ~EglTestPatternProgram();
 
-  bool draw(int width, int height, int frameNumber);
+  bool draw(std::chrono::nanoseconds timestamp);
+
+ private:
+  int mPositionHandle = -1;
+  int mTextureCoordHandle = -1;
+  int mCHandle = -1;
 };
 
 // Shader program to draw texture.
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index 9f26e19..98bf62a 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -15,21 +15,30 @@
  */
 
 // #define LOG_NDEBUG 0
+#include "utils/Timers.h"
 #define LOG_TAG "EglSurfaceTexture"
 
+#include <GLES/gl.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/BufferQueue.h>
+#include <gui/GLConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <hardware/gralloc.h>
+
 #include <cstdint>
 
 #include "EglSurfaceTexture.h"
 #include "EglUtil.h"
-#include "GLES/gl.h"
-#include "gui/BufferQueue.h"
-#include "gui/GLConsumer.h"
-#include "gui/IGraphicBufferProducer.h"
-#include "hardware/gralloc.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
+namespace {
+
+// Maximum number of buffers the producer can dequeue without blocking.
+constexpr int kBufferProducerMaxDequeueBufferCount = 64;
+
+}  // namespace
 
 EglSurfaceTexture::EglSurfaceTexture(const uint32_t width, const uint32_t height)
     : mWidth(width), mHeight(height) {
@@ -38,7 +47,23 @@
     ALOGE("Failed to generate texture");
     return;
   }
+
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
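+  // When ConsumerBase owns the BufferQueue (WB_CONSUMER_BASE_OWNS_BQ), the
+  // Surface is obtained directly from the GLConsumer instead of creating a
+  // separate producer/consumer pair.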
+  mGlConsumer = sp<GLConsumer>::make(mTextureId, GLConsumer::TEXTURE_EXTERNAL,
+                                     false, false);
+  mGlConsumer->setName(String8("VirtualCameraEglSurfaceTexture"));
+  mGlConsumer->setDefaultBufferSize(mWidth, mHeight);
+  mGlConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_TEXTURE);
+  mGlConsumer->setDefaultBufferFormat(AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
+
+  mSurface = mGlConsumer->getSurface();
+  mSurface->setMaxDequeuedBufferCount(kBufferProducerMaxDequeueBufferCount);
+#else
   BufferQueue::createBufferQueue(&mBufferProducer, &mBufferConsumer);
+  // Set the producer's max dequeued buffer count to the maximum value to
+  // prevent blocking when dequeuing input buffers.
+  mBufferProducer->setMaxDequeuedBufferCount(
+      kBufferProducerMaxDequeueBufferCount);
   mGlConsumer = sp<GLConsumer>::make(
       mBufferConsumer, mTextureId, GLConsumer::TEXTURE_EXTERNAL, false, false);
   mGlConsumer->setName(String8("VirtualCameraEglSurfaceTexture"));
@@ -47,6 +72,7 @@
   mGlConsumer->setDefaultBufferFormat(AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
 
   mSurface = sp<Surface>::make(mBufferProducer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 }
 
 EglSurfaceTexture::~EglSurfaceTexture() {
@@ -63,8 +89,37 @@
   return mGlConsumer->getCurrentBuffer();
 }
 
+void EglSurfaceTexture::setFrameAvailableListener(
+    const wp<ConsumerBase::FrameAvailableListener>& listener) {
+  mGlConsumer->setFrameAvailableListener(listener);
+}
+
+bool EglSurfaceTexture::waitForNextFrame(const std::chrono::nanoseconds timeout) {
+  return mSurface->waitForNextFrame(mGlConsumer->getFrameNumber(),
+                                    static_cast<nsecs_t>(timeout.count()));
+}
+
 GLuint EglSurfaceTexture::updateTexture() {
-  mGlConsumer->updateTexImage();
+  int previousFrameId;
+  int framesAdvance = 0;
+  // Consume buffers one at a time.
+  // Contrary to the code comments in GLConsumer, GLConsumer acquires the
+  // next queued buffer (not the most recently queued buffer).
+  while (true) {
+    previousFrameId = mGlConsumer->getFrameNumber();
+    mGlConsumer->updateTexImage();
+    int currentFrameId = mGlConsumer->getFrameNumber();
+    if (previousFrameId == currentFrameId) {
+      // Frame number didn't change after updating the texture;
+      // this means we're at the end of the queue and the currently attached
+      // buffer is the most recent one.
+      break;
+    }
+
+    framesAdvance++;
+    previousFrameId = currentFrameId;
+  }
+  ALOGV("%s: Advanced %d frames", __func__, framesAdvance);
   return mTextureId;
 }
 
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index faad7c4..a46af8f 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -17,16 +17,21 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLSURFACETEXTURE_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLSURFACETEXTURE_H
 
-#include <cstdint>
+#include <GLES/gl.h>
+#include <gui/ConsumerBase.h>
+#include <gui/Surface.h>
+#include <utils/RefBase.h>
 
-#include "GLES/gl.h"
-#include "gui/Surface.h"
-#include "utils/RefBase.h"
+#include <chrono>
+#include <cstdint>
 
 namespace android {
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 class IGraphicBufferProducer;
 class IGraphicBufferConsumer;
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+
 class GLConsumer;
 
 namespace companion {
@@ -51,6 +56,15 @@
   // Get height of surface / texture.
   uint32_t getHeight() const;
 
+  // Wait for the next frame to become available in the surface,
+  // up to the specified timeout.
+  //
+  // Returns false on timeout, true if a new frame was received in time.
+  bool waitForNextFrame(std::chrono::nanoseconds timeout);
+
+  void setFrameAvailableListener(
+      const wp<ConsumerBase::FrameAvailableListener>& listener);
+
   // Update the texture with the most recent submitted buffer.
   // Must be called on a thread with an EGL context.
   //
@@ -69,8 +83,10 @@
   std::array<float, 16> getTransformMatrix();
 
  private:
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
   sp<IGraphicBufferProducer> mBufferProducer;
   sp<IGraphicBufferConsumer> mBufferConsumer;
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
   sp<GLConsumer> mGlConsumer;
   sp<Surface> mSurface;
   GLuint mTextureId;
diff --git a/services/camera/virtualcamera/util/EglUtil.cc b/services/camera/virtualcamera/util/EglUtil.cc
index 481d8f0..1c685f1 100644
--- a/services/camera/virtualcamera/util/EglUtil.cc
+++ b/services/camera/virtualcamera/util/EglUtil.cc
@@ -20,6 +20,7 @@
 
 #include <cstring>
 
+#include "EglDisplayContext.h"
 #include "GLES/gl.h"
 #include "log/log.h"
 
@@ -27,6 +28,9 @@
 namespace companion {
 namespace virtualcamera {
 
+// Fallback used when the maximum texture size cannot be queried;
+// at least 2048x2048 is expected to be supported.
+constexpr int kDefaultMaxTextureSize = 2048;
+
 bool checkEglError(const char* operation) {
   GLenum err = glGetError();
   if (err == GL_NO_ERROR) {
@@ -45,6 +49,20 @@
   return strstr(extensions, extension) != nullptr;
 }
 
+int getMaximumTextureSize() {
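+  // Lazily query GL_MAX_TEXTURE_SIZE once via a temporary EglDisplayContext
+  // and fall back to a conservative default when the query fails.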
+  static const int kMaxTextureSize = [] {
+    EglDisplayContext displayContext;
+    displayContext.makeCurrent();
+    int maxTextureSize = -1;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
+    return maxTextureSize;
+  }();
+  if (kMaxTextureSize <= 0) {
+    return kDefaultMaxTextureSize;
+  }
+  return kMaxTextureSize;
+}
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/EglUtil.h b/services/camera/virtualcamera/util/EglUtil.h
index 71640e3..f339b4e 100644
--- a/services/camera/virtualcamera/util/EglUtil.h
+++ b/services/camera/virtualcamera/util/EglUtil.h
@@ -27,6 +27,8 @@
 // Returns true if the GL extension is supported, false otherwise.
 bool isGlExtensionSupported(const char* extension);
 
+int getMaximumTextureSize();
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/JpegUtil.cc b/services/camera/virtualcamera/util/JpegUtil.cc
index 2b19c13..b034584 100644
--- a/services/camera/virtualcamera/util/JpegUtil.cc
+++ b/services/camera/virtualcamera/util/JpegUtil.cc
@@ -14,19 +14,20 @@
  * limitations under the License.
  */
 // #define LOG_NDEBUG 0
+#include "system/graphics.h"
 #define LOG_TAG "JpegUtil"
-#include "JpegUtil.h"
-
 #include <cstddef>
 #include <cstdint>
-#include <memory>
+#include <optional>
 #include <vector>
 
+#include "JpegUtil.h"
 #include "android/hardware_buffer.h"
 #include "jpeglib.h"
 #include "log/log.h"
 #include "ui/GraphicBuffer.h"
 #include "ui/GraphicBufferMapper.h"
+#include "util/Util.h"
 #include "utils/Errors.h"
 
 namespace android {
@@ -34,11 +35,11 @@
 namespace virtualcamera {
 namespace {
 
-constexpr int kJpegQuality = 80;
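+// With 4:2:0 subsampling a JPEG MCU covers 2 * DCTSIZE (16) luma pixels in
+// each dimension, so input buffer dimensions must be multiples of this value.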
+constexpr int k2DCTSIZE = 2 * DCTSIZE;
 
 class LibJpegContext {
  public:
-  LibJpegContext(int width, int height, const size_t outBufferSize,
+  LibJpegContext(int width, int height, int quality, const size_t outBufferSize,
                  void* outBuffer)
       : mWidth(width),
         mHeight(height),
@@ -76,7 +77,7 @@
     jpeg_set_defaults(&mCompressStruct);
 
     // Set quality and colorspace.
-    jpeg_set_quality(&mCompressStruct, kJpegQuality, 1);
+    jpeg_set_quality(&mCompressStruct, quality, 1);
     jpeg_set_colorspace(&mCompressStruct, JCS_YCbCr);
 
     // Configure RAW input mode - this lets libjpeg know we're providing raw,
@@ -94,11 +95,63 @@
     mCompressStruct.comp_info[2].v_samp_factor = 1;
   }
 
-  bool compress(const android_ycbcr& ycbr) {
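+  // Attaches an optional APP1 (EXIF) payload to be written as a JPEG marker
+  // during compression. The data is not copied and must remain valid until
+  // compress() is called.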
+  LibJpegContext& setApp1Data(const uint8_t* app1Data, const size_t size) {
+    mApp1Data = app1Data;
+    mApp1DataSize = size;
+    return *this;
+  }
+
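+  // Compresses the YUV_420_888 contents of inBuffer into the output buffer.
+  // Returns the size of the compressed image in bytes on success, or an
+  // empty optional on failure.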
+  std::optional<size_t> compress(std::shared_ptr<AHardwareBuffer> inBuffer) {
+    GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inBuffer.get());
+
+    if (gBuffer == nullptr) {
+      ALOGE("%s: Input graphic buffer is nullptr", __func__);
+      return std::nullopt;
+    }
+
+    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+      // This should never happen since we're allocating the temporary buffer
+      // with YUV420 layout above.
+      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
+            gBuffer->getPixelFormat());
+      return std::nullopt;
+    }
+
+    YCbCrLockGuard yCbCrLock(inBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+    if (yCbCrLock.getStatus() != OK) {
+      ALOGE("%s: Failed to lock the input buffer: %s", __func__,
+            statusToString(yCbCrLock.getStatus()).c_str());
+      return std::nullopt;
+    }
+    const android_ycbcr& ycbr = *yCbCrLock;
+
+    const int inBufferWidth = gBuffer->getWidth();
+    const int inBufferHeight = gBuffer->getHeight();
+
+    if ((inBufferWidth % k2DCTSIZE) || (inBufferHeight % k2DCTSIZE)) {
+      ALOGE(
+          "%s: Compressing YUV420 buffer with size %dx%d not aligned with 2 * "
+          "DCTSIZE (%d) is not currently supported.",
+          __func__, inBufferWidth, inBufferHeight, k2DCTSIZE);
+      return std::nullopt;
+    }
+
+    if (inBufferWidth < mWidth || inBufferHeight < mHeight) {
+      ALOGE(
+          "%s: Input buffer has smaller size (%dx%d) than image to be "
+          "compressed (%dx%d)",
+          __func__, inBufferWidth, inBufferHeight, mWidth, mHeight);
+      return std::nullopt;
+    }
+
+    // Chroma planes have 1/2 resolution of the original image.
+    const int cHeight = inBufferHeight / 2;
+    const int cWidth = inBufferWidth / 2;
+
     // Prepare arrays of pointers to scanlines of each plane.
-    std::vector<JSAMPROW> yLines(mHeight);
-    std::vector<JSAMPROW> cbLines(mHeight / 2);
-    std::vector<JSAMPROW> crLines(mHeight / 2);
+    std::vector<JSAMPROW> yLines(inBufferHeight);
+    std::vector<JSAMPROW> cbLines(cHeight);
+    std::vector<JSAMPROW> crLines(cHeight);
 
     uint8_t* y = static_cast<uint8_t*>(ycbr.y);
     uint8_t* cb = static_cast<uint8_t*>(ycbr.cb);
@@ -107,42 +160,34 @@
     // Since UV samples might be interleaved (semiplanar) we need to copy
     // them to separate planes, since libjpeg doesn't directly
     // support processing semiplanar YUV.
-    const int c_samples = (mWidth / 2) * (mHeight / 2);
-    std::vector<uint8_t> cb_plane(c_samples);
-    std::vector<uint8_t> cr_plane(c_samples);
+    const int cSamples = cWidth * cHeight;
+    std::vector<uint8_t> cb_plane(cSamples);
+    std::vector<uint8_t> cr_plane(cSamples);
 
     // TODO(b/301023410) - Use libyuv or ARM SIMD for "unzipping" the data.
-    for (int i = 0; i < c_samples; ++i) {
-      cb_plane[i] = *cb;
-      cr_plane[i] = *cr;
-      cb += ycbr.chroma_step;
-      cr += ycbr.chroma_step;
+    int out_idx = 0;
+    for (int i = 0; i < cHeight; ++i) {
+      for (int j = 0; j < cWidth; ++j) {
+        cb_plane[out_idx] = cb[j * ycbr.chroma_step];
+        cr_plane[out_idx] = cr[j * ycbr.chroma_step];
+        out_idx++;
+      }
+      cb += ycbr.cstride;
+      cr += ycbr.cstride;
     }
 
     // Collect pointers to individual scanline of each plane.
-    for (int i = 0; i < mHeight; ++i) {
+    for (int i = 0; i < inBufferHeight; ++i) {
       yLines[i] = y + i * ycbr.ystride;
     }
-    for (int i = 0; i < (mHeight / 2); ++i) {
-      cbLines[i] = cb_plane.data() + i * (mWidth / 2);
-      crLines[i] = cr_plane.data() + i * (mWidth / 2);
+    for (int i = 0; i < cHeight; ++i) {
+      cbLines[i] = cb_plane.data() + i * cWidth;
+      crLines[i] = cr_plane.data() + i * cWidth;
     }
 
     return compress(yLines, cbLines, crLines);
   }
 
-  bool compressBlackImage() {
-    // We only really need to prepare one scanline for Y and one shared scanline
-    // for Cb & Cr.
-    std::vector<uint8_t> yLine(mWidth, 0);
-    std::vector<uint8_t> chromaLine(mWidth / 2, 0xff / 2);
-
-    std::vector<JSAMPROW> yLines(mHeight, yLine.data());
-    std::vector<JSAMPROW> cLines(mHeight / 2, chromaLine.data());
-
-    return compress(yLines, cLines, cLines);
-  }
-
  private:
   void setSuccess(const boolean success) {
     mSuccess = success;
@@ -165,11 +210,18 @@
   // Takes vector of pointers to Y / Cb / Cr scanlines as an input. Length of
   // each vector needs to correspond to height of corresponding plane.
   //
-  // Returns true if compression is successful, false otherwise.
-  bool compress(std::vector<JSAMPROW>& yLines, std::vector<JSAMPROW>& cbLines,
-                std::vector<JSAMPROW>& crLines) {
+  // Returns size of compressed image in bytes on success, empty optional otherwise.
+  std::optional<size_t> compress(std::vector<JSAMPROW>& yLines,
+                                 std::vector<JSAMPROW>& cbLines,
+                                 std::vector<JSAMPROW>& crLines) {
     jpeg_start_compress(&mCompressStruct, TRUE);
 
+    if (mApp1Data != nullptr && mApp1DataSize > 0) {
+      ALOGV("%s: Writing exif, size %zu B", __func__, mApp1DataSize);
+      jpeg_write_marker(&mCompressStruct, JPEG_APP0 + 1,
+                        static_cast<const JOCTET*>(mApp1Data), mApp1DataSize);
+    }
+
     while (mCompressStruct.next_scanline < mCompressStruct.image_height) {
       const uint32_t batchSize = DCTSIZE * 2;
       const uint32_t nl = mCompressStruct.next_scanline;
@@ -181,11 +233,11 @@
         ALOGE("%s: compressed %u lines, expected %u (total %u/%u)",
               __FUNCTION__, done, batchSize, mCompressStruct.next_scanline,
               mCompressStruct.image_height);
-        return false;
+        return std::nullopt;
       }
     }
     jpeg_finish_compress(&mCompressStruct);
-    return mSuccess;
+    return mEncodedSize;
   }
 
   // === libjpeg callbacks below ===
@@ -217,6 +269,10 @@
   jpeg_error_mgr mErrorMgr;
   jpeg_destination_mgr mDestinationMgr;
 
+  // APP1 data.
+  const uint8_t* mApp1Data = nullptr;
+  size_t mApp1DataSize = 0;
+
   // Dimensions of the input image.
   int mWidth;
   int mHeight;
@@ -233,17 +289,28 @@
   boolean mSuccess = true;
 };
 
-}  // namespace
-
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
-                  size_t outBufferSize, void* outBuffer) {
-  return LibJpegContext(width, height, outBufferSize, outBuffer).compress(ycbcr);
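+// Rounds n up to the nearest multiple of 2 * DCTSIZE (16).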
+int roundTo2DCTMultiple(const int n) {
+  const int mod = n % k2DCTSIZE;
+  return mod == 0 ? n : n + (k2DCTSIZE - mod);
 }
 
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
-                       void* outBuffer) {
-  return LibJpegContext(width, height, outBufferSize, outBuffer)
-      .compressBlackImage();
+}  // namespace
+
+std::optional<size_t> compressJpeg(const int width, const int height,
+                                   const int quality,
+                                   std::shared_ptr<AHardwareBuffer> inBuffer,
+                                   const std::vector<uint8_t>& app1ExifData,
+                                   size_t outBufferSize, void* outBuffer) {
+  LibJpegContext context(width, height, quality, outBufferSize, outBuffer);
+  if (!app1ExifData.empty()) {
+    context.setApp1Data(app1ExifData.data(), app1ExifData.size());
+  }
+  return context.compress(inBuffer);
+}
+
+Resolution roundTo2DctSize(const Resolution resolution) {
+  return Resolution(roundTo2DCTMultiple(resolution.width),
+                    roundTo2DCTMultiple(resolution.height));
 }
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index c44d0a8..0a8df90 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -17,24 +17,37 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
 
-#include <memory>
+#include <optional>
+#include <vector>
 
 #include "android/hardware_buffer.h"
-#include "system/graphics.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 
 // Jpeg-compress image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
-                  size_t outBufferSize, void* outBuffer);
+// * width - width of the image, can be less than width of inBuffer.
+// * height - height of the image, can be less than height of inBuffer.
+// * quality - 0-100, a higher number corresponds to higher quality.
+// * inBuffer - Input buffer; the dimensions of the buffer must be aligned to
+//   2*DCT_SIZE (16) to include necessary padding in case the width and height
+//   of the image are not aligned with 2*DCT_SIZE.
+// * app1ExifData - vector containing data to be included in APP1
+//   segment. Can be empty.
+// * outBufferSize - capacity of the output buffer.
+// * outBuffer - output buffer to write compressed data into.
+// Returns size of compressed data if the compression was successful,
+// empty optional otherwise.
+std::optional<size_t> compressJpeg(int width, int height, int quality,
+                                   std::shared_ptr<AHardwareBuffer> inBuffer,
+                                   const std::vector<uint8_t>& app1ExifData,
+                                   size_t outBufferSize, void* outBuffer);
 
-// Jpeg-compress all-black image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
-                       void* outBuffer);
+// Round the resolution up to the closest resolution where width and height
+// are divisible by 2*DCT_SIZE (16).
+Resolution roundTo2DctSize(Resolution resolution);
 
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.cc b/services/camera/virtualcamera/util/MetadataBuilder.cc
deleted file mode 100644
index 70e22be..0000000
--- a/services/camera/virtualcamera/util/MetadataBuilder.cc
+++ /dev/null
@@ -1,338 +0,0 @@
-/*
- * Copyright 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "MetadataBuilder"
-
-#include "MetadataBuilder.h"
-
-#include <algorithm>
-#include <cstdint>
-#include <iterator>
-#include <memory>
-#include <utility>
-#include <variant>
-#include <vector>
-
-#include "CameraMetadata.h"
-#include "aidl/android/hardware/camera/device/CameraMetadata.h"
-#include "log/log.h"
-#include "system/camera_metadata.h"
-#include "utils/Errors.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-namespace {
-
-using ::android::hardware::camera::common::helper::CameraMetadata;
-
-template <typename To, typename From>
-std::vector<To> convertTo(const std::vector<From>& from) {
-  std::vector<To> to;
-  to.reserve(from.size());
-  std::transform(from.begin(), from.end(), std::back_inserter(to),
-                 [](const From& x) { return static_cast<To>(x); });
-  return to;
-}
-
-}  // namespace
-
-MetadataBuilder& MetadataBuilder::setSupportedHardwareLevel(
-    camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
-  mEntryMap[ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL] =
-      std::vector<uint8_t>({static_cast<uint8_t>(hwLevel)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setFlashAvailable(bool flashAvailable) {
-  const uint8_t metadataVal = flashAvailable
-                                  ? ANDROID_FLASH_INFO_AVAILABLE_TRUE
-                                  : ANDROID_FLASH_INFO_AVAILABLE_FALSE;
-  mEntryMap[ANDROID_FLASH_INFO_AVAILABLE] = std::vector<uint8_t>({metadataVal});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setLensFacing(
-    camera_metadata_enum_android_lens_facing lensFacing) {
-  mEntryMap[ANDROID_LENS_FACING] =
-      std::vector<uint8_t>({static_cast<uint8_t>(lensFacing)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorOrientation(int32_t sensorOrientation) {
-  mEntryMap[ANDROID_SENSOR_ORIENTATION] =
-      std::vector<int32_t>({sensorOrientation});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorTimestamp(
-    std::chrono::nanoseconds timestamp) {
-  mEntryMap[ANDROID_SENSOR_TIMESTAMP] =
-      std::vector<int64_t>({timestamp.count()});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableFaceDetectModes(
-    const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
-        faceDetectModes) {
-  mEntryMap[ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES] =
-      convertTo<uint8_t>(faceDetectModes);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAvailableModes(
-    const std::vector<camera_metadata_enum_android_control_mode_t>&
-        availableModes) {
-  mEntryMap[ANDROID_CONTROL_AVAILABLE_MODES] =
-      convertTo<uint8_t>(availableModes);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
-    const std::vector<camera_metadata_enum_android_control_af_mode_t>&
-        availableModes) {
-  mEntryMap[ANDROID_CONTROL_AF_AVAILABLE_MODES] =
-      convertTo<uint8_t>(availableModes);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfMode(
-    const camera_metadata_enum_android_control_af_mode_t mode) {
-  mEntryMap[ANDROID_CONTROL_AF_MODE] =
-      std::vector<uint8_t>({static_cast<uint8_t>(mode)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeAvailableFpsRange(
-    const int32_t minFps, const int32_t maxFps) {
-  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES] =
-      std::vector<int32_t>({minFps, maxFps});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlMaxRegions(int32_t maxAeRegions,
-                                                       int32_t maxAwbRegions,
-                                                       int32_t maxAfRegions) {
-  mEntryMap[ANDROID_CONTROL_MAX_REGIONS] =
-      std::vector<int32_t>({maxAeRegions, maxAwbRegions, maxAfRegions});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeRegions(
-    const std::vector<ControlRegion>& aeRegions) {
-  std::vector<int32_t> regions;
-  regions.reserve(5 * aeRegions.size());
-  for (const ControlRegion& region : aeRegions) {
-    regions.push_back(region.x0);
-    regions.push_back(region.y0);
-    regions.push_back(region.x1);
-    regions.push_back(region.y1);
-    regions.push_back(region.weight);
-  }
-  mEntryMap[ANDROID_CONTROL_AE_REGIONS] = std::move(regions);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfRegions(
-    const std::vector<ControlRegion>& afRegions) {
-  std::vector<int32_t> regions;
-  regions.reserve(5 * afRegions.size());
-  for (const ControlRegion& region : afRegions) {
-    regions.push_back(region.x0);
-    regions.push_back(region.y0);
-    regions.push_back(region.x1);
-    regions.push_back(region.y1);
-    regions.push_back(region.weight);
-  }
-  mEntryMap[ANDROID_CONTROL_AF_REGIONS] = std::move(regions);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAwbRegions(
-    const std::vector<ControlRegion>& awbRegions) {
-  std::vector<int32_t> regions;
-  regions.reserve(5 * awbRegions.size());
-  for (const ControlRegion& region : awbRegions) {
-    regions.push_back(region.x0);
-    regions.push_back(region.y0);
-    regions.push_back(region.x1);
-    regions.push_back(region.y1);
-    regions.push_back(region.weight);
-  }
-  mEntryMap[ANDROID_CONTROL_AWB_REGIONS] = std::move(regions);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlCaptureIntent(
-    const camera_metadata_enum_android_control_capture_intent_t intent) {
-  mEntryMap[ANDROID_CONTROL_CAPTURE_INTENT] =
-      std::vector<uint8_t>({static_cast<uint8_t>(intent)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setMaxJpegSize(const int32_t size) {
-  mEntryMap[ANDROID_JPEG_MAX_SIZE] = std::vector<int32_t>({size});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableOutputStreamConfigurations(
-    const std::vector<StreamConfiguration>& streamConfigurations) {
-  std::vector<int32_t> metadataStreamConfigs;
-  std::vector<int64_t> metadataMinFrameDurations;
-  std::vector<int64_t> metadataStallDurations;
-  metadataStreamConfigs.reserve(streamConfigurations.size());
-  metadataMinFrameDurations.reserve(streamConfigurations.size());
-  metadataStallDurations.reserve(streamConfigurations.size());
-
-  for (const auto& config : streamConfigurations) {
-    metadataStreamConfigs.push_back(config.format);
-    metadataStreamConfigs.push_back(config.width);
-    metadataStreamConfigs.push_back(config.height);
-    metadataStreamConfigs.push_back(
-        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
-
-    metadataMinFrameDurations.push_back(config.format);
-    metadataMinFrameDurations.push_back(config.width);
-    metadataMinFrameDurations.push_back(config.height);
-    metadataMinFrameDurations.push_back(config.minFrameDuration.count());
-
-    metadataStallDurations.push_back(config.format);
-    metadataStallDurations.push_back(config.width);
-    metadataStallDurations.push_back(config.height);
-    metadataStallDurations.push_back(config.minStallDuration.count());
-  }
-
-  mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS] =
-      metadataStreamConfigs;
-  mEntryMap[ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS] =
-      metadataMinFrameDurations;
-  mEntryMap[ANDROID_SCALER_AVAILABLE_STALL_DURATIONS] = metadataStallDurations;
-
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableMaxDigitalZoom(const float maxZoom) {
-  mEntryMap[ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM] =
-      std::vector<float>(maxZoom);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlZoomRatioRange(const float min,
-                                                           const float max) {
-  mEntryMap[ANDROID_CONTROL_ZOOM_RATIO_RANGE] = std::vector<float>({min, max});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorActiveArraySize(int x0, int y0,
-                                                           int x1, int y1) {
-  mEntryMap[ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE] =
-      std::vector<int32_t>({x0, y0, x1, y1});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeCompensationRange(int32_t min,
-                                                                int32_t max) {
-  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_RANGE] =
-      std::vector<int32_t>({min, max});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeCompensationStep(
-    const camera_metadata_rational step) {
-  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_STEP] =
-      std::vector<camera_metadata_rational>({step});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableRequestKeys(
-    const std::vector<int32_t>& keys) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS] = keys;
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableResultKeys(
-    const std::vector<int32_t>& keys) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_RESULT_KEYS] = keys;
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCapabilities(
-    const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
-        capabilities) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
-      convertTo<uint8_t>(capabilities);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys(
-    const std::vector<camera_metadata_tag_t>& keys) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS] =
-      convertTo<int32_t>(keys);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys() {
-  std::vector<camera_metadata_tag_t> availableKeys;
-  availableKeys.reserve(mEntryMap.size());
-  for (const auto& [key, _] : mEntryMap) {
-    if (key != ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS) {
-      availableKeys.push_back(key);
-    }
-  }
-  setAvailableCharacteristicKeys(availableKeys);
-  return *this;
-}
-
-std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
-MetadataBuilder::build() const {
-  CameraMetadata metadataHelper;
-  for (const auto& entry : mEntryMap) {
-    status_t ret = std::visit(
-        [&](auto&& arg) {
-          return metadataHelper.update(entry.first, arg.data(), arg.size());
-        },
-        entry.second);
-    if (ret != NO_ERROR) {
-      ALOGE("Failed to update metadata with key %d - %s: %s", entry.first,
-            get_camera_metadata_tag_name(entry.first),
-            ::android::statusToString(ret).c_str());
-      return nullptr;
-    }
-  }
-
-  const camera_metadata_t* metadata = metadataHelper.getAndLock();
-  if (metadata == nullptr) {
-    ALOGE(
-        "Failure when constructing metadata -> CameraMetadata helper returned "
-        "nullptr");
-    return nullptr;
-  }
-
-  auto aidlMetadata =
-      std::make_unique<aidl::android::hardware::camera::device::CameraMetadata>();
-  const uint8_t* data_ptr = reinterpret_cast<const uint8_t*>(metadata);
-  aidlMetadata->metadata.assign(data_ptr,
-                                data_ptr + get_camera_metadata_size(metadata));
-  metadataHelper.unlock(metadata);
-
-  return aidlMetadata;
-}
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.h b/services/camera/virtualcamera/util/MetadataBuilder.h
deleted file mode 100644
index 46f4c43..0000000
--- a/services/camera/virtualcamera/util/MetadataBuilder.h
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Copyright 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
-#define ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
-
-#include <chrono>
-#include <cstdint>
-#include <map>
-#include <memory>
-#include <variant>
-#include <vector>
-
-#include "aidl/android/hardware/camera/device/CameraMetadata.h"
-#include "system/camera_metadata.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-// Convenience builder for the
-// aidl::android::hardware::camera::device::CameraMetadata.
-//
-// Calling the same builder setter multiple will overwrite the value.
-// This class is not thread-safe.
-class MetadataBuilder {
- public:
-  struct StreamConfiguration {
-    int32_t width = 0;
-    int32_t height = 0;
-    int32_t format = 0;
-    // Minimal frame duration - corresponds to maximal FPS for given format.
-    // See ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS in CameraMetadataTag.aidl.
-    std::chrono::nanoseconds minFrameDuration{0};
-    // Minimal stall duration.
-    // See ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
-    std::chrono::nanoseconds minStallDuration{0};
-  };
-
-  struct ControlRegion {
-    int32_t x0 = 0;
-    int32_t y0 = 0;
-    int32_t x1 = 0;
-    int32_t y1 = 0;
-    int32_t weight = 0;
-  };
-
-  MetadataBuilder() = default;
-  ~MetadataBuilder() = default;
-
-  // See ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL in CameraMetadataTag.aidl.
-  MetadataBuilder& setSupportedHardwareLevel(
-      camera_metadata_enum_android_info_supported_hardware_level_t hwLevel);
-
-  // Whether this camera device has a flash unit
-  // See ANDROID_FLASH_INFO_AVAILABLE in CameraMetadataTag.aidl.
-  MetadataBuilder& setFlashAvailable(bool flashAvailable);
-
-  // See ANDROID_LENS_FACING in CameraMetadataTag.aidl.
-  MetadataBuilder& setLensFacing(
-      camera_metadata_enum_android_lens_facing lensFacing);
-
-  // See ANDROID_SENSOR_ORIENTATION in CameraMetadataTag.aidl.
-  MetadataBuilder& setSensorOrientation(int32_t sensorOrientation);
-
-  // Time at start of exposure of first row of the image
-  // sensor active array, in nanoseconds.
-  //
-  // See ANDROID_SENSOR_TIMESTAMP in CameraMetadataTag.aidl.
-  MetadataBuilder& setSensorTimestamp(std::chrono::nanoseconds timestamp);
-
-  // See ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE in CameraMetadataTag.aidl.
-  MetadataBuilder& setSensorActiveArraySize(int x0, int y0, int x1, int y1);
-
-  // See ANDROID_STATISTICS_FACE_DETECT_MODE in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableFaceDetectModes(
-      const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
-          faceDetectMode);
-
-  // Sets available stream configurations along with corresponding minimal frame
-  // durations (corresponding to max fps) and stall durations.
-  //
-  // See ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
-  // ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS and
-  // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableOutputStreamConfigurations(
-      const std::vector<StreamConfiguration>& streamConfigurations);
-
-  // See ANDROID_CONTROL_AVAILABLE_MODES in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAvailableModes(
-      const std::vector<camera_metadata_enum_android_control_mode_t>&
-          availableModes);
-
-  // See ANDROID_CONTROL_AE_COMPENSATION_RANGE in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeCompensationRange(int32_t min, int32_t max);
-
-  // See ANDROID_CONTROL_AE_COMPENSATION_STEP in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeCompensationStep(camera_metadata_rational step);
-
-  // See ANDROID_CONTROL_AF_AVAILABLE_MODES in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAfAvailableModes(
-      const std::vector<camera_metadata_enum_android_control_af_mode_t>&
-          availableModes);
-
-  // See ANDROID_CONTROL_AF_MODE in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAfMode(
-      const camera_metadata_enum_android_control_af_mode_t mode);
-
-  // See ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeAvailableFpsRange(int32_t min, int32_t max);
-
-  // See ANDROID_CONTROL_CAPTURE_INTENT in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlCaptureIntent(
-      camera_metadata_enum_android_control_capture_intent_t intent);
-
-  // See ANDROID_CONTROL_MAX_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlMaxRegions(int32_t maxAeRegions,
-                                        int32_t maxAwbRegions,
-                                        int32_t maxAfRegions);
-
-  // See ANDROID_CONTROL_AE_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeRegions(
-      const std::vector<ControlRegion>& aeRegions);
-
-  // See ANDROID_CONTROL_AWB_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAwbRegions(
-      const std::vector<ControlRegion>& awbRegions);
-
-  // See ANDROID_CONTROL_AF_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAfRegions(
-      const std::vector<ControlRegion>& afRegions);
-
-  // The size of the compressed JPEG image, in bytes.
-  //
-  // See ANDROID_JPEG_SIZE in CameraMetadataTag.aidl.
-  MetadataBuilder& setMaxJpegSize(int32_t size);
-
-  // See ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableMaxDigitalZoom(const float maxZoom);
-
-  // See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlZoomRatioRange(float min, float max);
-
-  // A list of all keys that the camera device has available to use with
-  // CaptureRequest.
-  //
-  // See ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableRequestKeys(const std::vector<int32_t>& keys);
-
-  // A list of all keys that the camera device has available to use with
-  // CaptureResult.
-  //
-  // See ANDROID_RESULT_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableResultKeys(const std::vector<int32_t>& keys);
-
-  // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableCapabilities(
-      const std::vector<
-          camera_metadata_enum_android_request_available_capabilities_t>&
-          capabilities);
-
-  // A list of all keys that the camera device has available to use.
-  //
-  // See ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableCharacteristicKeys(
-      const std::vector<camera_metadata_tag_t>& keys);
-
-  // Extends metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
-  // containing all previously set tags.
-  MetadataBuilder& setAvailableCharacteristicKeys();
-
-  // Build CameraMetadata instance.
-  //
-  // Returns nullptr in case something went wrong.
-  std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
-  build() const;
-
- private:
-  // Maps metadata tags to vectors of values for the given tag.
-  std::map<camera_metadata_tag_t,
-           std::variant<std::vector<int64_t>, std::vector<int32_t>,
-                        std::vector<uint8_t>, std::vector<float>,
-                        std::vector<camera_metadata_rational_t>>>
-      mEntryMap;
-};
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
-
-#endif  // ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
diff --git a/services/camera/virtualcamera/util/MetadataUtil.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
new file mode 100644
index 0000000..4889830
--- /dev/null
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -0,0 +1,980 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MetadataUtil"
+
+#include "MetadataUtil.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <iterator>
+#include <memory>
+#include <optional>
+#include <string>
+#include <utility>
+#include <variant>
+#include <vector>
+
+#include "CameraMetadata.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "log/log.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
+#include "utils/Errors.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+namespace {
+
+using ::android::hardware::camera::common::helper::CameraMetadata;
+
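+// Casts every element of the input vector to the requested element type.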
+template <typename To, typename From>
+std::vector<To> convertTo(const std::vector<From>& from) {
+  std::vector<To> to;
+  to.reserve(from.size());
+  std::transform(from.begin(), from.end(), std::back_inserter(to),
+                 [](const From& x) { return static_cast<To>(x); });
+  return to;
+}
+
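+// Wraps a single value in a one-element vector of the requested element type.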
+template <typename To, typename From>
+std::vector<To> asVectorOf(const From from) {
+  return std::vector<To>({static_cast<To>(from)});
+}
+
+}  // namespace
+
+MetadataBuilder& MetadataBuilder::setSupportedHardwareLevel(
+    const camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
+  mEntryMap[ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL] =
+      asVectorOf<uint8_t>(hwLevel);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setDeviceId(int32_t deviceId) {
+  mEntryMap[ANDROID_INFO_DEVICE_ID] = std::vector<int32_t>({deviceId});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashAvailable(bool flashAvailable) {
+  const uint8_t metadataVal = flashAvailable
+                                  ? ANDROID_FLASH_INFO_AVAILABLE_TRUE
+                                  : ANDROID_FLASH_INFO_AVAILABLE_FALSE;
+  mEntryMap[ANDROID_FLASH_INFO_AVAILABLE] = asVectorOf<uint8_t>(metadataVal);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashState(
+    const camera_metadata_enum_android_flash_state_t flashState) {
+  mEntryMap[ANDROID_FLASH_STATE] = asVectorOf<uint8_t>(flashState);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashMode(
+    const camera_metadata_enum_android_flash_mode_t flashMode) {
+  mEntryMap[ANDROID_FLASH_MODE] = asVectorOf<uint8_t>(flashMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setLensFacing(
+    const camera_metadata_enum_android_lens_facing lensFacing) {
+  mEntryMap[ANDROID_LENS_FACING] = asVectorOf<uint8_t>(lensFacing);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorReadoutTimestamp(
+    const camera_metadata_enum_android_sensor_readout_timestamp_t
+        sensorReadoutTimestamp) {
+  mEntryMap[ANDROID_SENSOR_READOUT_TIMESTAMP] =
+      asVectorOf<uint8_t>(sensorReadoutTimestamp);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableFocalLengths(
+    const std::vector<float>& focalLengths) {
+  mEntryMap[ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS] = focalLengths;
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFocalLength(float focalLength) {
+  mEntryMap[ANDROID_LENS_FOCAL_LENGTH] = asVectorOf<float>(focalLength);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorOrientation(int32_t sensorOrientation) {
+  mEntryMap[ANDROID_SENSOR_ORIENTATION] = asVectorOf<int32_t>(sensorOrientation);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorTimestampSource(
+    const camera_metadata_enum_android_sensor_info_timestamp_source_t
+        timestampSource) {
+  mEntryMap[ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE] =
+      asVectorOf<uint8_t>(timestampSource);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorTimestamp(
+    std::chrono::nanoseconds timestamp) {
+  mEntryMap[ANDROID_SENSOR_TIMESTAMP] = asVectorOf<int64_t>(timestamp.count());
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableFaceDetectModes(
+    const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
+        faceDetectModes) {
+  mEntryMap[ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES] =
+      convertTo<uint8_t>(faceDetectModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableTestPatternModes(
+    const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
+        testPatternModes) {
+  mEntryMap[ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES] =
+      convertTo<int32_t>(testPatternModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableStreamUseCases(
+    const std::vector<
+        camera_metadata_enum_android_scaler_available_stream_use_cases>&
+        availableUseCases) {
+  mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES] =
+      convertTo<int64_t>(availableUseCases);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFaceDetectMode(
+    const camera_metadata_enum_android_statistics_face_detect_mode_t
+        faceDetectMode) {
+  mEntryMap[ANDROID_STATISTICS_FACE_DETECT_MODE] =
+      asVectorOf<uint8_t>(faceDetectMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableModes(
+    const std::vector<camera_metadata_enum_android_control_mode_t>&
+        availableModes) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_MODES] =
+      convertTo<uint8_t>(availableModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlMode(
+    const camera_metadata_enum_android_control_mode_t mode) {
+  mEntryMap[ANDROID_CONTROL_MODE] = asVectorOf<uint8_t>(mode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableSceneModes(
+    const std::vector<camera_metadata_enum_android_control_scene_mode>&
+        availableSceneModes) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_SCENE_MODES] =
+      convertTo<uint8_t>(availableSceneModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlSceneMode(
+    const camera_metadata_enum_android_control_scene_mode sceneMode) {
+  mEntryMap[ANDROID_CONTROL_SCENE_MODE] = asVectorOf<uint8_t>(sceneMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableEffects(
+    const std::vector<camera_metadata_enum_android_control_effect_mode>&
+        availableEffects) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_EFFECTS] =
+      convertTo<uint8_t>(availableEffects);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlEffectMode(
+    const camera_metadata_enum_android_control_effect_mode_t effectMode) {
+  mEntryMap[ANDROID_CONTROL_EFFECT_MODE] = asVectorOf<uint8_t>(effectMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableVideoStabilizationModes(
+    const std::vector<
+        camera_metadata_enum_android_control_video_stabilization_mode_t>&
+        videoStabilizationModes) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES] =
+      convertTo<uint8_t>(videoStabilizationModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlVideoStabilizationMode(
+    const camera_metadata_enum_android_control_video_stabilization_mode
+        stabilizationMode) {
+  mEntryMap[ANDROID_CONTROL_VIDEO_STABILIZATION_MODE] =
+      asVectorOf<uint8_t>(stabilizationMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
+    const std::vector<camera_metadata_enum_android_control_af_mode_t>&
+        availableModes) {
+  mEntryMap[ANDROID_CONTROL_AF_AVAILABLE_MODES] =
+      convertTo<uint8_t>(availableModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfMode(
+    const camera_metadata_enum_android_control_af_mode_t mode) {
+  mEntryMap[ANDROID_CONTROL_AF_MODE] = asVectorOf<uint8_t>(mode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfState(
+    const camera_metadata_enum_android_control_af_state afState) {
+  mEntryMap[ANDROID_CONTROL_AF_STATE] = asVectorOf<uint8_t>(afState);
+  return *this;
+}
+
+// See ANDROID_CONTROL_AF_TRIGGER in CameraMetadataTag.aidl.
+MetadataBuilder& MetadataBuilder::setControlAfTrigger(
+    const camera_metadata_enum_android_control_af_trigger_t trigger) {
+  mEntryMap[ANDROID_CONTROL_AF_TRIGGER] = asVectorOf<uint8_t>(trigger);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableFpsRanges(
+    const std::vector<FpsRange>& fpsRanges) {
+  std::vector<int32_t> ranges;
+  ranges.reserve(2 * fpsRanges.size());
+  for (const FpsRange fpsRange : fpsRanges) {
+    ranges.push_back(fpsRange.minFps);
+    ranges.push_back(fpsRange.maxFps);
+  }
+  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES] = std::move(ranges);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeTargetFpsRange(
+    const FpsRange fpsRange) {
+  mEntryMap[ANDROID_CONTROL_AE_TARGET_FPS_RANGE] =
+      std::vector<int32_t>({fpsRange.minFps, fpsRange.maxFps});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeMode(
+    const camera_metadata_enum_android_control_ae_mode_t mode) {
+  mEntryMap[ANDROID_CONTROL_AE_MODE] = asVectorOf<uint8_t>(mode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableModes(
+    const std::vector<camera_metadata_enum_android_control_ae_mode_t>& modes) {
+  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_MODES] = convertTo<uint8_t>(modes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAePrecaptureTrigger(
+    const camera_metadata_enum_android_control_ae_precapture_trigger_t trigger) {
+  mEntryMap[ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER] =
+      asVectorOf<uint8_t>(trigger);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlMaxRegions(int32_t maxAeRegions,
+                                                       int32_t maxAwbRegions,
+                                                       int32_t maxAfRegions) {
+  mEntryMap[ANDROID_CONTROL_MAX_REGIONS] =
+      std::vector<int32_t>({maxAeRegions, maxAwbRegions, maxAfRegions});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableAwbModes(
+    const std::vector<camera_metadata_enum_android_control_awb_mode>& awbModes) {
+  mEntryMap[ANDROID_CONTROL_AWB_AVAILABLE_MODES] = convertTo<uint8_t>(awbModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbMode(
+    const camera_metadata_enum_android_control_awb_mode awbMode) {
+  mEntryMap[ANDROID_CONTROL_AWB_MODE] = asVectorOf<uint8_t>(awbMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbState(
+    const camera_metadata_enum_android_control_awb_state awbState) {
+  mEntryMap[ANDROID_CONTROL_AWB_STATE] = asVectorOf<uint8_t>(awbState);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbLockAvailable(
+    const bool awbLockAvailable) {
+  const uint8_t lockAvailable = awbLockAvailable
+                                    ? ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE
+                                    : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
+  mEntryMap[ANDROID_CONTROL_AWB_LOCK_AVAILABLE] =
+      std::vector<uint8_t>({lockAvailable});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbLock(
+    const camera_metadata_enum_android_control_awb_lock awbLock) {
+  mEntryMap[ANDROID_CONTROL_AWB_LOCK] = asVectorOf<uint8_t>(awbLock);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableAntibandingModes(
+    const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
+        antibandingModes) {
+  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES] =
+      convertTo<uint8_t>(antibandingModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAntibandingMode(
+    const camera_metadata_enum_android_control_ae_antibanding_mode_t
+        antibandingMode) {
+  mEntryMap[ANDROID_CONTROL_AE_ANTIBANDING_MODE] =
+      asVectorOf<uint8_t>(antibandingMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeLockAvailable(
+    const bool aeLockAvailable) {
+  const uint8_t lockAvailable = aeLockAvailable
+                                    ? ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE
+                                    : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
+  mEntryMap[ANDROID_CONTROL_AE_LOCK_AVAILABLE] =
+      asVectorOf<uint8_t>(lockAvailable);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeLock(
+    const camera_metadata_enum_android_control_ae_lock aeLock) {
+  mEntryMap[ANDROID_CONTROL_AE_LOCK] = asVectorOf<uint8_t>(aeLock);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeRegions(
+    const std::vector<ControlRegion>& aeRegions) {
+  std::vector<int32_t> regions;
+  regions.reserve(5 * aeRegions.size());
+  for (const ControlRegion& region : aeRegions) {
+    regions.push_back(region.x0);
+    regions.push_back(region.y0);
+    regions.push_back(region.x1);
+    regions.push_back(region.y1);
+    regions.push_back(region.weight);
+  }
+  mEntryMap[ANDROID_CONTROL_AE_REGIONS] = std::move(regions);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfRegions(
+    const std::vector<ControlRegion>& afRegions) {
+  std::vector<int32_t> regions;
+  regions.reserve(5 * afRegions.size());
+  for (const ControlRegion& region : afRegions) {
+    regions.push_back(region.x0);
+    regions.push_back(region.y0);
+    regions.push_back(region.x1);
+    regions.push_back(region.y1);
+    regions.push_back(region.weight);
+  }
+  mEntryMap[ANDROID_CONTROL_AF_REGIONS] = std::move(regions);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbRegions(
+    const std::vector<ControlRegion>& awbRegions) {
+  std::vector<int32_t> regions;
+  regions.reserve(5 * awbRegions.size());
+  for (const ControlRegion& region : awbRegions) {
+    regions.push_back(region.x0);
+    regions.push_back(region.y0);
+    regions.push_back(region.x1);
+    regions.push_back(region.y1);
+    regions.push_back(region.weight);
+  }
+  mEntryMap[ANDROID_CONTROL_AWB_REGIONS] = std::move(regions);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlCaptureIntent(
+    const camera_metadata_enum_android_control_capture_intent_t intent) {
+  mEntryMap[ANDROID_CONTROL_CAPTURE_INTENT] = asVectorOf<uint8_t>(intent);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setCropRegion(const int32_t x, const int32_t y,
+                                                const int32_t width,
+                                                const int32_t height) {
+  mEntryMap[ANDROID_SCALER_CROP_REGION] =
+      std::vector<int32_t>({x, y, width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxJpegSize(const int32_t size) {
+  mEntryMap[ANDROID_JPEG_MAX_SIZE] = asVectorOf<int32_t>(size);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxFrameDuration(
+    const std::chrono::nanoseconds duration) {
+  mEntryMap[ANDROID_SENSOR_INFO_MAX_FRAME_DURATION] =
+      asVectorOf<int64_t>(duration.count());
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegAvailableThumbnailSizes(
+    const std::vector<Resolution>& thumbnailSizes) {
+  std::vector<int32_t> sizes;
+  sizes.reserve(thumbnailSizes.size() * 2);
+  for (const Resolution& resolution : thumbnailSizes) {
+    sizes.push_back(resolution.width);
+    sizes.push_back(resolution.height);
+  }
+  mEntryMap[ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES] = std::move(sizes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegGpsCoordinates(
+    const GpsCoordinates& gpsCoordinates) {
+  mEntryMap[ANDROID_JPEG_GPS_COORDINATES] =
+      std::vector<double>({gpsCoordinates.latitude, gpsCoordinates.longitude,
+                           gpsCoordinates.altitude});
+
+  if (!gpsCoordinates.provider.empty()) {
+    mEntryMap[ANDROID_JPEG_GPS_PROCESSING_METHOD] = std::vector<uint8_t>{
+        gpsCoordinates.provider.begin(), gpsCoordinates.provider.end()};
+  }
+
+  if (gpsCoordinates.timestamp.has_value()) {
+    mEntryMap[ANDROID_JPEG_GPS_TIMESTAMP] =
+        asVectorOf<int64_t>(gpsCoordinates.timestamp.value());
+  }
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegOrientation(const int32_t orientation) {
+  mEntryMap[ANDROID_JPEG_ORIENTATION] = asVectorOf<int32_t>(orientation);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegQuality(const uint8_t quality) {
+  mEntryMap[ANDROID_JPEG_QUALITY] = asVectorOf<uint8_t>(quality);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailSize(const int width,
+                                                       const int height) {
+  mEntryMap[ANDROID_JPEG_THUMBNAIL_SIZE] = std::vector<int32_t>({width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailQuality(const uint8_t quality) {
+  mEntryMap[ANDROID_JPEG_THUMBNAIL_QUALITY] = asVectorOf<uint8_t>(quality);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxNumberOutputStreams(
+    const int32_t maxRawStreams, const int32_t maxProcessedStreams,
+    const int32_t maxStallStreams) {
+  mEntryMap[ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS] = std::vector<int32_t>(
+      {maxRawStreams, maxProcessedStreams, maxStallStreams});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSyncMaxLatency(
+    const camera_metadata_enum_android_sync_max_latency latency) {
+  mEntryMap[ANDROID_SYNC_MAX_LATENCY] = asVectorOf<int32_t>(latency);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setPipelineMaxDepth(const uint8_t maxDepth) {
+  mEntryMap[ANDROID_REQUEST_PIPELINE_MAX_DEPTH] = asVectorOf<uint8_t>(maxDepth);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setPipelineDepth(const uint8_t depth) {
+  mEntryMap[ANDROID_REQUEST_PIPELINE_DEPTH] = asVectorOf<uint8_t>(depth);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableRequestCapabilities(
+    const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
+        requestCapabilities) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
+      convertTo<uint8_t>(requestCapabilities);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableOutputStreamConfigurations(
+    const std::vector<StreamConfiguration>& streamConfigurations) {
+  std::vector<int32_t> metadataStreamConfigs;
+  std::vector<int64_t> metadataMinFrameDurations;
+  std::vector<int64_t> metadataStallDurations;
+  metadataStreamConfigs.reserve(streamConfigurations.size());
+  metadataMinFrameDurations.reserve(streamConfigurations.size());
+  metadataStallDurations.reserve(streamConfigurations.size());
+
+  for (const auto& config : streamConfigurations) {
+    metadataStreamConfigs.push_back(config.format);
+    metadataStreamConfigs.push_back(config.width);
+    metadataStreamConfigs.push_back(config.height);
+    metadataStreamConfigs.push_back(
+        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+
+    metadataMinFrameDurations.push_back(config.format);
+    metadataMinFrameDurations.push_back(config.width);
+    metadataMinFrameDurations.push_back(config.height);
+    metadataMinFrameDurations.push_back(config.minFrameDuration.count());
+
+    metadataStallDurations.push_back(config.format);
+    metadataStallDurations.push_back(config.width);
+    metadataStallDurations.push_back(config.height);
+    metadataStallDurations.push_back(config.minStallDuration.count());
+  }
+
+  mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS] =
+      std::move(metadataStreamConfigs);
+  mEntryMap[ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS] =
+      std::move(metadataMinFrameDurations);
+  mEntryMap[ANDROID_SCALER_AVAILABLE_STALL_DURATIONS] =
+      std::move(metadataStallDurations);
+
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableAberrationCorrectionModes(
+    const std::vector<camera_metadata_enum_android_color_correction_aberration_mode>&
+        aberrationCorrectionModes) {
+  mEntryMap[ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES] =
+      convertTo<uint8_t>(aberrationCorrectionModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAberrationCorrectionMode(
+    const camera_metadata_enum_android_color_correction_aberration_mode
+        aberrationCorrectionMode) {
+  mEntryMap[ANDROID_COLOR_CORRECTION_ABERRATION_MODE] =
+      asVectorOf<uint8_t>(aberrationCorrectionMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableNoiseReductionModes(
+    const std::vector<camera_metadata_enum_android_noise_reduction_mode>&
+        noiseReductionModes) {
+  mEntryMap[ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES] =
+      convertTo<uint8_t>(noiseReductionModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setNoiseReductionMode(
+    const camera_metadata_enum_android_noise_reduction_mode noiseReductionMode) {
+  mEntryMap[ANDROID_NOISE_REDUCTION_MODE] =
+      asVectorOf<uint8_t>(noiseReductionMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setRequestPartialResultCount(
+    const int partialResultCount) {
+  mEntryMap[ANDROID_REQUEST_PARTIAL_RESULT_COUNT] =
+      asVectorOf<int32_t>(partialResultCount);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setCroppingType(
+    const camera_metadata_enum_android_scaler_cropping_type croppingType) {
+  mEntryMap[ANDROID_SCALER_CROPPING_TYPE] = asVectorOf<uint8_t>(croppingType);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxFaceCount(const int maxFaceCount) {
+  mEntryMap[ANDROID_STATISTICS_INFO_MAX_FACE_COUNT] =
+      asVectorOf<int32_t>(maxFaceCount);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableMaxDigitalZoom(const float maxZoom) {
+  mEntryMap[ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM] =
+      asVectorOf<float>(maxZoom);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlZoomRatioRange(const float min,
+                                                           const float max) {
+  mEntryMap[ANDROID_CONTROL_ZOOM_RATIO_RANGE] = std::vector<float>({min, max});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorActiveArraySize(int x0, int y0,
+                                                           int x1, int y1) {
+  mEntryMap[ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE] =
+      std::vector<int32_t>({x0, y0, x1, y1});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorPixelArraySize(int width,
+                                                          int height) {
+  mEntryMap[ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE] =
+      std::vector<int32_t>({width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorPhysicalSize(float width,
+                                                        float height) {
+  mEntryMap[ANDROID_SENSOR_INFO_PHYSICAL_SIZE] =
+      std::vector<float>({width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeCompensationRange(int32_t min,
+                                                                int32_t max) {
+  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_RANGE] =
+      std::vector<int32_t>({min, max});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeCompensationStep(
+    const camera_metadata_rational step) {
+  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_STEP] =
+      asVectorOf<camera_metadata_rational>(step);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeExposureCompensation(
+    const int32_t exposureCompensation) {
+  mEntryMap[ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION] =
+      asVectorOf<int32_t>(exposureCompensation);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeState(
+    const camera_metadata_enum_android_control_ae_state aeState) {
+  mEntryMap[ANDROID_CONTROL_AE_STATE] = asVectorOf<uint8_t>(aeState);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsSceneFlicker(
+    const camera_metadata_enum_android_statistics_scene_flicker sceneFlicker) {
+  mEntryMap[ANDROID_STATISTICS_SCENE_FLICKER] =
+      asVectorOf<uint8_t>(sceneFlicker);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsHotPixelMapMode(
+    const camera_metadata_enum_android_statistics_hot_pixel_map_mode
+        hotPixelMapMode) {
+  mEntryMap[ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE] =
+      asVectorOf<uint8_t>(hotPixelMapMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsLensShadingMapMode(
+    const camera_metadata_enum_android_statistics_lens_shading_map_mode
+        lensShadingMapMode) {
+  mEntryMap[ANDROID_STATISTICS_LENS_SHADING_MAP_MODE] =
+      asVectorOf<uint8_t>(lensShadingMapMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setLensOpticalStabilizationMode(
+    const camera_metadata_enum_android_lens_optical_stabilization_mode_t
+        opticalStabilizationMode) {
+  mEntryMap[ANDROID_LENS_OPTICAL_STABILIZATION_MODE] =
+      asVectorOf<uint8_t>(opticalStabilizationMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableRequestKeys(
+    const std::vector<int32_t>& keys) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS] = keys;
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableResultKeys(
+    const std::vector<int32_t>& keys) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_RESULT_KEYS] = keys;
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCapabilities(
+    const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
+        capabilities) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
+      convertTo<uint8_t>(capabilities);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys(
+    const std::vector<camera_metadata_tag_t>& keys) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS] =
+      convertTo<int32_t>(keys);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys() {
+  mExtendWithAvailableCharacteristicsKeys = true;
+  return *this;
+}
+
+std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
+MetadataBuilder::build() {
+  if (mExtendWithAvailableCharacteristicsKeys) {
+    std::vector<camera_metadata_tag_t> availableKeys;
+    availableKeys.reserve(mEntryMap.size());
+    for (const auto& [key, _] : mEntryMap) {
+      if (key != ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS) {
+        availableKeys.push_back(key);
+      }
+    }
+    setAvailableCharacteristicKeys(availableKeys);
+  }
+
+  CameraMetadata metadataHelper;
+  for (const auto& entry : mEntryMap) {
+    status_t ret = std::visit(
+        [&](auto&& arg) {
+          return metadataHelper.update(entry.first, arg.data(), arg.size());
+        },
+        entry.second);
+    if (ret != NO_ERROR) {
+      ALOGE("Failed to update metadata with key %d - %s: %s", entry.first,
+            get_camera_metadata_tag_name(entry.first),
+            ::android::statusToString(ret).c_str());
+      return nullptr;
+    }
+  }
+
+  const camera_metadata_t* metadata = metadataHelper.getAndLock();
+  if (metadata == nullptr) {
+    ALOGE(
+        "Failure when constructing metadata -> CameraMetadata helper returned "
+        "nullptr");
+    return nullptr;
+  }
+
+  auto aidlMetadata =
+      std::make_unique<aidl::android::hardware::camera::device::CameraMetadata>();
+  const uint8_t* data_ptr = reinterpret_cast<const uint8_t*>(metadata);
+  aidlMetadata->metadata.assign(data_ptr,
+                                data_ptr + get_camera_metadata_size(metadata));
+  metadataHelper.unlock(metadata);
+
+  return aidlMetadata;
+}
+
+std::optional<int32_t> getJpegQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_QUALITY, &entry) !=
+      OK) {
+    return std::nullopt;
+  }
+
+  // ANDROID_JPEG_QUALITY is stored as a byte entry.
+  return *entry.data.u8;
+}
+
+int32_t getJpegOrientation(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_ORIENTATION,
+                                    &entry) != OK) {
+    return 0;
+  }
+
+  return *entry.data.i32;
+}
+
+std::optional<Resolution> getJpegThumbnailSize(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_SIZE,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return Resolution(entry.data.i32[0], entry.data.i32[1]);
+}
+
+std::optional<int32_t> getJpegThumbnailQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_QUALITY,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  // ANDROID_JPEG_THUMBNAIL_QUALITY is stored as a byte entry.
+  return *entry.data.u8;
+}
+
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, &entry) != OK) {
+    return {};
+  }
+
+  std::vector<Resolution> thumbnailSizes;
+  thumbnailSizes.reserve(entry.count / 2);
+  for (size_t i = 0; i < entry.count; i += 2) {
+    thumbnailSizes.emplace_back(entry.data.i32[i], entry.data.i32[i + 1]);
+  }
+  return thumbnailSizes;
+}
+
+std::optional<FpsRange> getFpsRange(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry) != OK ||
+      entry.count != 2) {
+    return {};
+  }
+
+  FpsRange range{.minFps = entry.data.i32[0], .maxFps = entry.data.i32[1]};
+  return range;
+}
+
+std::optional<camera_metadata_enum_android_control_capture_intent>
+getCaptureIntent(const aidl::android::hardware::camera::device::CameraMetadata&
+                     cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_CAPTURE_INTENT,
+                                    &entry) != OK) {
+    return {};
+  }
+
+  return static_cast<camera_metadata_enum_android_control_capture_intent>(
+      entry.data.u8[0]);
+}
+
+std::optional<GpsCoordinates> getGpsCoordinates(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_GPS_COORDINATES,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  GpsCoordinates coordinates{.latitude = entry.data.d[0],
+                             .longitude = entry.data.d[1],
+                             .altitude = entry.data.d[2]};
+
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_GPS_TIMESTAMP,
+                                    &entry) == OK) {
+    coordinates.timestamp = entry.data.i64[0];
+  }
+
+  // According to types.hal, the string describing the GPS processing method
+  // is at most 32 characters long.
+  static constexpr size_t kGpsProviderStringLength = 32;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_JPEG_GPS_PROCESSING_METHOD, &entry) == OK) {
+    coordinates.provider.assign(
+        reinterpret_cast<const char*>(entry.data.u8),
+        std::min(entry.count, kGpsProviderStringLength));
+  }
+
+  return coordinates;
+}
+
+std::optional<camera_metadata_enum_android_lens_facing> getLensFacing(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_LENS_FACING, &entry) !=
+      OK) {
+    return std::nullopt;
+  }
+
+  return static_cast<camera_metadata_enum_android_lens_facing>(entry.data.u8[0]);
+}
+
+std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+getPrecaptureTrigger(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return static_cast<camera_metadata_enum_android_control_ae_precapture_trigger>(
+      entry.data.u8[0]);
+}
+
+std::optional<int32_t> getDeviceId(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_INFO_DEVICE_ID, &entry) !=
+      OK) {
+    return std::nullopt;
+  }
+
+  return static_cast<int32_t>(entry.data.i32[0]);
+}
+
+std::optional<int32_t> getSensorOrientation(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_ORIENTATION,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return static_cast<int32_t>(entry.data.i32[0]);
+}
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataUtil.h b/services/camera/virtualcamera/util/MetadataUtil.h
new file mode 100644
index 0000000..22d3657
--- /dev/null
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -0,0 +1,500 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
+
+#include <chrono>
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <optional>
+#include <variant>
+#include <vector>
+
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Convenience builder for the
+// aidl::android::hardware::camera::device::CameraMetadata.
+//
+// Calling the same builder setter multiple times will overwrite the
+// previously set value.
+// This class is not thread-safe.
+class MetadataBuilder {
+ public:
+  struct StreamConfiguration {
+    int32_t width = 0;
+    int32_t height = 0;
+    int32_t format = 0;
+    // Minimal frame duration - corresponds to maximal FPS for given format.
+    // See ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS in CameraMetadataTag.aidl.
+    std::chrono::nanoseconds minFrameDuration{0};
+    // Minimal stall duration.
+    // See ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
+    std::chrono::nanoseconds minStallDuration{0};
+  };
+
+  struct ControlRegion {
+    int32_t x0 = 0;
+    int32_t y0 = 0;
+    int32_t x1 = 0;
+    int32_t y1 = 0;
+    int32_t weight = 0;
+  };
+
+  MetadataBuilder() = default;
+  ~MetadataBuilder() = default;
+
+  // See ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL in CameraMetadataTag.aidl.
+  MetadataBuilder& setSupportedHardwareLevel(
+      camera_metadata_enum_android_info_supported_hardware_level_t hwLevel);
+
+  // See ANDROID_INFO_DEVICE_ID in CameraMetadataTag.aidl.
+  MetadataBuilder& setDeviceId(int32_t deviceId);
+
+  // Whether this camera device has a flash unit
+  // See ANDROID_FLASH_INFO_AVAILABLE in CameraMetadataTag.aidl.
+  MetadataBuilder& setFlashAvailable(bool flashAvailable);
+
+  // See FLASH_STATE in CaptureResult.java.
+  MetadataBuilder& setFlashState(
+      camera_metadata_enum_android_flash_state_t flashState);
+
+  // See FLASH_MODE in CaptureRequest.java.
+  MetadataBuilder& setFlashMode(
+      camera_metadata_enum_android_flash_mode_t flashMode);
+
+  // See ANDROID_LENS_FACING in CameraMetadataTag.aidl.
+  MetadataBuilder& setLensFacing(
+      camera_metadata_enum_android_lens_facing lensFacing);
+
+  // See ANDROID_SENSOR_READOUT_TIMESTAMP in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorReadoutTimestamp(
+      camera_metadata_enum_android_sensor_readout_timestamp_t
+          sensorReadoutTimestamp);
+
+  // See ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableFocalLengths(
+      const std::vector<float>& focalLengths);
+
+  // See ANDROID_LENS_FOCAL_LENGTH in CameraMetadataTag.aidl.
+  MetadataBuilder& setFocalLength(float focalLength);
+
+  // See ANDROID_SENSOR_ORIENTATION in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorOrientation(int32_t sensorOrientation);
+
+  // Time at start of exposure of first row of the image
+  // sensor active array, in nanoseconds.
+  //
+  // See ANDROID_SENSOR_TIMESTAMP in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorTimestamp(std::chrono::nanoseconds timestamp);
+
+  // See SENSOR_INFO_TIMESTAMP_SOURCE in CameraCharacteristics.java.
+  MetadataBuilder& setSensorTimestampSource(
+      camera_metadata_enum_android_sensor_info_timestamp_source_t timestampSource);
+
+  // See ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorActiveArraySize(int x0, int y0, int x1, int y1);
+
+  // See ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorPixelArraySize(int width, int height);
+
+  // See ANDROID_SENSOR_INFO_PHYSICAL_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorPhysicalSize(float width, float height);
+
+  // See ANDROID_STATISTICS_FACE_DETECT_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableFaceDetectModes(
+      const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
+          faceDetectMode);
+
+  // See SENSOR_AVAILABLE_TEST_PATTERN_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setAvailableTestPatternModes(
+      const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
+          testPatternModes);
+
+  // See ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES in CameraCharacteristics.java
+  MetadataBuilder& setAvailableStreamUseCases(
+      const std::vector<
+          camera_metadata_enum_android_scaler_available_stream_use_cases>& availableUseCases);
+
+  // See ANDROID_STATISTICS_FACE_DETECT_MODE in CaptureRequest.java.
+  MetadataBuilder& setFaceDetectMode(
+      camera_metadata_enum_android_statistics_face_detect_mode_t faceDetectMode);
+
+  // Sets available stream configurations along with corresponding minimal frame
+  // durations (corresponding to max fps) and stall durations.
+  //
+  // See ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+  // ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS and
+  // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableOutputStreamConfigurations(
+      const std::vector<StreamConfiguration>& streamConfigurations);
+
+  // See COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setAvailableAberrationCorrectionModes(
+      const std::vector<
+          camera_metadata_enum_android_color_correction_aberration_mode>&
+          aberrationCorrectionModes);
+
+  // See COLOR_CORRECTION_ABERRATION_MODE in CaptureRequest.java.
+  MetadataBuilder& setAberrationCorrectionMode(
+      camera_metadata_enum_android_color_correction_aberration_mode
+          aberrationCorrectionMode);
+
+  // See NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setAvailableNoiseReductionModes(
+      const std::vector<camera_metadata_enum_android_noise_reduction_mode>&
+          noiseReductionModes);
+
+  // See NOISE_REDUCTION_MODE in CaptureRequest.java.
+  MetadataBuilder& setNoiseReductionMode(
+      camera_metadata_enum_android_noise_reduction_mode noiseReductionMode);
+
+  // See REQUEST_PARTIAL_RESULT_COUNT in CameraCharacteristics.java.
+  MetadataBuilder& setRequestPartialResultCount(int partialResultCount);
+
+  // See SCALER_CROPPING_TYPE in CameraCharacteristics.java.
+  MetadataBuilder& setCroppingType(
+      camera_metadata_enum_android_scaler_cropping_type croppingType);
+
+  // See STATISTICS_INFO_MAX_FACE_COUNT in CameraCharacteristics.java.
+  MetadataBuilder& setMaxFaceCount(int maxFaceCount);
+
+  // See ANDROID_CONTROL_AVAILABLE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableModes(
+      const std::vector<camera_metadata_enum_android_control_mode_t>&
+          availableModes);
+
+  // See ANDROID_CONTROL_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlMode(
+      camera_metadata_enum_android_control_mode_t mode);
+
+  // See ANDROID_CONTROL_AVAILABLE_SCENE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableSceneModes(
+      const std::vector<camera_metadata_enum_android_control_scene_mode>&
+          availableSceneModes);
+
+  // See ANDROID_CONTROL_SCENE_MODE in CameraMetadataTag.aidl
+  MetadataBuilder& setControlSceneMode(
+      camera_metadata_enum_android_control_scene_mode sceneMode);
+
+  // See ANDROID_CONTROL_AVAILABLE_EFFECTS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableEffects(
+      const std::vector<camera_metadata_enum_android_control_effect_mode>&
+          availableEffects);
+
+  // See CONTROL_EFFECT_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlEffectMode(
+      camera_metadata_enum_android_control_effect_mode_t effectMode);
+
+  // See ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableVideoStabilizationModes(
+      const std::vector<
+          camera_metadata_enum_android_control_video_stabilization_mode_t>&
+          videoStabilizationModes);
+
+  // See ANDROID_CONTROL_VIDEO_STABILIZATION_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlVideoStabilizationMode(
+      camera_metadata_enum_android_control_video_stabilization_mode
+          stabilizationMode);
+
+  // See CONTROL_AE_AVAILABLE_ANTIBANDING_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setControlAeAvailableAntibandingModes(
+      const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
+          antibandingModes);
+
+  // See CONTROL_AE_ANTIBANDING_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlAeAntibandingMode(
+      camera_metadata_enum_android_control_ae_antibanding_mode_t antibandingMode);
+
+  // See ANDROID_CONTROL_AE_COMPENSATION_RANGE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeCompensationRange(int32_t min, int32_t max);
+
+  // See ANDROID_CONTROL_AE_COMPENSATION_STEP in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeCompensationStep(camera_metadata_rational step);
+
+  // See ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeExposureCompensation(int32_t exposureCompensation);
+
+  // See ANDROID_CONTROL_AE_AVAILABLE_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setControlAeAvailableModes(
+      const std::vector<camera_metadata_enum_android_control_ae_mode_t>& modes);
+
+  // See ANDROID_CONTROL_AE_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlAeMode(
+      camera_metadata_enum_android_control_ae_mode_t step);
+
+  // See ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER in CaptureRequest.java.
+  MetadataBuilder& setControlAePrecaptureTrigger(
+      camera_metadata_enum_android_control_ae_precapture_trigger_t trigger);
+
+  // See ANDROID_CONTROL_AF_AVAILABLE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfAvailableModes(
+      const std::vector<camera_metadata_enum_android_control_af_mode_t>&
+          availableModes);
+
+  // See ANDROID_CONTROL_AF_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfMode(
+      const camera_metadata_enum_android_control_af_mode_t mode);
+
+  // See ANDROID_CONTROL_AF_TRIGGER in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfTrigger(
+      const camera_metadata_enum_android_control_af_trigger_t trigger);
+
+  // See ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeAvailableFpsRanges(
+      const std::vector<FpsRange>& fpsRanges);
+
+  // See ANDROID_CONTROL_AE_TARGET_FPS_RANGE in CaptureRequest.java.
+  MetadataBuilder& setControlAeTargetFpsRange(FpsRange fpsRange);
+
+  // See ANDROID_CONTROL_CAPTURE_INTENT in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlCaptureIntent(
+      camera_metadata_enum_android_control_capture_intent_t intent);
+
+  // See ANDROID_CONTROL_MAX_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlMaxRegions(int32_t maxAeRegions,
+                                        int32_t maxAwbRegions,
+                                        int32_t maxAfRegions);
+
+  // See ANDROID_CONTROL_AWB_AVAILABLE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableAwbModes(
+      const std::vector<camera_metadata_enum_android_control_awb_mode>& awbModes);
+
+  // See ANDROID_CONTROL_AWB_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlAwbMode(
+      camera_metadata_enum_android_control_awb_mode awb);
+
+  // See CONTROL_AWB_LOCK_AVAILABLE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAwbLockAvailable(bool awbLockAvailable);
+
+  // See CONTROL_AWB_LOCK in CameraMetadataTag.aidl
+  MetadataBuilder& setControlAwbLock(
+      camera_metadata_enum_android_control_awb_lock awbLock);
+
+  // See CONTROL_AE_LOCK_AVAILABLE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeLockAvailable(bool aeLockAvailable);
+
+  // See CONTROL_AE_LOCK in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeLock(
+      camera_metadata_enum_android_control_ae_lock aeLock);
+
+  // See CONTROL_AE_STATE in CameraMetadataTag.aidl
+  MetadataBuilder& setControlAeState(
+      camera_metadata_enum_android_control_ae_state aeState);
+
+  // See ANDROID_CONTROL_AE_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeRegions(
+      const std::vector<ControlRegion>& aeRegions);
+
+  // See ANDROID_CONTROL_AWB_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAwbRegions(
+      const std::vector<ControlRegion>& awbRegions);
+
+  // See ANDROID_CONTROL_AWB_STATE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAwbState(
+      camera_metadata_enum_android_control_awb_state awbState);
+
+  // See ANDROID_SCALER_CROP_REGION in CaptureRequest.java.
+  MetadataBuilder& setCropRegion(int32_t x, int32_t y, int32_t width,
+                                 int32_t height);
+
+  // See ANDROID_CONTROL_AF_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfRegions(
+      const std::vector<ControlRegion>& afRegions);
+
+  // See ANDROID_CONTROL_AF_STATE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfState(
+      camera_metadata_enum_android_control_af_state afState);
+
+  // The maximum size of the compressed JPEG image, in bytes.
+  //
+  // See ANDROID_JPEG_MAX_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setMaxJpegSize(int32_t size);
+
+  // See SENSOR_INFO_MAX_FRAME_DURATION in CameraCharacteristics.java.
+  MetadataBuilder& setMaxFrameDuration(std::chrono::nanoseconds duration);
+
+  // See JPEG_AVAILABLE_THUMBNAIL_SIZES in CameraCharacteristics.java.
+  MetadataBuilder& setJpegAvailableThumbnailSizes(
+      const std::vector<Resolution>& thumbnailSizes);
+
+  // See ANDROID_JPEG_GPS_COORDINATES.
+  MetadataBuilder& setJpegGpsCoordinates(const GpsCoordinates& gpsCoordinates);
+
+  // See JPEG_ORIENTATION in CaptureRequest.java.
+  MetadataBuilder& setJpegOrientation(int32_t orientation);
+
+  // See JPEG_QUALITY in CaptureRequest.java.
+  MetadataBuilder& setJpegQuality(uint8_t quality);
+
+  // See JPEG_THUMBNAIL_SIZE in CaptureRequest.java.
+  MetadataBuilder& setJpegThumbnailSize(int width, int height);
+
+  // See JPEG_THUMBNAIL_QUALITY in CaptureRequest.java.
+  MetadataBuilder& setJpegThumbnailQuality(uint8_t quality);
+
+  // The maximum numbers of different types of output streams
+  // that can be configured and used simultaneously by a camera device.
+  //
+  // See ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS in CameraMetadataTag.aidl.
+  MetadataBuilder& setMaxNumberOutputStreams(int32_t maxRawStreams,
+                                             int32_t maxProcessedStreams,
+                                             int32_t maxStallStreams);
+
+  // See ANDROID_SYNC_MAX_LATENCY in CameraMetadataTag.aidl.
+  MetadataBuilder& setSyncMaxLatency(
+      camera_metadata_enum_android_sync_max_latency latency);
+
+  // See REQUEST_PIPELINE_MAX_DEPTH in CameraCharacteristics.java.
+  MetadataBuilder& setPipelineMaxDepth(uint8_t maxDepth);
+
+  // See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+  MetadataBuilder& setPipelineDepth(uint8_t depth);
+
+  // See ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableMaxDigitalZoom(const float maxZoom);
+
+  // See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlZoomRatioRange(float min, float max);
+
+  // See ANDROID_STATISTICS_SCENE_FLICKER in CameraMetadataTag.aidl.
+  MetadataBuilder& setStatisticsSceneFlicker(
+      camera_metadata_enum_android_statistics_scene_flicker sceneFlicker);
+
+  // See ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setStatisticsHotPixelMapMode(
+      camera_metadata_enum_android_statistics_hot_pixel_map_mode mode);
+
+  // See ANDROID_STATISTICS_LENS_SHADING_MAP_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setStatisticsLensShadingMapMode(
+      camera_metadata_enum_android_statistics_lens_shading_map_mode
+          lensShadingMapMode);
+
+  // See ANDROID_LENS_OPTICAL_STABILIZATION_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setLensOpticalStabilizationMode(
+      camera_metadata_enum_android_lens_optical_stabilization_mode_t
+          opticalStabilizationMode);
+
+  // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableRequestCapabilities(
+      const std::vector<
+          camera_metadata_enum_android_request_available_capabilities_t>&
+          requestCapabilities);
+
+  // A list of all keys that the camera device has available to use with
+  // CaptureRequest.
+  //
+  // See ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableRequestKeys(const std::vector<int32_t>& keys);
+
+  // A list of all keys that the camera device has available to use with
+  // CaptureResult.
+  //
+  // See ANDROID_REQUEST_AVAILABLE_RESULT_KEYS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableResultKeys(const std::vector<int32_t>& keys);
+
+  // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableCapabilities(
+      const std::vector<
+          camera_metadata_enum_android_request_available_capabilities_t>&
+          capabilities);
+
+  // A list of all keys that the camera device has available to use.
+  //
+  // See ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableCharacteristicKeys(
+      const std::vector<camera_metadata_tag_t>& keys);
+
+  // Extends metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
+  // containing all set tags.
+  MetadataBuilder& setAvailableCharacteristicKeys();
+
+  // Build CameraMetadata instance.
+  //
+  // Returns nullptr in case something went wrong.
+  std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
+  build();
+
+ private:
+  // Maps metadata tags to vectors of values for the given tag.
+  std::map<
+      camera_metadata_tag_t,
+      std::variant<std::vector<int64_t>, std::vector<int32_t>,
+                   std::vector<uint8_t>, std::vector<float>,
+                   std::vector<camera_metadata_rational_t>, std::vector<double>>>
+      mEntryMap;
+  // Extend metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS.
+  bool mExtendWithAvailableCharacteristicsKeys = false;
+};
+
+// Returns JPEG_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_ORIENTATION from metadata, or 0 if the key is not present.
+int32_t getJpegOrientation(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_SIZE from metadata, or nullopt if the key is not present.
+std::optional<Resolution> getJpegThumbnailSize(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegThumbnailQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_AVAILABLE_THUMBNAIL_SIZES from metadata, or an empty vector if
+// the key is not present.
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<FpsRange> getFpsRange(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<camera_metadata_enum_android_control_capture_intent> getCaptureIntent(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns ANDROID_JPEG_GPS_COORDINATES in a GpsCoordinate object or nullopt if
+// the key is not present.
+std::optional<GpsCoordinates> getGpsCoordinates(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<camera_metadata_enum_android_lens_facing> getLensFacing(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+getPrecaptureTrigger(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata);
+
+// Returns the virtual device id. This is not the camera id.
+std::optional<int32_t> getDeviceId(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata);
+
+// Returns the value of ANDROID_SENSOR_ORIENTATION, or nullopt if the key is
+// not present (which is equivalent to an orientation of 0).
+std::optional<int32_t> getSensorOrientation(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata);
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.cc b/services/camera/virtualcamera/util/TestPatternHelper.cc
deleted file mode 100644
index a00a1b8..0000000
--- a/services/camera/virtualcamera/util/TestPatternHelper.cc
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "TestPatternHelper"
-
-#include "TestPatternHelper.h"
-
-#include <complex>
-#include <cstdint>
-
-#include "log/log.h"
-#include "utils/Errors.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-namespace {
-
-uint8_t julia(const std::complex<float> n, const std::complex<float> c) {
-  std::complex<float> z = n;
-  for (int i = 0; i < 64; i++) {
-    z = z * z + c;
-    if (std::abs(z) > 2.0) return i * 4;
-  }
-  return 0xff;
-}
-
-uint8_t pixelToFractal(const int x, const int y, const std::complex<float> c) {
-  std::complex<float> n(float(x) / 640.0f - 0.5, float(y) / 480.0f - 0.5);
-  return julia(n * 5.f, c);
-}
-
-void renderTestPatternYcbCr420(uint8_t* data_ptr, const int width,
-                               const int height, const int frameNumber) {
-  float time = float(frameNumber) / 120.0f;
-  const std::complex<float> c(std::sin(time), std::cos(time));
-
-  uint8_t* y_data = data_ptr;
-  uint8_t* uv_data = static_cast<uint8_t*>(y_data + width * height);
-
-  for (int i = 0; i < width; ++i) {
-    for (int j = 0; j < height; ++j) {
-      y_data[j * width + i] = pixelToFractal(i, j, c * 0.78f);
-      if ((i & 1) && (j & 1)) {
-        uv_data[((j / 2) * (width / 2) + i / 2) * 2] =
-            static_cast<uint8_t>((float(i) / float(width)) * 255.f);
-        uv_data[((j / 2) * (width / 2) + i / 2) * 2 + 1] =
-            static_cast<uint8_t>((float(j) / float(height)) * 255.f);
-      }
-    }
-  }
-}
-
-}  // namespace
-
-// This is just to see some meaningfull image in the buffer for testing, only
-// works with YcbCr420.
-void renderTestPatternYCbCr420(const std::shared_ptr<AHardwareBuffer> buffer,
-                               const int frameNumber, const int fence) {
-  AHardwareBuffer_Planes planes_info;
-
-  AHardwareBuffer_Desc hwBufferDesc;
-  AHardwareBuffer_describe(buffer.get(), &hwBufferDesc);
-
-  const int width = hwBufferDesc.width;
-  const int height = hwBufferDesc.height;
-
-  int result = AHardwareBuffer_lockPlanes(buffer.get(),
-                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
-                                          fence, nullptr, &planes_info);
-  if (result != OK) {
-    ALOGE("%s: Failed to lock planes: %d", __func__, result);
-    return;
-  }
-
-  renderTestPatternYcbCr420(
-      reinterpret_cast<uint8_t*>(planes_info.planes[0].data), width, height,
-      frameNumber);
-
-  AHardwareBuffer_unlock(buffer.get(), nullptr);
-}
-
-void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber) {
-  ANativeWindow_Buffer buffer;
-  surface->lock(&buffer, nullptr);
-
-  ALOGV("buffer: %dx%d stride %d, pixfmt %d", buffer.width, buffer.height,
-        buffer.stride, buffer.format);
-
-  renderTestPatternYcbCr420(reinterpret_cast<uint8_t*>(buffer.bits),
-                            buffer.width, buffer.height, frameNumber);
-
-  surface->unlockAndPost();
-}
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.h b/services/camera/virtualcamera/util/TestPatternHelper.h
deleted file mode 100644
index aca1cdd..0000000
--- a/services/camera/virtualcamera/util/TestPatternHelper.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
-#define ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
-
-#include <memory>
-
-#include "android/hardware_buffer.h"
-#include "gui/Surface.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-// Helper function filling hardware buffer with test pattern for debugging /
-// testing purposes.
-void renderTestPatternYCbCr420(std::shared_ptr<AHardwareBuffer> buffer,
-                               int frameNumber, int fence = -1);
-
-// Helper function for rendering test pattern into Surface.
-void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber);
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
-
-#endif  // ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
diff --git a/services/camera/virtualcamera/util/Util.cc b/services/camera/virtualcamera/util/Util.cc
index 2d0545d..4aff60f 100644
--- a/services/camera/virtualcamera/util/Util.cc
+++ b/services/camera/virtualcamera/util/Util.cc
@@ -20,8 +20,14 @@
 
 #include <algorithm>
 #include <array>
+#include <cstdint>
+#include <memory>
 
+#include "EglUtil.h"
+#include "android/hardware_buffer.h"
 #include "jpeglib.h"
+#include "ui/GraphicBuffer.h"
+#include "utils/Errors.h"
 
 namespace android {
 namespace companion {
@@ -30,16 +36,86 @@
 using ::aidl::android::companion::virtualcamera::Format;
 using ::aidl::android::hardware::common::NativeHandle;
 
-// Lower bound for maximal supported texture size is at least 2048x2048
-// but on most platforms will be more.
-// TODO(b/301023410) - Query actual max texture size.
-constexpr int kMaxTextureSize = 2048;
-constexpr int kLibJpegDctSize = DCTSIZE;
 constexpr int kMaxFpsUpperLimit = 60;
 
 constexpr std::array<Format, 2> kSupportedFormats{Format::YUV_420_888,
                                                   Format::RGBA_8888};
 
+YCbCrLockGuard::YCbCrLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                               const uint32_t usageFlags)
+    : mHwBuffer(hwBuffer) {
+  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(mHwBuffer.get());
+  if (gBuffer == nullptr) {
+    ALOGE("%s: Attempting to lock nullptr buffer.", __func__);
+    return;
+  }
+  mLockStatus = gBuffer->lockYCbCr(usageFlags, &mYCbCr);
+  if (mLockStatus != OK) {
+    ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
+          statusToString(mLockStatus).c_str());
+  }
+}
+
+YCbCrLockGuard::~YCbCrLockGuard() {
+  if (getStatus() != OK) {
+    return;
+  }
+
+  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(mHwBuffer.get());
+  if (gBuffer == nullptr) {
+    return;
+  }
+  status_t status = gBuffer->unlock();
+  if (status != NO_ERROR) {
+    ALOGE("Failed to unlock graphic buffer: %s", statusToString(status).c_str());
+  }
+}
+
+status_t YCbCrLockGuard::getStatus() const {
+  return mLockStatus;
+}
+
+const android_ycbcr& YCbCrLockGuard::operator*() const {
+  LOG_ALWAYS_FATAL_IF(getStatus() != OK,
+                      "Dereferencing unlocked YCbCrLockGuard, status is %s",
+                      statusToString(mLockStatus).c_str());
+  return mYCbCr;
+}
+
+PlanesLockGuard::PlanesLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                                 const uint64_t usageFlags, sp<Fence> fence)
+    : mHwBuffer(hwBuffer) {
+  if (hwBuffer == nullptr) {
+    ALOGE("%s: Attempting to lock nullptr buffer.", __func__);
+    return;
+  }
+
+  const int32_t rawFence = fence != nullptr ? fence->get() : -1;
+  mLockStatus = static_cast<status_t>(AHardwareBuffer_lockPlanes(
+      hwBuffer.get(), usageFlags, rawFence, nullptr, &mPlanes));
+  if (mLockStatus != OK) {
+    ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
+          statusToString(mLockStatus).c_str());
+  }
+}
+
+PlanesLockGuard::~PlanesLockGuard() {
+  if (getStatus() != OK || mHwBuffer == nullptr) {
+    return;
+  }
+  AHardwareBuffer_unlock(mHwBuffer.get(), /*fence=*/nullptr);
+}
+
+status_t PlanesLockGuard::getStatus() const {
+  return mLockStatus;
+}
+
+const AHardwareBuffer_Planes& PlanesLockGuard::operator*() const {
+  LOG_ALWAYS_FATAL_IF(getStatus() != OK,
+                      "Dereferencing unlocked PlanesLockGuard, status is %s",
+                      statusToString(mLockStatus).c_str());
+  return mPlanes;
+}
+
 sp<Fence> importFence(const NativeHandle& aidlHandle) {
   if (aidlHandle.fds.size() != 1) {
     return sp<Fence>::make();
@@ -60,15 +136,9 @@
     return false;
   }
 
-  if (width <= 0 || height <= 0 || width > kMaxTextureSize ||
-      height > kMaxTextureSize) {
-    return false;
-  }
-
-  if (width % kLibJpegDctSize != 0 || height % kLibJpegDctSize != 0) {
-    // Input dimension needs to be multiple of libjpeg DCT size.
-    // TODO(b/301023410) This restriction can be removed once we add support for
-    // unaligned jpeg compression.
+  int maxTextureSize = getMaximumTextureSize();
+  if (width <= 0 || height <= 0 || width > maxTextureSize ||
+      height > maxTextureSize) {
     return false;
   }
 
@@ -79,6 +149,10 @@
   return true;
 }
 
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution) {
+  return os << resolution.width << "x" << resolution.height;
+}
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/Util.h b/services/camera/virtualcamera/util/Util.h
index e0a31c0..291e105 100644
--- a/services/camera/virtualcamera/util/Util.h
+++ b/services/camera/virtualcamera/util/Util.h
@@ -17,18 +17,84 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
 
+#include <cmath>
 #include <cstdint>
+#include <memory>
+#include <optional>
+#include <string>
 
 #include "aidl/android/companion/virtualcamera/Format.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/StreamBuffer.h"
 #include "android/binder_auto_utils.h"
+#include "android/hardware_buffer.h"
+#include "system/graphics.h"
 #include "ui/Fence.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 
+// RAII utility class to safely lock AHardwareBuffer and obtain android_ycbcr
+// structure describing YUV plane layout.
+//
+// Access to the buffer is locked immediately after construction.
+class YCbCrLockGuard {
+ public:
+  YCbCrLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer, uint32_t usageFlags);
+  YCbCrLockGuard(YCbCrLockGuard&& other) = default;
+  ~YCbCrLockGuard();
+
+  // Returns OK if the buffer is successfully locked.
+  status_t getStatus() const;
+
+  // Dereferencing an instance of this guard returns the android_ycbcr
+  // structure describing the layout.
+  // The caller needs to check whether the buffer was successfully locked
+  // before dereferencing.
+  const android_ycbcr& operator*() const;
+
+  // Disable copy.
+  YCbCrLockGuard(const YCbCrLockGuard&) = delete;
+  YCbCrLockGuard& operator=(const YCbCrLockGuard&) = delete;
+
+ private:
+  std::shared_ptr<AHardwareBuffer> mHwBuffer;
+  android_ycbcr mYCbCr = {};
+  status_t mLockStatus = DEAD_OBJECT;
+};
+
+// RAII utility class to safely lock AHardwareBuffer and obtain
+// AHardwareBuffer_Planes (suitable for interacting with RGBA / BLOB buffers).
+//
+// Access to the buffer is locked immediately after construction.
+class PlanesLockGuard {
+ public:
+  PlanesLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                  uint64_t usageFlags, sp<Fence> fence = nullptr);
+  PlanesLockGuard(PlanesLockGuard&& other) = default;
+  ~PlanesLockGuard();
+
+  // Returns OK if the buffer is successfully locked.
+  status_t getStatus() const;
+
+  // Dereferencing an instance of this guard returns the AHardwareBuffer_Planes
+  // structure describing the layout.
+  //
+  // The caller needs to check whether the buffer was successfully locked
+  // before dereferencing.
+  const AHardwareBuffer_Planes& operator*() const;
+
+  // Disable copy.
+  PlanesLockGuard(const PlanesLockGuard&) = delete;
+  PlanesLockGuard& operator=(const PlanesLockGuard&) = delete;
+
+ private:
+  std::shared_ptr<AHardwareBuffer> mHwBuffer;
+  AHardwareBuffer_Planes mPlanes = {};
+  status_t mLockStatus = DEAD_OBJECT;
+};
+
 // Converts camera AIDL status to ndk::ScopedAStatus
 inline ndk::ScopedAStatus cameraStatus(
     const ::aidl::android::hardware::camera::common::Status status) {
@@ -52,6 +118,65 @@
     int width, int height,
     ::aidl::android::companion::virtualcamera::Format format, int maxFps);
 
+// Representation of resolution / size.
+struct Resolution {
+  Resolution() = default;
+  Resolution(const int w, const int h) : width(w), height(h) {
+  }
+
+  // Order by increasing pixel count, and by width for same pixel count.
+  bool operator<(const Resolution& other) const {
+    const int pixCount = width * height;
+    const int otherPixCount = other.width * other.height;
+    return pixCount == otherPixCount ? width < other.width
+                                     : pixCount < otherPixCount;
+  }
+
+  bool operator<=(const Resolution& other) const {
+    return *this == other || *this < other;
+  }
+
+  bool operator==(const Resolution& other) const {
+    return width == other.width && height == other.height;
+  }
+
+  int width = 0;
+  int height = 0;
+};
+
+struct FpsRange {
+  int32_t minFps;
+  int32_t maxFps;
+
+  bool operator<(const FpsRange& other) const {
+    return maxFps == other.maxFps ? minFps < other.minFps
+                                  : maxFps < other.maxFps;
+  }
+};
+
+struct GpsCoordinates {
+  // Represented by a double[3] in metadata: index 0 is latitude,
+  // index 1 is longitude and index 2 is altitude.
+  double_t latitude;
+  double_t longitude;
+  double_t altitude;
+  std::optional<int64_t> timestamp;
+  std::string provider;
+};
+
+inline bool isApproximatellySameAspectRatio(const Resolution r1,
+                                            const Resolution r2) {
+  static constexpr float kAspectRatioEpsilon = 0.05;
+  float aspectRatio1 =
+      static_cast<float>(r1.width) / static_cast<float>(r1.height);
+  float aspectRatio2 =
+      static_cast<float>(r2.width) / static_cast<float>(r2.height);
+
+  return std::abs(aspectRatio1 - aspectRatio2) < kAspectRatioEpsilon;
+}
+
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution);
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
index 41efce0..92f0745 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
@@ -85,5 +85,6 @@
 getegid32: 1
 getgroups32: 1
 sysinfo: 1
+setsockopt: 1
 
 @include /apex/com.android.media.swcodec/etc/seccomp_policy/code_coverage.arm.policy
diff --git a/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp b/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
index d329e54..2b31de7 100644
--- a/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
+++ b/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
@@ -22,6 +22,7 @@
 using ::android::MediaExtractorService;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    signal(SIGPIPE, SIG_IGN);
     auto service = sp<MediaExtractorService>::make();
     fuzzService(service, FuzzedDataProvider(data, size));
     return 0;
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index 8088ef0..7a4c3ad 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -29,8 +29,10 @@
         "packagemanager_aidl-cpp",
     ],
 
+    export_include_dirs: ["."],
+
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 }
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index c96c37b..6b4ee5f 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -30,12 +31,12 @@
         "frameworks/av/services/medialog",
     ],
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-audio-fuzzing-reports@google.com",
         ],
         componentid: 155276,
         hotlists: [
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 7dc445b..201d740 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -93,18 +93,27 @@
 /* static */
 bool AudioPowerUsage::deviceFromString(const std::string& device_string, int32_t& device) {
     static std::map<std::string, int32_t> deviceTable = {
-        { "AUDIO_DEVICE_OUT_EARPIECE",             OUTPUT_EARPIECE },
-        { "AUDIO_DEVICE_OUT_SPEAKER_SAFE",         OUTPUT_SPEAKER_SAFE },
-        { "AUDIO_DEVICE_OUT_SPEAKER",              OUTPUT_SPEAKER },
-        { "AUDIO_DEVICE_OUT_WIRED_HEADSET",        OUTPUT_WIRED_HEADSET },
-        { "AUDIO_DEVICE_OUT_WIRED_HEADPHONE",      OUTPUT_WIRED_HEADSET },
-        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO",        OUTPUT_BLUETOOTH_SCO },
-        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP",       OUTPUT_BLUETOOTH_A2DP },
-        { "AUDIO_DEVICE_OUT_USB_HEADSET",          OUTPUT_USB_HEADSET },
-        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET", OUTPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_OUT_EARPIECE",                  OUTPUT_EARPIECE },
+        { "AUDIO_DEVICE_OUT_SPEAKER_SAFE",              OUTPUT_SPEAKER_SAFE },
+        { "AUDIO_DEVICE_OUT_SPEAKER",                   OUTPUT_SPEAKER },
+        { "AUDIO_DEVICE_OUT_WIRED_HEADSET",             OUTPUT_WIRED_HEADSET },
+        { "AUDIO_DEVICE_OUT_WIRED_HEADPHONE",           OUTPUT_WIRED_HEADSET },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO",             OUTPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET",     OUTPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP",            OUTPUT_BLUETOOTH_A2DP },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES", OUTPUT_BLUETOOTH_A2DP },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER",    OUTPUT_BLUETOOTH_A2DP },
+        { "AUDIO_DEVICE_OUT_BLE_HEADSET",               OUTPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_OUT_BLE_SPEAKER",               OUTPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_OUT_BLE_BROADCAST",             OUTPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_OUT_USB_HEADSET",               OUTPUT_USB_HEADSET },
+        { "AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET",         OUTPUT_DOCK },
+        { "AUDIO_DEVICE_OUT_HDMI",                      OUTPUT_HDMI },
 
         { "AUDIO_DEVICE_IN_BUILTIN_MIC",           INPUT_BUILTIN_MIC },
         { "AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET", INPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_IN_BLUETOOTH_BLE",         INPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_IN_BLE_HEADSET",           INPUT_BLUETOOTH_BLE },
         { "AUDIO_DEVICE_IN_WIRED_HEADSET",         INPUT_WIRED_HEADSET_MIC },
         { "AUDIO_DEVICE_IN_USB_DEVICE",            INPUT_USB_HEADSET_MIC },
         { "AUDIO_DEVICE_IN_BACK_MIC",              INPUT_BUILTIN_BACK_MIC },
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 433332c..c7b4297 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -17,6 +17,7 @@
  *****************************************************************************
  * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
  */
+#include <binder/IPCThreadState.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/MediaMetricsItem.h>
 #include <mediametricsservice/AudioTypes.h>
@@ -26,208 +27,158 @@
 #include <string.h>
 #include <utils/Log.h>
 #include <algorithm>
+#include <set>
 
 using namespace android;
+static constexpr size_t STATSD_LOG_LINES_MAX = 48;
+static unsigned long long kPackedCallingUid = (unsigned long long)AID_SYSTEM << 32;
+constexpr int8_t kMaxBytes = 100;
+constexpr int8_t kMinBytes = 0;
+constexpr size_t kMaxItemLength = 16;
 
 // low water mark
 constexpr size_t kLogItemsLowWater = 1;
 // high water mark
 constexpr size_t kLogItemsHighWater = 2;
 
+/*
+ * Keys are generated by concatenating strings so that the lambda
+ * functions that AudioAnalytics() adds to its 'mAction' object are covered.
+ */
+
+std::string keyMediaValues[] = {
+        "metrics.manager",
+        "mediadrm",
+        "audio.device.a2dp",
+        AMEDIAMETRICS_KEY_AUDIO_MIDI,
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "*",
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD "*",
+        AMEDIAMETRICS_KEY_AUDIO_FLINGER,
+        AMEDIAMETRICS_KEY_AUDIO_POLICY,
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK "*",
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD "*",
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM "*",
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_DEVICE
+        "postBluetoothA2dpDeviceConnectionStateSuppressNoisyIntent",
+};
+
+std::string keyMediaAction[] = {
+        "createAudioPatch",
+        "connected",
+        AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_SETMODE,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAUDIOINTERVALGROUP,
+};
+
 class MediaMetricsServiceFuzzer {
-   public:
-    void invokeStartsWith(const uint8_t *data, size_t size);
-    void invokeInstantiate(const uint8_t *data, size_t size);
-    void invokePackageInstallerCheck(const uint8_t *data, size_t size);
-    void invokeItemManipulation(const uint8_t *data, size_t size);
-    void invokeItemExpansion(const uint8_t *data, size_t size);
-    void invokeTimeMachineStorage(const uint8_t *data, size_t size);
-    void invokeTransactionLog(const uint8_t *data, size_t size);
-    void invokeAnalyticsAction(const uint8_t *data, size_t size);
-    void invokeAudioAnalytics(const uint8_t *data, size_t size);
-    void invokeTimedAction(const uint8_t *data, size_t size);
-    void process(const uint8_t *data, size_t size);
+  public:
+    MediaMetricsServiceFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    void process();
+    void invokeStartsWith();
+    void invokeInstantiate();
+    void invokePackageInstallerCheck();
+    void invokeTimeMachineStorage();
+    void invokeTransactionLog();
+    void invokeAnalyticsAction();
+    void invokeAudioAnalytics();
+    void invokeTimedAction();
+    void setKeyValues(std::shared_ptr<mediametrics::Item>& item, std::string keyValue);
+    std::shared_ptr<mediametrics::Item> CreateItem();
+    sp<MediaMetricsService> mMediaMetricsService;
+    FuzzedDataProvider mFdp;
     std::atomic_int mValue = 0;
 };
 
-void MediaMetricsServiceFuzzer::invokeStartsWith(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-    while (fdp.remaining_bytes()) {
-        android::mediametrics::startsWith(fdp.ConsumeRandomLengthString(),
-                                          fdp.ConsumeRandomLengthString());
-    }
+void MediaMetricsServiceFuzzer::setKeyValues(std::shared_ptr<mediametrics::Item>& item,
+                                             std::string keyValue) {
+    auto invokeActionAPIs = mFdp.PickValueInArray<const std::function<void()>>({
+            [&]() { item->setInt32(keyValue.c_str(), mFdp.ConsumeIntegral<int32_t>()); },
+            [&]() { item->addInt32(keyValue.c_str(), mFdp.ConsumeIntegral<int32_t>()); },
+            [&]() { item->setInt64(keyValue.c_str(), mFdp.ConsumeIntegral<int64_t>()); },
+            [&]() { item->addInt64(keyValue.c_str(), mFdp.ConsumeIntegral<int64_t>()); },
+            [&]() { item->setDouble(keyValue.c_str(), mFdp.ConsumeFloatingPoint<double>()); },
+            [&]() { item->addDouble(keyValue.c_str(), mFdp.ConsumeFloatingPoint<double>()); },
+            [&]() { item->setTimestamp(mFdp.ConsumeIntegral<int64_t>()); },
+            [&]() {
+                std::string value = mFdp.ConsumeBool()
+                                            ? mFdp.ConsumeRandomLengthString(kMaxBytes)
+                                            : mFdp.PickValueInArray<std::string>(keyMediaAction);
+                item->setCString(keyValue.c_str(), value.c_str());
+            },
+            [&]() {
+                item->setRate(keyValue.c_str(), mFdp.ConsumeIntegral<int64_t>(),
+                              mFdp.ConsumeIntegral<int64_t>());
+            },
+            [&]() {
+                mediametrics::LogItem<1> itemTemp(mFdp.ConsumeRandomLengthString(kMaxBytes));
+                itemTemp.setPid(mFdp.ConsumeIntegral<int16_t>())
+                        .setUid(mFdp.ConsumeIntegral<int16_t>());
+
+                int32_t i = mFdp.ConsumeIntegral<int32_t>();
+                itemTemp.set(std::to_string(i).c_str(), (int32_t)i);
+                itemTemp.updateHeader();
+                (void)item->readFromByteString(itemTemp.getBuffer(), itemTemp.getLength());
+            },
+
+    });
+    invokeActionAPIs();
 }
 
-void MediaMetricsServiceFuzzer::invokeInstantiate(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-    sp mediaMetricsService = new MediaMetricsService();
-
-    while (fdp.remaining_bytes()) {
-        std::unique_ptr<mediametrics::Item> random_key(
-            mediametrics::Item::create(fdp.ConsumeRandomLengthString()));
-        mediaMetricsService->submit(random_key.get());
-        random_key->setInt32(fdp.ConsumeRandomLengthString().c_str(),
-                             fdp.ConsumeIntegral<int32_t>());
-        mediaMetricsService->submit(random_key.get());
-
-        std::unique_ptr<mediametrics::Item> audiotrack_key(
-            mediametrics::Item::create("audiotrack"));
-        mediaMetricsService->submit(audiotrack_key.get());
-        audiotrack_key->addInt32(fdp.ConsumeRandomLengthString().c_str(),
-                                 fdp.ConsumeIntegral<int32_t>());
-        mediaMetricsService->submit(audiotrack_key.get());
+std::shared_ptr<mediametrics::Item> MediaMetricsServiceFuzzer::CreateItem() {
+    std::string key;
+    if (mFdp.ConsumeBool()) {
+        key = mFdp.ConsumeRandomLengthString(kMaxItemLength);
+    } else {
+        key = mFdp.PickValueInArray<std::string>(keyMediaValues);
     }
-}
 
-void MediaMetricsServiceFuzzer::invokePackageInstallerCheck(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-    while (fdp.remaining_bytes()) {
-        MediaMetricsService::useUidForPackage(fdp.ConsumeRandomLengthString().c_str(),
-                                              fdp.ConsumeRandomLengthString().c_str());
-    }
-}
-
-void MediaMetricsServiceFuzzer::invokeItemManipulation(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
-    mediametrics::Item item(fdp.ConsumeRandomLengthString().c_str());
-    while (fdp.remaining_bytes()) {
-        const uint8_t action = fdp.ConsumeIntegralInRange<uint8_t>(0, 16);
-        const std::string key = fdp.ConsumeRandomLengthString();
-        if (fdp.remaining_bytes() < 1 || key.length() < 1) {
-            break;
+    std::shared_ptr<mediametrics::Item> item = std::make_shared<mediametrics::Item>(key.c_str());
+    size_t numKeys = mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes);
+    std::set<std::string> keySet;
+    for (size_t i = 0; i < numKeys; ++i) {
+        std::string keyValue;
+        if (mFdp.ConsumeBool()) {
+            keyValue = mFdp.ConsumeRandomLengthString(kMaxBytes);
+        } else {
+            keyValue = mFdp.PickValueInArray<std::string>(
+                    {AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_STATE, "logSessionId"});
         }
-        switch (action) {
-            case 0: {
-                item.setInt32(key.c_str(), fdp.ConsumeIntegral<int32_t>());
-                break;
-            }
-            case 1: {
-                item.addInt32(key.c_str(), fdp.ConsumeIntegral<int32_t>());
-                break;
-            }
-            case 2: {
-                int32_t i32 = 0;
-                item.getInt32(key.c_str(), &i32);
-                break;
-            }
-            case 3: {
-                item.setInt64(key.c_str(), fdp.ConsumeIntegral<int64_t>());
-                break;
-            }
-            case 4: {
-                item.addInt64(key.c_str(), fdp.ConsumeIntegral<int64_t>());
-                break;
-            }
-            case 5: {
-                int64_t i64 = 0;
-                item.getInt64(key.c_str(), &i64);
-                break;
-            }
-            case 6: {
-                item.setDouble(key.c_str(), fdp.ConsumeFloatingPoint<double>());
-                break;
-            }
-            case 7: {
-                item.addDouble(key.c_str(), fdp.ConsumeFloatingPoint<double>());
-                break;
-            }
-            case 8: {
-                double d = 0;
-                item.getDouble(key.c_str(), &d);
-                break;
-            }
-            case 9: {
-                item.setCString(key.c_str(), fdp.ConsumeRandomLengthString().c_str());
-                break;
-            }
-            case 10: {
-                char *s = nullptr;
-                item.getCString(key.c_str(), &s);
-                if (s) free(s);
-                break;
-            }
-            case 11: {
-                std::string s;
-                item.getString(key.c_str(), &s);
-                break;
-            }
-            case 12: {
-                item.setRate(key.c_str(), fdp.ConsumeIntegral<int64_t>(),
-                             fdp.ConsumeIntegral<int64_t>());
-                break;
-            }
-            case 13: {
-                int64_t b = 0, h = 0;
-                double d = 0;
-                item.getRate(key.c_str(), &b, &h, &d);
-                break;
-            }
-            case 14: {
-                (void)item.filter(key.c_str());
-                break;
-            }
-            case 15: {
-                const char *arr[1] = {""};
-                arr[0] = const_cast<char *>(key.c_str());
-                (void)item.filterNot(1, arr);
-                break;
-            }
-            case 16: {
-                (void)item.toString().c_str();
-                break;
-            }
+        if (keySet.find(keyValue) == keySet.end()) {
+            setKeyValues(item, keyValue);
+            keySet.insert(keyValue);
         }
     }
-
-    Parcel p;
-    mediametrics::Item item2;
-
-    (void)item.writeToParcel(&p);
-    p.setDataPosition(0);  // rewind for reading
-    (void)item2.readFromParcel(p);
-
-    char *byteData = nullptr;
-    size_t length = 0;
-    (void)item.writeToByteString(&byteData, &length);
-    (void)item2.readFromByteString(byteData, length);
-    if (byteData) {
-        free(byteData);
-    }
-
-    sp mediaMetricsService = new MediaMetricsService();
-    mediaMetricsService->submit(&item2);
+    return item;
 }
 
-void MediaMetricsServiceFuzzer::invokeItemExpansion(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
-    mediametrics::LogItem<1> item("FuzzItem");
-    item.setPid(fdp.ConsumeIntegral<int16_t>()).setUid(fdp.ConsumeIntegral<int16_t>());
-
-    while (fdp.remaining_bytes()) {
-        int32_t i = fdp.ConsumeIntegral<int32_t>();
-        item.set(std::to_string(i).c_str(), (int32_t)i);
-    }
-    item.updateHeader();
-
-    mediametrics::Item item2;
-    (void)item2.readFromByteString(item.getBuffer(), item.getLength());
-
-    sp mediaMetricsService = new MediaMetricsService();
-    mediaMetricsService->submit(&item2);
+void MediaMetricsServiceFuzzer::invokeStartsWith() {
+    android::mediametrics::startsWith(mFdp.ConsumeRandomLengthString(kMaxBytes),
+                                      mFdp.ConsumeRandomLengthString(kMaxBytes));
 }
 
-void MediaMetricsServiceFuzzer::invokeTimeMachineStorage(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+void MediaMetricsServiceFuzzer::invokeInstantiate() {
+    auto item = CreateItem();
+    mMediaMetricsService->submit(item.get());
+}
 
-    auto item = std::make_shared<mediametrics::Item>("FuzzKey");
-    int32_t i32 = fdp.ConsumeIntegral<int32_t>();
-    int64_t i64 = fdp.ConsumeIntegral<int64_t>();
-    double d = fdp.ConsumeFloatingPoint<double>();
-    std::string str = fdp.ConsumeRandomLengthString();
-    std::pair<int64_t, int64_t> pair(fdp.ConsumeIntegral<int64_t>(),
-                                     fdp.ConsumeIntegral<int64_t>());
+void MediaMetricsServiceFuzzer::invokePackageInstallerCheck() {
+    MediaMetricsService::useUidForPackage(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str(),
+                                          mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+}
+
+void MediaMetricsServiceFuzzer::invokeTimeMachineStorage() {
+    auto item = CreateItem();
+    int32_t i32 = mFdp.ConsumeIntegral<int32_t>();
+    int64_t i64 = mFdp.ConsumeIntegral<int64_t>();
+    double d = mFdp.ConsumeFloatingPoint<double>();
+    std::string str = mFdp.ConsumeRandomLengthString(kMaxBytes);
+    std::pair<int64_t, int64_t> pair(mFdp.ConsumeIntegral<int64_t>(),
+                                     mFdp.ConsumeIntegral<int64_t>());
     (*item).set("i32", i32).set("i64", i64).set("double", d).set("string", str).set("rate", pair);
 
     android::mediametrics::TimeMachine timeMachine;
@@ -251,123 +202,89 @@
     timeMachine.get("Key.string", &str, -1);
 }
 
-void MediaMetricsServiceFuzzer::invokeTransactionLog(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
-    auto item = std::make_shared<mediametrics::Item>("Key1");
-    (*item)
-        .set("one", fdp.ConsumeIntegral<int32_t>())
-        .set("two", fdp.ConsumeIntegral<int32_t>())
-        .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+void MediaMetricsServiceFuzzer::invokeTransactionLog() {
+    auto item = CreateItem();
 
     android::mediametrics::TransactionLog transactionLog(
         kLogItemsLowWater, kLogItemsHighWater);  // keep at most 2 items
     transactionLog.size();
 
     transactionLog.put(item);
-    transactionLog.size();
-
-    auto item2 = std::make_shared<mediametrics::Item>("Key2");
-    (*item2)
-        .set("three", fdp.ConsumeIntegral<int32_t>())
-        .set("[Key1]three", fdp.ConsumeIntegral<int32_t>())
-        .setTimestamp(fdp.ConsumeIntegral<int32_t>());
-
-    transactionLog.put(item2);
-    transactionLog.size();
-
-    auto item3 = std::make_shared<mediametrics::Item>("Key3");
-    (*item3)
-        .set("six", fdp.ConsumeIntegral<int32_t>())
-        .set("[Key1]four", fdp.ConsumeIntegral<int32_t>())  // affects Key1
-        .set("[Key1]five", fdp.ConsumeIntegral<int32_t>())  // affects key1
-        .setTimestamp(fdp.ConsumeIntegral<int32_t>());
-
-    transactionLog.put(item3);
-    transactionLog.size();
 }
 
-void MediaMetricsServiceFuzzer::invokeAnalyticsAction(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
+void MediaMetricsServiceFuzzer::invokeAnalyticsAction() {
     mediametrics::AnalyticsActions analyticsActions;
     bool action = false;
 
-    while (fdp.remaining_bytes()) {
-        analyticsActions.addAction(
-            (fdp.ConsumeRandomLengthString() + std::string(".event")).c_str(),
-            fdp.ConsumeRandomLengthString(),
+    analyticsActions.addAction(
+            (mFdp.ConsumeRandomLengthString(kMaxBytes) + std::string(".event")).c_str(),
+            mFdp.ConsumeRandomLengthString(kMaxBytes),
             std::make_shared<mediametrics::AnalyticsActions::Function>(
-                [&](const std::shared_ptr<const android::mediametrics::Item> &) {
-                    action = true;
-                }));
-    }
+                    [&](const std::shared_ptr<const android::mediametrics::Item>&) {
+                        action = true;
+                    }));
 
-    FuzzedDataProvider fdp2 = FuzzedDataProvider(data, size);
+    // make a test item
+    auto item = CreateItem();
+    (*item).set("event", mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
 
-    while (fdp2.remaining_bytes()) {
-        // make a test item
-        auto item = std::make_shared<mediametrics::Item>(fdp2.ConsumeRandomLengthString().c_str());
-        (*item).set("event", fdp2.ConsumeRandomLengthString().c_str());
-
-        // get the actions and execute them
-        auto actions = analyticsActions.getActionsForItem(item);
-        for (const auto &action : actions) {
-            action->operator()(item);
+    // get the actions and execute them
+    auto actions = analyticsActions.getActionsForItem(item);
+    for (const auto& action : actions) {
+        action->operator()(item);
         }
-    }
 }
 
-void MediaMetricsServiceFuzzer::invokeAudioAnalytics(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+void MediaMetricsServiceFuzzer::invokeAudioAnalytics() {
+    int32_t maxLogLine = mFdp.ConsumeIntegralInRange<int32_t>(0, STATSD_LOG_LINES_MAX);
     std::shared_ptr<android::mediametrics::StatsdLog> statsdLog =
-            std::make_shared<android::mediametrics::StatsdLog>(10);
+            std::make_shared<android::mediametrics::StatsdLog>(maxLogLine);
     android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
-    while (fdp.remaining_bytes()) {
-        auto item = std::make_shared<mediametrics::Item>(fdp.ConsumeRandomLengthString().c_str());
-        int32_t transactionUid = fdp.ConsumeIntegral<int32_t>();  // arbitrary
-        (*item)
-            .set(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>())
-            .set(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>())
-            .set(AMEDIAMETRICS_PROP_ALLOWUID, transactionUid)
-            .setUid(transactionUid)
-            .setTimestamp(fdp.ConsumeIntegral<int32_t>());
-        audioAnalytics.submit(item, fdp.ConsumeBool());
+    auto item = CreateItem();
+    Parcel parcel;
+    item->writeToParcel(&parcel);
+    parcel.setDataPosition(0);
+    if (mFdp.ConsumeBool()) {
+        item->readFromParcel(parcel);
     }
-
-    audioAnalytics.dump(1000);
+    audioAnalytics.submit(item, mFdp.ConsumeBool());
 }
 
-void MediaMetricsServiceFuzzer::invokeTimedAction(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+void MediaMetricsServiceFuzzer::invokeTimedAction() {
     android::mediametrics::TimedAction timedAction;
+    timedAction.postIn(std::chrono::seconds(mFdp.ConsumeIntegral<uint32_t>()),
+                       [this] { ++mValue; });
+    timedAction.size();
+}
 
-    while (fdp.remaining_bytes()) {
-        timedAction.postIn(std::chrono::seconds(fdp.ConsumeIntegral<int32_t>()),
-                           [this] { ++mValue; });
-        timedAction.size();
+void MediaMetricsServiceFuzzer::process() {
+    mMediaMetricsService = sp<MediaMetricsService>::make();
+
+    if (mFdp.ConsumeBool()) {
+        IPCThreadState::self()->restoreCallingIdentity(kPackedCallingUid);
+    } else {
+        IPCThreadState::self()->restoreCallingIdentity(mFdp.ConsumeIntegral<size_t>());
+    }
+    while (mFdp.remaining_bytes()) {
+        auto invokeAPIs = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() { invokeStartsWith(); },
+                [&]() { invokeInstantiate(); },
+                [&]() { invokePackageInstallerCheck(); },
+                [&]() { invokeTimeMachineStorage(); },
+                [&]() { invokeTransactionLog(); },
+                [&]() { invokeAudioAnalytics(); },
+                [&]() { invokeTimedAction(); },
+        });
+        invokeAPIs();
     }
 }
 
-void MediaMetricsServiceFuzzer::process(const uint8_t *data, size_t size) {
-    invokeStartsWith(data, size);
-    invokeInstantiate(data, size);
-    invokePackageInstallerCheck(data, size);
-    invokeItemManipulation(data, size);
-    invokeItemExpansion(data, size);
-    invokeTimeMachineStorage(data, size);
-    invokeTransactionLog(data, size);
-    invokeAnalyticsAction(data, size);
-    invokeAudioAnalytics(data, size);
-    invokeTimedAction(data, size);
-}
-
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     if (size < 1) {
         return 0;
     }
-    MediaMetricsServiceFuzzer mediaMetricsServiceFuzzer;
-    mediaMetricsServiceFuzzer.process(data, size);
+    MediaMetricsServiceFuzzer mediaMetricsServiceFuzzer(data, size);
+    mediaMetricsServiceFuzzer.process();
     return 0;
 }
diff --git a/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h b/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
index 6e5a5cf..cf09113 100644
--- a/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
+++ b/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
@@ -71,6 +71,9 @@
         OUTPUT_BLUETOOTH_SCO    = 0x10,
         OUTPUT_BLUETOOTH_A2DP   = 0x20,
         OUTPUT_SPEAKER_SAFE     = 0x40,
+        OUTPUT_BLUETOOTH_BLE    = 0x80,
+        OUTPUT_DOCK             = 0x100,
+        OUTPUT_HDMI             = 0x200,
 
         INPUT_DEVICE_BIT        = 0x40000000,
         INPUT_BUILTIN_MIC       = INPUT_DEVICE_BIT | 0x1, // non-negative positive int32.
@@ -78,6 +81,7 @@
         INPUT_WIRED_HEADSET_MIC = INPUT_DEVICE_BIT | 0x4,
         INPUT_USB_HEADSET_MIC   = INPUT_DEVICE_BIT | 0x8,
         INPUT_BLUETOOTH_SCO     = INPUT_DEVICE_BIT | 0x10,
+        INPUT_BLUETOOTH_BLE     = INPUT_DEVICE_BIT | 0x20,
     };
 
     static bool typeFromString(const std::string& type_string, int32_t& type);
diff --git a/services/mediametrics/include/mediametricsservice/AudioTypes.h b/services/mediametrics/include/mediametricsservice/AudioTypes.h
index b5fe28b..59654bf 100644
--- a/services/mediametrics/include/mediametricsservice/AudioTypes.h
+++ b/services/mediametrics/include/mediametricsservice/AudioTypes.h
@@ -18,6 +18,7 @@
 
 #include <string>
 #include <unordered_map>
+#include <vector>
 
 namespace android::mediametrics::types {
 
diff --git a/services/mediametrics/include/mediametricsservice/TimedAction.h b/services/mediametrics/include/mediametricsservice/TimedAction.h
index 8b53ded..8901ced 100644
--- a/services/mediametrics/include/mediametricsservice/TimedAction.h
+++ b/services/mediametrics/include/mediametricsservice/TimedAction.h
@@ -81,9 +81,8 @@
     void threadLoop() NO_THREAD_SAFETY_ANALYSIS { // thread safety doesn't cover unique_lock
         std::unique_lock l(mLock);
         while (!mQuit) {
-            auto sleepUntilTime = std::chrono::time_point<TimerClock>::max();
             if (!mMap.empty()) {
-                sleepUntilTime = mMap.begin()->first;
+                auto sleepUntilTime = mMap.begin()->first;
                 const auto now = TimerClock::now();
                 if (sleepUntilTime <= now) {
                     auto node = mMap.extract(mMap.begin()); // removes from mMap.
@@ -96,8 +95,17 @@
                 // of REALTIME specification, use kWakeupInterval to ensure minimum
                 // granularity if suspended.
                 sleepUntilTime = std::min(sleepUntilTime, now + kWakeupInterval);
+                mCondition.wait_until(l, sleepUntilTime);
+            } else {
+                // As TimerClock is system_clock (which is not monotonic), libcxx's
+                // implementation of condition_variable::wait_until(l, std::chrono::time_point)
+                // recalculates the 'until' time into the wait duration and then goes back to the
+                // absolute timestamp when calling pthread_cond_timedwait(); this back-and-forth
+                // calculation sometimes loses the 'max' value because enough time passes in
+                // between, and instead passes incorrect timestamp into the syscall, causing a
+                // crash. Mitigating it by explicitly calling the non-timed wait here.
+                mCondition.wait(l);
             }
-            mCondition.wait_until(l, sleepUntilTime);
         }
     }
 
diff --git a/services/mediaresourcemanager/DefaultResourceModel.cpp b/services/mediaresourcemanager/DefaultResourceModel.cpp
index 7bad715..990df82 100644
--- a/services/mediaresourcemanager/DefaultResourceModel.cpp
+++ b/services/mediaresourcemanager/DefaultResourceModel.cpp
@@ -44,7 +44,9 @@
     clients.clear();
     MediaResourceParcel mediaResource{.type = reclimRequestInfo.mResources[0].type,
                                       .subType = reclimRequestInfo.mResources[0].subType};
-    ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid, &mediaResource};
+    ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid,
+                                            reclimRequestInfo.mClientId,
+                                            &mediaResource};
 
     // Resolve the secure-unsecure codec conflicts if there is any.
     switch (reclimRequestInfo.mResources[0].type) {
@@ -116,7 +118,9 @@
         const ReclaimRequestInfo& reclimRequestInfo,
         std::vector<ClientInfo>& clients) {
     MediaResourceParcel mediaResource;
-    ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid, &mediaResource};
+    ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid,
+                                            reclimRequestInfo.mClientId,
+                                            &mediaResource};
 
     // 1. Look to find the client(s) with the other resources, for the given
     // primary type.
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index af85772..8b3711c 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -43,6 +43,28 @@
     MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
 using stats::media_metrics::\
     MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_AUDIO;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_VIDEO;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_IMAGE;
+
+// Map MediaResourceSubType to stats::media_metrics::CodecType
+inline int32_t getMetricsCodecType(MediaResourceSubType codecType) {
+    switch (codecType) {
+        case MediaResourceSubType::kHwAudioCodec:
+        case MediaResourceSubType::kSwAudioCodec:
+            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_AUDIO;
+        case MediaResourceSubType::kHwVideoCodec:
+        case MediaResourceSubType::kSwVideoCodec:
+            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_VIDEO;
+        case MediaResourceSubType::kHwImageCodec:
+        case MediaResourceSubType::kSwImageCodec:
+            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_IMAGE;
+        case MediaResourceSubType::kUnspecifiedSubType:
+            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
+    }
+    return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
+}
 
 inline const char* getCodecType(MediaResourceSubType codecType) {
     switch (codecType) {
@@ -87,23 +109,17 @@
     return CodecBucketUnspecified;
 }
 
-static bool getLogMessage(int hwCount, int swCount, std::stringstream& logMsg) {
-    bool update = false;
-    logMsg.clear();
+static std::string getLogMessage(const std::string& firstKey, const long& firstValue,
+                                 const std::string& secondKey, const long& secondValue) {
 
-    if (hwCount > 0) {
-        logMsg << " HW: " << hwCount;
-        update = true;
+    std::stringstream logMsg;
+    if (firstValue > 0) {
+        logMsg << firstKey << firstValue;
     }
-    if (swCount > 0) {
-        logMsg << " SW: " << swCount;
-        update = true;
+    if (secondValue > 0) {
+        logMsg << secondKey << secondValue;
     }
-
-    if (update) {
-        logMsg << " ] ";
-    }
-    return update;
+    return logMsg.str();
 }
 
 ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
@@ -229,7 +245,7 @@
          clientConfig.clientInfo.uid,
          clientConfig.id,
          clientConfig.clientInfo.name.c_str(),
-         static_cast<int32_t>(clientConfig.codecType),
+         getMetricsCodecType(clientConfig.codecType),
          clientConfig.isEncoder,
          isHardwareCodec(clientConfig.codecType),
          clientConfig.width, clientConfig.height,
@@ -311,7 +327,7 @@
          clientConfig.clientInfo.uid,
          clientConfig.id,
          clientConfig.clientInfo.name.c_str(),
-         static_cast<int32_t>(clientConfig.codecType),
+         getMetricsCodecType(clientConfig.codecType),
          clientConfig.isEncoder,
          isHardwareCodec(clientConfig.codecType),
          clientConfig.width, clientConfig.height,
@@ -342,6 +358,15 @@
     std::scoped_lock lock(mLock);
     // post MediaCodecConcurrentUsageReported for this terminated pid.
     pushConcurrentUsageReport(pid, uid);
+    // Remove all the metrics associated with this process.
+    std::map<int32_t, ConcurrentCodecs>::iterator it1 = mProcessConcurrentCodecsMap.find(pid);
+    if (it1 != mProcessConcurrentCodecsMap.end()) {
+        mProcessConcurrentCodecsMap.erase(it1);
+    }
+    std::map<int32_t, PixelCount>::iterator it2 = mProcessPixelsMap.find(pid);
+    if (it2 != mProcessPixelsMap.end()) {
+        mProcessPixelsMap.erase(it2);
+    }
 }
 
 void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
@@ -378,24 +403,30 @@
 
     std::stringstream peakCodecLog;
     peakCodecLog << "Peak { ";
-    std::stringstream logMsg;
-    if (getLogMessage(peakHwAudioEncoderCount, peakSwAudioEncoderCount, logMsg)) {
-        peakCodecLog << "AudioEnc[" << logMsg.str();
+    std::string logMsg;
+    logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "AudioEnc[ " << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwAudioDecoderCount, peakSwAudioDecoderCount, logMsg)) {
-        peakCodecLog << "AudioDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "AudioDec[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwVideoEncoderCount, peakSwVideoEncoderCount, logMsg)) {
-        peakCodecLog << "VideoEnc[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "VideoEnc[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwVideoDecoderCount, peakSwVideoDecoderCount, logMsg)) {
-        peakCodecLog << "VideoDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "VideoDec[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwImageEncoderCount, peakSwImageEncoderCount, logMsg)) {
-        peakCodecLog << "ImageEnc[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "ImageEnc[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwImageDecoderCount, peakSwImageDecoderCount, logMsg)) {
-        peakCodecLog << "ImageDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "ImageDec[" << logMsg << " ] ";
     }
     peakCodecLog << "}";
 
@@ -425,6 +456,42 @@
 #endif
 }
 
+inline void pushReclaimStats(int32_t callingPid,
+                             int32_t requesterUid,
+                             int requesterPriority,
+                             const std::string& clientName,
+                             int32_t noOfConcurrentCodecs,
+                             int32_t reclaimStatus,
+                             int32_t noOfCodecsReclaimed = 0,
+                             int32_t targetIndex = -1,
+                             int32_t targetClientPid = -1,
+                             int32_t targetClientUid = -1,
+                             int32_t targetPriority = -1) {
+    // Post the pushed atom
+    int result = stats_write(
+        MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
+        requesterUid,
+        requesterPriority,
+        clientName.c_str(),
+        noOfConcurrentCodecs,
+        reclaimStatus,
+        noOfCodecsReclaimed,
+        targetIndex,
+        targetClientUid,
+        targetPriority);
+    ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
+          "Requester[pid(%d): uid(%d): priority(%d)] "
+          "Codec: [%s] "
+          "No of concurrent codecs: %d "
+          "Reclaim Status: %d "
+          "No of codecs reclaimed: %d "
+          "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
+          __func__, callingPid, requesterUid, requesterPriority,
+              clientName.c_str(), noOfConcurrentCodecs,
+          reclaimStatus, noOfCodecsReclaimed,
+          targetIndex, targetClientPid, targetClientUid, targetPriority, result);
+}
+
 void ResourceManagerMetrics::pushReclaimAtom(const ClientInfoParcel& clientInfo,
                                              const std::vector<int>& priorities,
                                              const std::vector<ClientInfo>& targetClients,
@@ -463,33 +530,34 @@
             MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
       }
     }
+
+    if (targetClients.empty()) {
+        // Push the reclaim atom to stats.
+        pushReclaimStats(callingPid,
+                         requesterUid,
+                         requesterPriority,
+                         clientName,
+                         noOfConcurrentCodecs,
+                         reclaimStatus);
+        return;
+    }
+
     int32_t noOfCodecsReclaimed = targetClients.size();
     int32_t targetIndex = 1;
     for (const ClientInfo& targetClient : targetClients) {
         int targetPriority = priorities[targetIndex];
-        // Post the pushed atom
-        int result = stats_write(
-            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
-            requesterUid,
-            requesterPriority,
-            clientName.c_str(),
-            noOfConcurrentCodecs,
-            reclaimStatus,
-            noOfCodecsReclaimed,
-            targetIndex,
-            targetClient.mUid,
-            targetPriority);
-        ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
-              "Requester[pid(%d): uid(%d): priority(%d)] "
-              "Codec: [%s] "
-              "No of concurrent codecs: %d "
-              "Reclaim Status: %d "
-              "No of codecs reclaimed: %d "
-              "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
-              __func__, callingPid, requesterUid, requesterPriority,
-              clientName.c_str(), noOfConcurrentCodecs,
-              reclaimStatus, noOfCodecsReclaimed,
-              targetIndex, targetClient.mPid, targetClient.mUid, targetPriority, result);
+        // Push the reclaim atom to stats.
+        pushReclaimStats(callingPid,
+                         requesterUid,
+                         requesterPriority,
+                         clientName,
+                         noOfConcurrentCodecs,
+                         reclaimStatus,
+                         noOfCodecsReclaimed,
+                         targetIndex,
+                         targetClient.mPid,
+                         targetClient.mUid,
+                         targetPriority);
         targetIndex++;
     }
 }
@@ -646,4 +714,114 @@
     return 0;
 }
 
+static std::string getConcurrentInstanceCount(const std::map<std::string, int>& resourceMap) {
+    if (resourceMap.empty()) {
+        return "";
+    }
+    std::stringstream concurrentInstanceInfo;
+    for (const auto& [name, count] : resourceMap) {
+        if (count > 0) {
+            concurrentInstanceInfo << "      Name: " << name << " Instances: " << count << "\n";
+        }
+    }
+
+    std::string info = concurrentInstanceInfo.str();
+    if (info.empty()) {
+        return "";
+    }
+    return "    Current Concurrent Codec Instances:\n" + info;
+}
+
+static std::string getAppsPixelCount(const std::map<int32_t, PixelCount>& pixelMap) {
+    if (pixelMap.empty()) {
+        return "";
+    }
+    std::stringstream pixelInfo;
+    for (const auto& [pid, pixelCount] : pixelMap) {
+        std::string logMsg = getLogMessage(" Current Pixels: ", pixelCount.mCurrent,
+                                           " Peak Pixels: ", pixelCount.mPeak);
+        if (!logMsg.empty()) {
+            pixelInfo  << "      PID[" << pid << "]: {" << logMsg << " }\n";
+        }
+    }
+
+    return "    Applications Pixel Usage:\n" + pixelInfo.str();
+}
+
+static std::string getCodecUsageMetrics(const ConcurrentCodecsMap& codecsMap) {
+    int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
+    int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
+    int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
+    int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
+    int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
+    int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
+    int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
+    int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
+    int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
+    int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
+    int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
+    int peakSwImageDecoderCount = codecsMap[SwImageDecoder];
+    std::stringstream usageMetrics;
+    std::string logMsg;
+    logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "AudioEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "AudioDec[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "VideoEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "VideoDec[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "ImageEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "ImageDec[" << logMsg << " ] ";
+    }
+
+    return usageMetrics.str();
+}
+
+static std::string getAppsCodecUsageMetrics(
+        const std::map<int32_t, ConcurrentCodecs>& processCodecsMap) {
+    if (processCodecsMap.empty()) {
+        return "";
+    }
+    std::stringstream codecUsage;
+    std::string info;
+    for (const auto& [pid, codecMap] : processCodecsMap) {
+        codecUsage << "      PID[" << pid << "]: ";
+        info = getCodecUsageMetrics(codecMap.mCurrent);
+        if (!info.empty()) {
+            codecUsage << "Current Codec Usage: { " << info << "} ";
+        }
+        info = getCodecUsageMetrics(codecMap.mPeak);
+        if (!info.empty()) {
+            codecUsage << "Peak Codec Usage: { " << info << "}";
+        }
+        codecUsage << "\n";
+    }
+
+    return "    Applications Codec Usage:\n" + codecUsage.str();
+}
+
+
+std::string ResourceManagerMetrics::dump() const {
+    std::string metricsLog("  Metrics logs:\n");
+    metricsLog += getConcurrentInstanceCount(mConcurrentResourceCountMap);
+    metricsLog += getAppsPixelCount(mProcessPixelsMap);
+    metricsLog += getAppsCodecUsageMetrics(mProcessConcurrentCodecsMap);
+
+    return metricsLog;
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index a9bc34b..9904f7d 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -96,7 +96,32 @@
 };
 
 //
-// ResourceManagerMetrics class that maintaines concurrent codec count based:
+//  Resource Manager Metrics is designed to answer questions such as:
+//    - Which apps cause reclaims, and which apps are targeted (reclaimed from) in the process?
+//    - Which apps use the most codecs and the most codec memory?
+//    - What percentage of reclaims succeed?
+//
+//  Though not in the scope of this class, the metrics should also answer:
+//    - What percentage of codec errors are due to the codec being reclaimed?
+//    - What percentage of successful codec creations (starts) require codec reclaims?
+//    - How often does codec start fail even after a successful reclaim?
+//
+//  The metrics are collected to analyze and understand codec resource usage
+//  and to use that information to:
+//    - minimize the number of reclaims
+//    - reduce codec start delays by minimizing the number of reclaim attempts
+//    - minimize reclaim errors in codec records
+//
+//  Success metrics for the Resource Manager Service could be defined as:
+//    - an increase in successful codec creation for foreground apps
+//    - a reduction in the number of codec reclaims
+//    - a reduction in the time to create a codec
+//
+//  We would like to use this data to build better resource management that would:
+//    - increase successful codec creation (for all kinds of apps)
+//    - decrease codec errors due to resources
+//
+// This class maintains concurrent codec counts based on:
 //
 //  1. # of concurrent active codecs (initialized, but aren't released yet) of given
 //     implementation (by codec name) across the system.
@@ -111,7 +136,7 @@
 //  This should help with understanding the (video) memory usage per
 //  application.
 //
-//
+
 class ResourceManagerMetrics {
 public:
     ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo);
@@ -146,6 +171,9 @@
     // Get the current concurrent pixel count (associated with the video codecs) for the process.
     long getCurrentConcurrentPixelCount(int pid) const;
 
+    // Retrieves the metrics log.
+    std::string dump() const;
+
 private:
     ResourceManagerMetrics(const ResourceManagerMetrics&) = delete;
     ResourceManagerMetrics(ResourceManagerMetrics&&) = delete;
@@ -179,9 +207,9 @@
     // Map of resources (name) and number of concurrent instances
     std::map<std::string, int> mConcurrentResourceCountMap;
 
-    // Map of concurrent codes by CodecBucket across the system.
+    // Map of concurrent codecs by CodecBucket across the system.
     ConcurrentCodecsMap mConcurrentCodecsMap;
-    // Map of concurrent and peak codes by CodecBucket for each process/application.
+    // Map of concurrent and peak codecs by CodecBucket for each process/application.
     std::map<int32_t, ConcurrentCodecs> mProcessConcurrentCodecsMap;
 
     // Uid Observer to monitor the application termination.
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 3a02443..9c2fb7c 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -108,10 +108,17 @@
         serviceLog = mServiceLog->toString("    " /* linePrefix */);
     }
 
-    // Get all the resource (and overload pid) logs
+    // Get the resource (and overload pid) log.
     std::string resourceLog;
     getResourceDump(resourceLog);
 
+    // Get all the metrics log.
+    std::string metricsLog;
+    {
+        std::scoped_lock lock{mLock};
+        metricsLog = mResourceManagerMetrics->dump();
+    }
+
     const size_t SIZE = 256;
     char buffer[SIZE];
     snprintf(buffer, SIZE, "ResourceManagerService: %p\n", this);
@@ -123,11 +130,16 @@
             supportsSecureWithNonSecureCodec);
     result.append(buffer);
 
+    // Add resource log.
     result.append(resourceLog.c_str());
 
+    // Add service log.
     result.append("  Events logs (most recent at top):\n");
     result.append(serviceLog);
 
+    // Add metrics log.
+    result.append(metricsLog.c_str());
+
     write(fd, result.c_str(), result.size());
     return OK;
 }
@@ -474,6 +486,7 @@
         const std::vector<MediaResourceParcel>& resources,
         std::vector<ClientInfo>& targetClients) {
     int32_t callingPid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
     std::scoped_lock lock{mLock};
     if (!mProcessInfo->isPidTrusted(callingPid)) {
         pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
@@ -508,7 +521,7 @@
     if (secureCodec != NULL) {
         MediaResourceParcel mediaResource{.type = MediaResource::Type::kSecureCodec,
                                           .subType = secureCodec->subType};
-        ResourceRequestInfo resourceRequestInfo{callingPid, &mediaResource};
+        ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &mediaResource};
         if (!mSupportsMultipleSecureCodecs) {
             if (!getAllClients_l(resourceRequestInfo, targetClients)) {
                 return false;
@@ -525,7 +538,7 @@
         if (!mSupportsSecureWithNonSecureCodec) {
             MediaResourceParcel mediaResource{.type = MediaResource::Type::kSecureCodec,
                                               .subType = nonSecureCodec->subType};
-            ResourceRequestInfo resourceRequestInfo{callingPid, &mediaResource};
+            ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &mediaResource};
             if (!getAllClients_l(resourceRequestInfo, targetClients)) {
                 return false;
             }
@@ -533,7 +546,7 @@
     }
 
     if (drmSession != NULL) {
-        ResourceRequestInfo resourceRequestInfo{callingPid, drmSession};
+        ResourceRequestInfo resourceRequestInfo{callingPid, clientId, drmSession};
         getClientForResource_l(resourceRequestInfo, targetClients);
         if (targetClients.size() == 0) {
             return false;
@@ -542,18 +555,18 @@
 
     if (targetClients.size() == 0 && graphicMemory != nullptr) {
         // if no secure/non-secure codec conflict, run second pass to handle other resources.
-        ResourceRequestInfo resourceRequestInfo{callingPid, graphicMemory};
+        ResourceRequestInfo resourceRequestInfo{callingPid, clientId, graphicMemory};
         getClientForResource_l(resourceRequestInfo, targetClients);
     }
 
     if (targetClients.size() == 0) {
         // if we are here, run the third pass to free one codec with the same type.
         if (secureCodec != nullptr) {
-            ResourceRequestInfo resourceRequestInfo{callingPid, secureCodec};
+            ResourceRequestInfo resourceRequestInfo{callingPid, clientId, secureCodec};
             getClientForResource_l(resourceRequestInfo, targetClients);
         }
         if (nonSecureCodec != nullptr) {
-            ResourceRequestInfo resourceRequestInfo{callingPid, nonSecureCodec};
+            ResourceRequestInfo resourceRequestInfo{callingPid, clientId, nonSecureCodec};
             getClientForResource_l(resourceRequestInfo, targetClients);
         }
     }
@@ -562,12 +575,12 @@
         // if we are here, run the fourth pass to free one codec with the different type.
         if (secureCodec != nullptr) {
             MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
-            ResourceRequestInfo resourceRequestInfo{callingPid, &temp};
+            ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &temp};
             getClientForResource_l(resourceRequestInfo, targetClients);
         }
         if (nonSecureCodec != nullptr) {
             MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
-            ResourceRequestInfo resourceRequestInfo{callingPid, &temp};
+            ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &temp};
             getClientForResource_l(resourceRequestInfo, targetClients);
         }
     }
@@ -585,18 +598,21 @@
 
     // Check if there are any resources to be reclaimed before processing.
     if (resources.empty()) {
+        // Invalid reclaim request, so there is no need to log it.
         return Status::ok();
     }
 
     std::vector<ClientInfo> targetClients;
-    if (!getTargetClients(clientInfo, resources, targetClients)) {
-        // Nothing to reclaim from.
+    if (getTargetClients(clientInfo, resources, targetClients)) {
+        // Reclaim all the target clients.
+        *_aidl_return = reclaimUnconditionallyFrom(targetClients);
+    } else {
+        // No clients to reclaim from.
         ALOGI("%s: There aren't any clients to reclaim from", __func__);
-        return Status::ok();
+        // We need to log this failed reclaim as "no clients to reclaim from".
+        targetClients.clear();
     }
 
-    *_aidl_return = reclaimUnconditionallyFrom(targetClients);
-
     // Log Reclaim Pushed Atom to statsd
     pushReclaimAtom(clientInfo, targetClients, *_aidl_return);
 
@@ -914,6 +930,11 @@
 
     for (auto& [pid, infos] : mMap) {
         for (const auto& [id, info] : infos) {
+            if (pid == resourceRequestInfo.mCallingPid && id == resourceRequestInfo.mClientId) {
+                ALOGI("%s: Skip the client[%jd] for which the resource request is made",
+                      __func__, id);
+                continue;
+            }
             if (hasResourceType(type, subType, info.resources)) {
                 if (!isCallingPriorityHigher_l(resourceRequestInfo.mCallingPid, pid)) {
                     // some higher/equal priority process owns the resource,
diff --git a/services/mediaresourcemanager/ResourceManagerServiceNew.cpp b/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
index af093ca..0a0a8f4 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
+++ b/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
@@ -248,7 +248,7 @@
     // Use the Resource Model to get a list of all the clients that hold the
     // needed/requested resources.
     uint32_t callingImportance = std::max(0, clientInfo.importance);
-    ReclaimRequestInfo reclaimRequestInfo{callingPid, callingImportance, resources};
+    ReclaimRequestInfo reclaimRequestInfo{callingPid, clientInfo.id, callingImportance, resources};
     std::vector<ClientInfo> clients;
     if (!mDefaultResourceModel->getAllClients(reclaimRequestInfo, clients)) {
         if (clients.empty()) {
@@ -300,7 +300,10 @@
 
     // Use the DefaultResourceModel to get all the clients with the resources requested.
     std::vector<MediaResourceParcel> resources{*resourceRequestInfo.mResource};
-    ReclaimRequestInfo reclaimRequestInfo{resourceRequestInfo.mCallingPid, 0, resources};
+    ReclaimRequestInfo reclaimRequestInfo{resourceRequestInfo.mCallingPid,
+                                          resourceRequestInfo.mClientId,
+                                          0, // default importance
+                                          resources};
     std::vector<ClientInfo> clients;
     mDefaultResourceModel->getAllClients(reclaimRequestInfo, clients);
 
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
index 679ab13..49f68e9 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
@@ -94,7 +94,7 @@
         str.append("\n");
     }
 
-    return std::move(str);
+    return str;
 }
 
 bool ResourceList::operator==(const ResourceList& rhs) const {
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.h b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
index 32cb219..e8f1515 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.h
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
@@ -166,11 +166,13 @@
 /*
  * Resource Reclaim request info that encapsulates
  *  - the calling/requesting process pid.
+ *  - the id of the client that made the reclaim request.
  *  - the calling/requesting client's importance.
  *  - the list of resources requesting (to be reclaimed from others)
  */
 struct ReclaimRequestInfo {
     int mCallingPid = -1;
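+    // id of the client making the reclaim request.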
+    int64_t mClientId = 0;
     uint32_t mCallingClientImportance = 0;
     const std::vector<::aidl::android::media::MediaResourceParcel>& mResources;
 };
@@ -178,11 +180,14 @@
 /*
  * Resource request info that encapsulates
  *  - the calling/requesting process pid.
+ *  - the calling/requesting client's id.
  *  - the resource requesting (to be reclaimed from others)
  */
 struct ResourceRequestInfo {
     // pid of the calling/requesting process.
     int mCallingPid = -1;
+    // id of the calling/requesting client.
+    int64_t mClientId = 0;
     // resources requested.
     const ::aidl::android::media::MediaResourceParcel* mResource;
 };
diff --git a/services/mediaresourcemanager/ResourceTracker.cpp b/services/mediaresourcemanager/ResourceTracker.cpp
index 22381c3..3ee20cd 100644
--- a/services/mediaresourcemanager/ResourceTracker.cpp
+++ b/services/mediaresourcemanager/ResourceTracker.cpp
@@ -715,6 +715,11 @@
     MediaResource::SubType subType = resourceRequestInfo.mResource->subType;
     for (auto& [pid, /* ResourceInfos */ infos] : mMap) {
         for (const auto& [id, /* ResourceInfo */ info] : infos) {
+            if (pid == resourceRequestInfo.mCallingPid && id == resourceRequestInfo.mClientId) {
+                ALOGI("%s: Skip the client[%jd] for which the resource request is made",
+                      __func__, id);
+                continue;
+            }
             if (hasResourceType(type, subType, info.resources)) {
                 if (!isCallingPriorityHigher(resourceRequestInfo.mCallingPid, pid)) {
                     // some higher/equal priority process owns the resource,
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 5bac062..3f04f69 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -47,7 +47,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "girishshetty@google.com",
         ],
         componentid: 155276,
         hotlists: [
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 6a64823..ac41959 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -29,6 +29,9 @@
         "libactivitymanager_aidl",
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
+    ],
     include_dirs: [
         "frameworks/av/include",
         "frameworks/av/services/mediaresourcemanager",
@@ -81,6 +84,9 @@
         "libactivitymanager_aidl",
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
+    ],
     include_dirs: [
         "frameworks/av/include",
         "frameworks/av/services/mediaresourcemanager",
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 7e8a4a0..2c8659d 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -165,6 +165,7 @@
     DISALLOW_EVIL_CONSTRUCTORS(TestClient);
 };
 
+// [pid, uid] used by the test.
 static const int kTestPid1 = 30;
 static const int kTestUid1 = 1010;
 
@@ -175,6 +176,12 @@
 static const int kMidPriorityPid = 25;
 static const int kHighPriorityPid = 10;
 
+// Client Ids used by the test.
+static const int kLowPriorityClientId = 1111;
+static const int kMidPriorityClientId = 2222;
+static const int kHighPriorityClientId = 3333;
+
+// Client importance used by the test.
 static const int32_t kHighestCodecImportance = 0;
 static const int32_t kLowestCodecImportance = 100;
 static const int32_t kMidCodecImportance = 50;
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index b3a0932..027987e 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -337,7 +337,7 @@
             // priority too low to reclaim resource
             ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(kLowPriorityPid),
                                         .uid = static_cast<int32_t>(kTestUid1),
-                                        .id = 0,
+                                        .id = kLowPriorityClientId,
                                         .name = "none"};
             CHECK_STATUS_FALSE(mService->reclaimResource(clientInfo, resources, &result));
 
@@ -475,9 +475,9 @@
         MediaResource resource(MediaResource::Type::kSecureCodec,
                                MediaResource::SubType::kUnspecifiedSubType,
                                1);
-        ResourceRequestInfo requestInfoHigh { kHighPriorityPid, &resource};
-        ResourceRequestInfo requestInfoMid { kMidPriorityPid, &resource};
-        ResourceRequestInfo requestInfoLow { kLowPriorityPid, &resource};
+        ResourceRequestInfo requestInfoHigh { kHighPriorityPid, kHighPriorityClientId, &resource};
+        ResourceRequestInfo requestInfoMid { kMidPriorityPid, kMidPriorityClientId, &resource};
+        ResourceRequestInfo requestInfoLow { kLowPriorityPid, kLowPriorityClientId, &resource};
 
         EXPECT_FALSE(mService->getAllClients_l(requestInfoLow, targetClients));
         // some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
@@ -491,6 +491,81 @@
         EXPECT_EQ(getId(mTestClient1), targetClients[1].mClientId);
     }
 
+    // test set up
+    // ---------------------------------------------------------------------------
+    //   pid/priority          client/clientId       type               number
+    // ---------------------------------------------------------------------------
+    //   kTestPid1(30)         mTestClient1          secure codec       1
+    //                                               graphic memory     200
+    //                                               graphic memory     200
+    // ---------------------------------------------------------------------------
+    //   kTestPid2(20)         mTestClient2          non-secure codec   1
+    //                                               graphic memory     300
+    //                         ---------------------------------------------------
+    //                         mTestClient3          secure codec       1
+    //                                               graphic memory     100
+    // ---------------------------------------------------------------------------
+    //   kHighPriorityPid(10)  kHighPriorityClient   secure codec       1
+    // ---------------------------------------------------------------------------
+    // The kHighPriorityClient makes a reclaim request (after adding itself).
+    // This should succeed and must not be treated as a new client trying to
+    // reclaim from an existing client of the same or higher priority process.
+    void testSelfReclaimResourceSecure() {
+        std::vector<MediaResourceParcel> resources;
+        resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
+        resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
+
+        ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = kLowPriorityClientId,
+                                           .name = "none"};
+        ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = kMidPriorityClientId,
+                                           .name = "none"};
+        // HighPriority process with client id kHighPriorityClientId.
+        ClientInfoParcel highPriorityClient1{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                             .uid = static_cast<int32_t>(kTestUid2),
+                                             .id = kHighPriorityClientId,
+                                             .name = "none"};
+        // HighPriority process with client id 0xABCD.
+        ClientInfoParcel highPriorityClient2{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                             .uid = static_cast<int32_t>(kTestUid2),
+                                             .id = 0xABCD,
+                                             .name = "none"};
+
+        addResource();
+
+        // Add a secure codec resource for the highPriorityClient1.
+        std::shared_ptr<IResourceManagerClient> testClient4 =
+            createTestClient(kHighPriorityPid, kTestUid2);
+        std::vector<MediaResourceParcel> resources1;
+        resources1.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
+        mService->addResource(highPriorityClient1, testClient4, resources1);
+
+        // secure codecs can't coexist and secure codec can't coexist with non-secure codec.
+        updateConfig(false, false);
+
+        // priority too low
+        CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+        CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
+
+        // highPriorityClient2 tries to reclaim SecureCodec with Graphic memory.
+        // This should fail as this process already has an instance of secure
+        // codec through testClient4.
+        CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient2, resources, &result));
+
+        // highPriorityClient1 tries to reclaim SecureCodec with Graphic memory.
+        // This should reclaim all secure and non-secure codecs.
+        CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient1, resources, &result));
+        EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
+
+        // Make sure there is nothing left.
+        CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient1, resources, &result));
+    }
+
     void testReclaimResourceSecure() {
         std::vector<MediaResourceParcel> resources;
         resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
@@ -498,15 +573,15 @@
 
         ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
                                           .uid = static_cast<int32_t>(kTestUid2),
-                                           .id = 0,
+                                           .id = kLowPriorityClientId,
                                            .name = "none"};
         ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
                                            .uid = static_cast<int32_t>(kTestUid2),
-                                           .id = 0,
+                                           .id = kMidPriorityClientId,
                                            .name = "none"};
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
 
         // ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
@@ -553,7 +628,6 @@
             CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
-
         // ### secure codecs can coexist but secure codec can't coexist with non-secure codec ###
         {
             addResource();
@@ -650,15 +724,15 @@
 
         ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
                                           .uid = static_cast<int32_t>(kTestUid2),
-                                           .id = 0,
+                                           .id = kLowPriorityClientId,
                                            .name = "none"};
         ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
                                            .uid = static_cast<int32_t>(kTestUid2),
-                                           .id = 0,
+                                           .id = kMidPriorityClientId,
                                            .name = "none"};
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
 
         // ### secure codec can't coexist with non-secure codec ###
@@ -751,8 +825,8 @@
         MediaResource resource(MediaResource::Type::kGraphicMemory,
                                MediaResource::SubType::kUnspecifiedSubType,
                                1);
-        ResourceRequestInfo requestInfoHigh { kHighPriorityPid, &resource};
-        ResourceRequestInfo requestInfoLow { kLowPriorityPid, &resource};
+        ResourceRequestInfo requestInfoHigh { kHighPriorityPid, kHighPriorityClientId, &resource};
+        ResourceRequestInfo requestInfoLow { kLowPriorityPid, kLowPriorityClientId, &resource};
         EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(requestInfoHigh, clientInfo));
 
         addResource();
@@ -910,7 +984,7 @@
         reclaimResources.push_back(createNonSecureVideoCodecResource());
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
         CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
@@ -951,7 +1025,7 @@
         reclaimResources.push_back(createNonSecureAudioCodecResource());
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
         CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
@@ -992,7 +1066,7 @@
         reclaimResources.push_back(createNonSecureImageCodecResource());
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
         CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
@@ -1034,7 +1108,7 @@
         reclaimResources.push_back(createGraphicMemoryResource(100));
         ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
                                             .uid = static_cast<int32_t>(kTestUid2),
-                                            .id = 0,
+                                            .id = kHighPriorityClientId,
                                             .name = "none"};
         CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
@@ -1786,6 +1860,10 @@
     testRemoveClient();
 }
 
+TEST_F(ResourceManagerServiceTest, selfReclaimResource) {
+    testSelfReclaimResourceSecure();
+}
+
 TEST_F(ResourceManagerServiceTest, reclaimResource) {
     testReclaimResourceSecure();
     testReclaimResourceNonSecure();
@@ -1873,6 +1951,10 @@
     testRemoveClient();
 }
 
+TEST_F(ResourceManagerServiceNewTest, selfReclaimResource) {
+    testSelfReclaimResourceSecure();
+}
+
 TEST_F(ResourceManagerServiceNewTest, reclaimResource) {
     testReclaimResourceSecure();
     testReclaimResourceNonSecure();
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 6b48075..d663f37 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -37,6 +37,8 @@
 #include "AAudioServiceEndpointPlay.h"
 #include "AAudioServiceEndpointMMAP.h"
 
+#include <com_android_media_aaudio.h>
+
 #define AAUDIO_BUFFER_CAPACITY_MIN    (4 * 512)
 #define AAUDIO_SAMPLE_RATE_DEFAULT    48000
 
@@ -148,9 +150,15 @@
 
         // Try other formats if the config from APM is the same as our current config.
         // Some HALs may report its format support incorrectly.
-        if ((previousConfig.format == config.format) &&
-                (previousConfig.sample_rate == config.sample_rate)) {
-            config.format = getNextFormatToTry(config.format);
+        if (previousConfig.format == config.format) {
+            if (previousConfig.sample_rate == config.sample_rate) {
+                config.format = getNextFormatToTry(config.format);
+            } else if (!com::android::media::aaudio::sample_rate_conversion()) {
+                ALOGI("%s() - AAudio SRC feature not enabled, different rates! %d != %d",
+                      __func__, previousConfig.sample_rate, config.sample_rate);
+                result = AAUDIO_ERROR_INVALID_RATE;
+                break;
+            }
         }
 
         ALOGD("%s() %#x %d failed, perhaps due to format or sample rate. Try again with %#x %d",
@@ -197,6 +205,8 @@
           "sample_rate=%u, channel_mask=%#x, device=%d",
           __func__, config->format, config->sample_rate,
           config->channel_mask, deviceId);
+
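+    // Hold mMmapStreamLock across the HAL open so a concurrent close() cannot clear mMmapStream.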
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
                                                                 &attributes,
                                                                 config,
@@ -238,7 +248,7 @@
           __func__, config->format, getDeviceId(), getSessionId());
 
     // Create MMAP/NOIRQ buffer.
-    result = createMmapBuffer();
+    result = createMmapBuffer_l();
     if (result != AAUDIO_OK) {
         goto error;
     }
@@ -275,7 +285,7 @@
     return result;
 
 error:
-    close();
+    close_l();
     // restore original requests
     setDeviceId(mRequestedDeviceId);
     setSessionId(requestedSessionId);
@@ -283,13 +293,28 @@
 }
 
 void AAudioServiceEndpointMMAP::close() {
-    if (mMmapStream != nullptr) {
-        // Needs to be explicitly cleared or CTS will fail but it is not clear why.
-        mMmapStream.clear();
+    bool closedIt = false;
+    {
+        const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+        closedIt = close_l();
+    }
+    if (closedIt) {
+        // TODO Why is this needed?
         AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
     }
 }
 
+bool AAudioServiceEndpointMMAP::close_l() { // requires mMmapStreamLock
+    bool closedIt = false;
+    if (mMmapStream != nullptr) {
+        // Needs to be explicitly cleared or CTS will fail but it is not clear why.
+        ALOGD("%s() clear mMmapStream", __func__);
+        mMmapStream.clear();
+        closedIt = true;
+    }
+    return closedIt;
+}
+
 aaudio_result_t AAudioServiceEndpointMMAP::startStream(sp<AAudioServiceStreamBase> stream,
                                                    audio_port_handle_t *clientHandle __unused) {
     // Start the client on behalf of the AAudio service.
@@ -310,7 +335,7 @@
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::stopStream(sp<AAudioServiceStreamBase> /*stream*/,
-                                                      audio_port_handle_t /*clientHandle*/) {
+                                                      audio_port_handle_t clientHandle) {
     mFramesTransferred.reset32();
 
     // Round 64-bit counter up to a multiple of the buffer capacity.
@@ -320,36 +345,68 @@
     mFramesTransferred.roundUp64(getBufferCapacity());
 
     // Use the port handle that was provided by openMmapStream().
-    ALOGV("%s() mPortHandle = %d", __func__, mPortHandle);
-    return stopClient(mPortHandle);
+    aaudio_result_t result = stopClient(mPortHandle);
+    ALOGD("%s(%d): called stopClient(%d=mPortHandle), returning %d", __func__,
+          (int)clientHandle, mPortHandle, result);
+    return result;
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::startClient(const android::AudioClient& client,
                                                        const audio_attributes_t *attr,
-                                                       audio_port_handle_t *clientHandle) {
-    return mMmapStream == nullptr
-            ? AAUDIO_ERROR_NULL
-            : AAudioConvert_androidToAAudioResult(mMmapStream->start(client, attr, clientHandle));
+                                                       audio_port_handle_t *portHandlePtr) {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    } else if (!isConnected()) {
+        ALOGD("%s(): MMAP stream was disconnected", __func__);
+        return AAUDIO_ERROR_DISCONNECTED;
+    } else {
+        aaudio_result_t result = AAudioConvert_androidToAAudioResult(
+                mMmapStream->start(client, attr, portHandlePtr));
+        if (!isConnected() && (portHandlePtr != nullptr)) {
+            ALOGD("%s(): MMAP stream DISCONNECTED after starting port %d, will stop it",
+                  __func__, *portHandlePtr);
+            mMmapStream->stop(*portHandlePtr);
+            *portHandlePtr = AUDIO_PORT_HANDLE_NONE;
+            result = AAUDIO_ERROR_DISCONNECTED;
+        }
+        ALOGD("%s(): returning port %d, result %d", __func__,
+              (portHandlePtr == nullptr) ? -1 : *portHandlePtr, result);
+        return result;
+    }
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::stopClient(audio_port_handle_t clientHandle) {
-    return mMmapStream == nullptr
-            ? AAUDIO_ERROR_NULL
-            : AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
+aaudio_result_t AAudioServiceEndpointMMAP::stopClient(audio_port_handle_t portHandle) {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+    if (mMmapStream == nullptr) {
+        ALOGE("%s(%d): called after mMmapStream set to NULL", __func__, (int)portHandle);
+        return AAUDIO_ERROR_NULL;
+    } else {
+        aaudio_result_t result = AAudioConvert_androidToAAudioResult(
+                mMmapStream->stop(portHandle));
+        ALOGD("%s(%d): returning %d", __func__, (int)portHandle, result);
+        return result;
+    }
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::standby() {
-    return mMmapStream == nullptr
-            ? AAUDIO_ERROR_NULL
-            : AAudioConvert_androidToAAudioResult(mMmapStream->standby());
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    } else {
+        return AAudioConvert_androidToAAudioResult(mMmapStream->standby());
+    }
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::exitStandby(AudioEndpointParcelable* parcelable) {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     if (mMmapStream == nullptr) {
         return AAUDIO_ERROR_NULL;
     }
     mAudioDataWrapper->reset();
-    const aaudio_result_t result = createMmapBuffer();
+    const aaudio_result_t result = createMmapBuffer_l();
     if (result == AAUDIO_OK) {
         getDownDataDescription(parcelable);
     }
@@ -359,10 +416,12 @@
 // Get free-running DSP or DMA hardware position from the HAL.
 aaudio_result_t AAudioServiceEndpointMMAP::getFreeRunningPosition(int64_t *positionFrames,
                                                                 int64_t *timeNanos) {
-    struct audio_mmap_position position;
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
         return AAUDIO_ERROR_NULL;
     }
+    struct audio_mmap_position position;
     const status_t status = mMmapStream->getMmapPosition(&position);
     ALOGV("%s() status= %d, pos = %d, nanos = %lld\n",
           __func__, status, position.position_frames, (long long) position.time_nanoseconds);
@@ -467,9 +526,14 @@
 aaudio_result_t AAudioServiceEndpointMMAP::getExternalPosition(uint64_t *positionFrames,
                                                                int64_t *timeNanos)
 {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     if (mHalExternalPositionStatus != AAUDIO_OK) {
         return mHalExternalPositionStatus;
     }
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    }
     uint64_t tempPositionFrames;
     int64_t tempTimeNanos;
     const status_t status = mMmapStream->getExternalPosition(&tempPositionFrames, &tempTimeNanos);
@@ -544,13 +608,20 @@
     return mHalExternalPositionStatus;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer()
+// mMmapStreamLock should be held when calling this function.
+aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer_l()
 {
     memset(&mMmapBufferinfo, 0, sizeof(struct audio_mmap_buffer_info));
     int32_t minSizeFrames = getBufferCapacity();
     if (minSizeFrames <= 0) { // zero will get rejected
         minSizeFrames = AAUDIO_BUFFER_CAPACITY_MIN;
     }
+
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    }
+
     const status_t status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
     const bool isBufferShareable = mMmapBufferinfo.flags & AUDIO_MMAP_APPLICATION_SHAREABLE;
     if (status != OK) {
@@ -590,6 +661,7 @@
     // Call to HAL to make sure the transport FD was able to be closed by binder.
     // This is a tricky workaround for a problem in Binder.
     // TODO:[b/192048842] When that problem is fixed we may be able to remove or change this code.
+    ALOGD("%s() - call getMmapPosition() as a hack to clear FD stuck in Binder", __func__);
     struct audio_mmap_position position;
     mMmapStream->getMmapPosition(&position);
 
@@ -605,11 +677,14 @@
 }
 
 void AAudioServiceEndpointMMAP::reportData() {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+
     if (mMmapStream == nullptr) {
         // This must not happen
         ALOGE("%s() invalid state, mmap stream is not initialized", __func__);
         return;
     }
+
     auto fifo = mAudioDataWrapper->getFifoBuffer();
     if (fifo == nullptr) {
         ALOGE("%s() fifo buffer is not initialized, cannot report data", __func__);
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index eaa578c..962d390 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -50,7 +50,7 @@
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-    void close() override;
+    void close() override EXCLUDES(mMmapStreamLock);
 
     aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
                                 audio_port_handle_t *clientHandle) override;
@@ -60,15 +60,19 @@
 
     aaudio_result_t startClient(const android::AudioClient& client,
                                 const audio_attributes_t *attr,
-                                audio_port_handle_t *clientHandle)  override;
+                                audio_port_handle_t *clientHandle)  override
+                                EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t stopClient(audio_port_handle_t clientHandle)  override;
+    aaudio_result_t stopClient(audio_port_handle_t clientHandle)  override
+            EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t standby() override;
+    aaudio_result_t standby() override EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t exitStandby(AudioEndpointParcelable* parcelable) override;
+    aaudio_result_t exitStandby(AudioEndpointParcelable* parcelable) override
+            EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override
+             EXCLUDES(mMmapStreamLock);
 
     aaudio_result_t getTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;
 
@@ -88,22 +92,31 @@
         return mHardwareTimeOffsetNanos;
     }
 
-    aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos);
+    aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos)
+            EXCLUDES(mMmapStreamLock);
 
-    int64_t nextDataReportTime();
+    int64_t nextDataReportTime() EXCLUDES(mMmapStreamLock);
 
-    void reportData();
+    void reportData() EXCLUDES(mMmapStreamLock);
 
 private:
 
-    aaudio_result_t openWithConfig(audio_config_base_t* config);
+    /**
+     * Clear mMmapStream if it is still set.
+     *
+     * @return true if mMmapStream was cleared
+     */
+    bool close_l() REQUIRES(mMmapStreamLock);
 
-    aaudio_result_t createMmapBuffer();
+    aaudio_result_t openWithConfig(audio_config_base_t* config) EXCLUDES(mMmapStreamLock);
+
+    aaudio_result_t createMmapBuffer_l() REQUIRES(mMmapStreamLock);
 
     MonotonicCounter                          mFramesTransferred;
 
     // Interface to the AudioFlinger MMAP support.
-    android::sp<android::MmapStreamInterface> mMmapStream;
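+    // Guards mMmapStream so close() cannot clear it while another thread is using the stream.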
+    mutable std::mutex                        mMmapStreamLock;
+    android::sp<android::MmapStreamInterface> mMmapStream GUARDED_BY(mMmapStreamLock);
+
     struct audio_mmap_buffer_info             mMmapBufferinfo;
 
     // There is only one port associated with an MMAP endpoint.
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index 637405d..5d6e2ae 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -88,7 +88,8 @@
                 }
 
                 aaudio_stream_state_t state = clientStream->getState();
-                if (state == AAUDIO_STREAM_STATE_STOPPING) {
+                if (state == AAUDIO_STREAM_STATE_STOPPING ||
+                    state == AAUDIO_STREAM_STATE_PAUSING) {
                     allowUnderflow = false; // just read what is already in the FIFO
                 } else if (state != AAUDIO_STREAM_STATE_STARTED) {
                     continue; // this stream is not running so skip it.
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 5fb152e..78cf706 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -22,6 +22,7 @@
 #include <iostream>
 #include <mutex>
 
+#include <com_android_media_aaudio.h>
 #include <media/MediaMetricsItem.h>
 #include <media/TypeConverter.h>
 #include <mediautils/SchedulingPolicyService.h>
@@ -75,11 +76,7 @@
                         this, getState());
 
     // Stop the command thread before destroying.
-    if (mThreadEnabled) {
-        mThreadEnabled = false;
-        mCommandQueue.stopWaiting();
-        mCommandThread.stop();
-    }
+    stopCommandThread();
 }
 
 std::string AAudioServiceStreamBase::dumpHeader() {
@@ -194,26 +191,27 @@
 
 error:
     closeAndClear();
-    mThreadEnabled = false;
-    mCommandQueue.stopWaiting();
-    mCommandThread.stop();
+    stopCommandThread();
     return result;
 }
 
 aaudio_result_t AAudioServiceStreamBase::close() {
     aaudio_result_t result = sendCommand(CLOSE, nullptr, true /*waitForReply*/, TIMEOUT_NANOS);
+    if (result == AAUDIO_ERROR_ALREADY_CLOSED) {
+        // AAUDIO_ERROR_ALREADY_CLOSED is not really an error; it just indicates that the stream
+        // has already been closed. In that case, there is no need to close the stream again.
+        ALOGD("The stream(%d) is already closed", mHandle);
+        return AAUDIO_OK;
+    }
 
-    // Stop the command thread as the stream is closed.
-    mThreadEnabled = false;
-    mCommandQueue.stopWaiting();
-    mCommandThread.stop();
+    stopCommandThread();
 
     return result;
 }
 
 aaudio_result_t AAudioServiceStreamBase::close_l() {
     if (getState() == AAUDIO_STREAM_STATE_CLOSED) {
-        return AAUDIO_OK;
+        return AAUDIO_ERROR_ALREADY_CLOSED;
     }
 
     // This will stop the stream, just in case it was not already stopped.
@@ -222,13 +220,17 @@
     return closeAndClear();
 }
 
-aaudio_result_t AAudioServiceStreamBase::startDevice() {
+aaudio_result_t AAudioServiceStreamBase::startDevice_l() {
     mClientHandle = AUDIO_PORT_HANDLE_NONE;
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
+    if (!endpoint->isConnected()) {
+        ALOGE("%s() endpoint was already disconnected", __func__);
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     return endpoint->startStream(this, &mClientHandle);
 }
 
@@ -277,7 +279,7 @@
     mAtomicStreamTimestamp.clear();
 
     mClientHandle = AUDIO_PORT_HANDLE_NONE;
-    result = startDevice();
+    result = startDevice_l();
     if (result != AAUDIO_OK) goto error;
 
     // This should happen at the end of the start.
@@ -310,6 +312,8 @@
             .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)result)
             .record(); });
 
+    setState(AAUDIO_STREAM_STATE_PAUSING);
+
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
@@ -334,6 +338,7 @@
 aaudio_result_t AAudioServiceStreamBase::stop_l() {
     aaudio_result_t result = AAUDIO_OK;
     if (!isRunning()) {
+        ALOGW("%s() stream not running, returning early", __func__);
         return result;
     }
     const int64_t beginNs = AudioClock::getNanoseconds();
@@ -398,6 +403,7 @@
 }
 
 // implement Runnable, periodically send timestamps to client and process commands from queue.
+// Enter standby mode if idle for a while.
 __attribute__((no_sanitize("integer")))
 void AAudioServiceStreamBase::run() {
     ALOGD("%s() %s entering >>>>>>>>>>>>>> COMMANDS", __func__, getTypeText());
@@ -406,6 +412,7 @@
     TimestampScheduler timestampScheduler;
     int64_t nextTimestampReportTime;
     int64_t nextDataReportTime;
+    // When to try to enter standby.
     int64_t standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
     // Balance the incStrong from when the thread was launched.
     holdStream->decStrong(nullptr);
@@ -417,28 +424,26 @@
     int32_t loopCount = 0;
     while (mThreadEnabled.load()) {
         loopCount++;
-        int64_t timeoutNanos = -1;
-        if (isDisconnected_l()) {
-            if (!isStandby_l()) {
-                // If the stream is disconnected but not in standby mode, wait until standby time.
+        int64_t timeoutNanos = -1; // wait forever
+        if (isDisconnected_l() || isIdle_l()) {
+            if (isStandbyImplemented() && !isStandby_l()) {
+                // If not in standby mode, wait until standby time.
                 timeoutNanos = standbyTime - AudioClock::getNanoseconds();
                 timeoutNanos = std::max<int64_t>(0, timeoutNanos);
-            } // else {
-                // If the stream is disconnected and in standby mode, keep `timeoutNanos` as
-                // -1 to wait forever until next command as the stream can only be closed.
-            // }
-        } else if (isRunning() || (isIdle_l() && !isStandby_l())) {
-            timeoutNanos = (isRunning() ? std::min(nextTimestampReportTime, nextDataReportTime)
-                                        : standbyTime) - AudioClock::getNanoseconds();
+            }
+            // Otherwise, keep `timeoutNanos` as -1 to wait forever until next command.
+        } else if (isRunning()) {
+            timeoutNanos = std::min(nextTimestampReportTime, nextDataReportTime)
+                    - AudioClock::getNanoseconds();
             timeoutNanos = std::max<int64_t>(0, timeoutNanos);
         }
-
         auto command = mCommandQueue.waitForCommand(timeoutNanos);
         if (!mThreadEnabled) {
             // Break the loop if the thread is disabled.
             break;
         }
 
+        // Is it time to send timestamps?
         if (isRunning() && !isDisconnected_l()) {
             auto currentTimestamp = AudioClock::getNanoseconds();
             if (currentTimestamp >= nextDataReportTime) {
@@ -454,19 +459,24 @@
                 nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
             }
         }
-        if ((isIdle_l() || isDisconnected_l()) && AudioClock::getNanoseconds() >= standbyTime) {
+
+        // Is it time to enter standby?
+        if ((isIdle_l() || isDisconnected_l())
+                && isStandbyImplemented()
+                && !isStandby_l()
+                && (AudioClock::getNanoseconds() >= standbyTime)) {
+            ALOGD("%s() call standby_l(), %d loops", __func__, loopCount);
             aaudio_result_t result = standby_l();
             if (result != AAUDIO_OK) {
-                // If standby failed because of the function is not implemented, there is no
-                // need to retry. Otherwise, retry standby later.
-                ALOGW("Failed to enter standby, error=%d", result);
-                standbyTime = result == AAUDIO_ERROR_UNIMPLEMENTED
-                        ? std::numeric_limits<int64_t>::max()
-                        : AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
+                ALOGW("Failed to enter standby, error = %d", result);
+                // Try again later.
+                standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
             }
         }
 
         if (command != nullptr) {
+            ALOGD("%s() got COMMAND opcode %d after %d loops",
+                    __func__, command->operationCode, loopCount);
             std::scoped_lock<std::mutex> _commandLock(command->lock);
             switch (command->operationCode) {
                 case START:
@@ -521,6 +531,18 @@
                                                        : exitStandby_l(param->mParcelable);
                     standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
                 } break;
+                case START_CLIENT: {
+                    auto param = (StartClientParam *) command->parameter.get();
+                    command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                                       : startClient_l(param->mClient,
+                                                                       param->mAttr,
+                                                                       param->mClientHandle);
+                } break;
+                case STOP_CLIENT: {
+                    auto param = (StopClientParam *) command->parameter.get();
+                    command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                                       : stopClient_l(param->mClientHandle);
+                } break;
                 default:
                     ALOGE("Invalid command op code: %d", command->operationCode);
                     break;
@@ -643,7 +665,7 @@
     int32_t count = mUpMessageQueue->getFifoBuffer()->write(command, 1);
     if (count != 1) {
         ALOGW("%s(): Queue full. Did client stop? Suspending stream. what = %u, %s",
-              __func__, command->what, getTypeText());
+              __func__, static_cast<unsigned>(command->what), getTypeText());
         setSuspended(true);
         return AAUDIO_ERROR_WOULD_BLOCK;
     } else {
@@ -733,6 +755,26 @@
     return mCommandQueue.sendCommand(command);
 }
 
+aaudio_result_t AAudioServiceStreamBase::sendStartClientCommand(const android::AudioClient &client,
+                                                                const audio_attributes_t *attr,
+                                                                audio_port_handle_t *clientHandle) {
+    auto command = std::make_shared<AAudioCommand>(
+            START_CLIENT,
+            std::make_shared<StartClientParam>(client, attr, clientHandle),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+    return mCommandQueue.sendCommand(command);
+}
+
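+// Queue a STOP_CLIENT command for the command thread and wait for its result.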
+aaudio_result_t AAudioServiceStreamBase::sendStopClientCommand(audio_port_handle_t clientHandle) {
+    auto command = std::make_shared<AAudioCommand>(
+            STOP_CLIENT,
+            std::make_shared<StopClientParam>(clientHandle),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+    return mCommandQueue.sendCommand(command);
+}
+
 void AAudioServiceStreamBase::onVolumeChanged(float volume) {
     sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, volume);
 }
@@ -766,3 +808,11 @@
         .record();
     return result;
 }
+
+void AAudioServiceStreamBase::stopCommandThread() {
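+    // compare_exchange_strong() makes this idempotent: the destructor, the open() error path,
+    // and close() can all call it, but the command queue and thread are only stopped once.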
+    bool threadEnabled = true;
+    if (mThreadEnabled.compare_exchange_strong(threadEnabled, false)) {
+        mCommandQueue.stopWaiting();
+        mCommandThread.stop();
+    }
+}
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index d5061b3..20737bc 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -279,7 +279,7 @@
      * Device specific startup.
      * @return AAUDIO_OK or negative error.
      */
-    virtual aaudio_result_t startDevice();
+    virtual aaudio_result_t startDevice_l() REQUIRES(mLock);
 
     aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command)
             EXCLUDES(mUpMessageQueueLock);
@@ -288,6 +288,12 @@
 
     aaudio_result_t sendXRunCount(int32_t xRunCount);
 
+    aaudio_result_t sendStartClientCommand(const android::AudioClient& client,
+                                           const audio_attributes_t *attr,
+                                           audio_port_handle_t *clientHandle) EXCLUDES(mLock);
+
+    aaudio_result_t sendStopClientCommand(audio_port_handle_t clientHandle) EXCLUDES(mLock);
+
     /**
      * @param positionFrames
      * @param timeNanos
@@ -310,9 +316,14 @@
         mDisconnected = flag;
     }
 
+    // If you implement this method, please also override isStandbyImplemented().
     virtual aaudio_result_t standby_l() REQUIRES(mLock) {
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
+    virtual bool isStandbyImplemented() {
+        return false;
+    }
+
     class ExitStandbyParam : public AAudioCommandParam {
     public:
         explicit ExitStandbyParam(AudioEndpointParcelable* parcelable)
@@ -342,6 +353,40 @@
     }
     virtual void reportData_l() REQUIRES(mLock) { return; }
 
+    class StartClientParam : public AAudioCommandParam {
+    public:
+        StartClientParam(const android::AudioClient& client, const audio_attributes_t* attr,
+                         audio_port_handle_t* clientHandle)
+                : AAudioCommandParam(), mClient(client), mAttr(attr), mClientHandle(clientHandle) {
+        }
+        ~StartClientParam() override = default;
+
+        android::AudioClient mClient;
+        const audio_attributes_t* mAttr;
+        audio_port_handle_t* mClientHandle;
+    };
+    virtual aaudio_result_t startClient_l(
+            const android::AudioClient& client,
+            const audio_attributes_t *attr __unused,
+            audio_port_handle_t *clientHandle __unused) REQUIRES(mLock) {
+        ALOGD("AAudioServiceStreamBase::startClient_l(%p, ...) AAUDIO_ERROR_UNAVAILABLE", &client);
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
+    class StopClientParam : public AAudioCommandParam {
+    public:
+        explicit StopClientParam(audio_port_handle_t clientHandle)
+                : AAudioCommandParam(), mClientHandle(clientHandle) {
+        }
+        ~StopClientParam() override = default;
+
+        audio_port_handle_t mClientHandle;
+    };
+    virtual aaudio_result_t stopClient_l(audio_port_handle_t clientHandle) REQUIRES(mLock) {
+        ALOGD("AAudioServiceStreamBase::stopClient(%d) AAUDIO_ERROR_UNAVAILABLE", clientHandle);
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
     std::mutex              mUpMessageQueueLock;
@@ -358,9 +403,11 @@
         UNREGISTER_AUDIO_THREAD,
         GET_DESCRIPTION,
         EXIT_STANDBY,
+        START_CLIENT,
+        STOP_CLIENT,
     };
     AAudioThread            mCommandThread;
-    std::atomic<bool>       mThreadEnabled{false};
+    std::atomic_bool        mThreadEnabled{false};
     AAudioCommandQueue      mCommandQueue;
 
     int32_t                 mFramesPerBurst = 0;
@@ -400,6 +447,8 @@
                                 bool waitForReply = false,
                                 int64_t timeoutNanos = 0);
 
+    void stopCommandThread();
+
     aaudio_result_t closeAndClear();
 
     /**
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 89f6e33..5203e50 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -24,6 +24,7 @@
 #include <iostream>
 #include <stdint.h>
 
+#include <com_android_media_aaudio.h>
 #include <utils/String16.h>
 #include <media/nbaio/AudioStreamOutSink.h>
 #include <media/MmapStreamInterface.h>
@@ -83,11 +84,12 @@
 }
 
 // Start the flow of data.
-aaudio_result_t AAudioServiceStreamMMAP::startDevice() {
-    aaudio_result_t result = AAudioServiceStreamBase::startDevice();
+aaudio_result_t AAudioServiceStreamMMAP::startDevice_l() {
+    aaudio_result_t result = AAudioServiceStreamBase::startDevice_l();
     if (!mInService && result == AAUDIO_OK) {
         // Note that this can sometimes take 200 to 300 msec for a cold start!
-        result = startClient(mMmapClient, nullptr /*const audio_attributes_t* */, &mClientHandle);
+        result = startClient_l(
+                mMmapClient, nullptr /*const audio_attributes_t* */, &mClientHandle);
     }
     return result;
 }
@@ -100,7 +102,7 @@
     aaudio_result_t result = AAudioServiceStreamBase::pause_l();
     // TODO put before base::pause()?
     if (!mInService) {
-        (void) stopClient(mClientHandle);
+        (void) stopClient_l(mClientHandle);
     }
     return result;
 }
@@ -112,7 +114,7 @@
     aaudio_result_t result = AAudioServiceStreamBase::stop_l();
     // TODO put before base::stop()?
     if (!mInService) {
-        (void) stopClient(mClientHandle);
+        (void) stopClient_l(mClientHandle);
     }
     return result;
 }
@@ -148,7 +150,40 @@
 
 aaudio_result_t AAudioServiceStreamMMAP::startClient(const android::AudioClient& client,
                                                      const audio_attributes_t *attr,
-                                                     audio_port_handle_t *clientHandle) {
+                                                     audio_port_handle_t *portHandlePtr) {
+    if (com::android::media::aaudio::start_stop_client_from_command_thread()) {
+        return sendStartClientCommand(client, attr, portHandlePtr);
+    } else {
+        sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+        if (endpoint == nullptr) {
+            ALOGE("%s() has no endpoint", __func__);
+            return AAUDIO_ERROR_INVALID_STATE;
+        }
+        // Start the client on behalf of the application. Generate a new port handle.
+        aaudio_result_t result = endpoint->startClient(client, attr, portHandlePtr);
+        ALOGD("%s() flag off, got port %d", __func__,
+              ((portHandlePtr == nullptr) ? -1 : *portHandlePtr));
+        return result;
+    }
+}
+
+aaudio_result_t AAudioServiceStreamMMAP::stopClient(audio_port_handle_t clientHandle) {
+    if (com::android::media::aaudio::start_stop_client_from_command_thread()) {
+        return sendStopClientCommand(clientHandle);
+    } else {
+        sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+        if (endpoint == nullptr) {
+            ALOGE("%s() has no endpoint", __func__);
+            return AAUDIO_ERROR_INVALID_STATE;
+        }
+        aaudio_result_t result = endpoint->stopClient(clientHandle);
+        return result;
+    }
+}
+
+aaudio_result_t AAudioServiceStreamMMAP::startClient_l(const android::AudioClient& client,
+                                                       const audio_attributes_t *attr,
+                                                       audio_port_handle_t *clientHandle) {
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
@@ -159,7 +194,7 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceStreamMMAP::stopClient(audio_port_handle_t clientHandle) {
+aaudio_result_t AAudioServiceStreamMMAP::stopClient_l(audio_port_handle_t clientHandle) {
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 42032d7..f20ea10 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -73,6 +73,9 @@
     aaudio_result_t stop_l() REQUIRES(mLock) override;
 
     aaudio_result_t standby_l() REQUIRES(mLock) override;
+    bool isStandbyImplemented() override {
+        return true;
+    }
 
     aaudio_result_t exitStandby_l(AudioEndpointParcelable* parcelable) REQUIRES(mLock) override;
 
@@ -93,7 +96,13 @@
      * Device specific startup.
      * @return AAUDIO_OK or negative error.
      */
-    aaudio_result_t startDevice() override;
+    aaudio_result_t startDevice_l() REQUIRES(mLock) override;
+
+    aaudio_result_t startClient_l(const android::AudioClient& client,
+                                  const audio_attributes_t *attr,
+                                  audio_port_handle_t *clientHandle) REQUIRES(mLock) override;
+
+    aaudio_result_t stopClient_l(audio_port_handle_t clientHandle) REQUIRES(mLock) override;
 
 private:
 
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 9fe06b7..67b319f 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -69,10 +69,10 @@
     "-android-cloexec-dup", // found in AAudioServiceEndpointMMAP.cpp
     "-bugprone-narrowing-conversions", // found in several interface from size_t to int32_t
 
-    "-google-readability-casting", // C++ casts not always necessary and may be verbose
-    "-google-readability-todo", // do not require TODO(info)
     "-google-build-using-namespace", // Reenable and fix later.
     "-google-global-names-in-headers", // found in several files
+    "-google-readability-casting", // C++ casts not always necessary and may be verbose
+    "-google-readability-todo", // do not require TODO(info)
 
     "-misc-non-private-member-variables-in-classes", // found in aidl generated files
 
@@ -82,26 +82,28 @@
     name: "libaaudioservice_dependencies",
 
     shared_libs: [
+        "aaudio-aidl-cpp",
+        "com.android.media.aaudio-aconfig-cc",
+        "com.android.media.aaudio-aconfig-cc",
+        "framework-permission-aidl-cpp",
         "libaaudio_internal",
         "libaudioclient",
+        "libaudioclient_aidl_conversion",
         "libaudioutils",
-        "libmedia_helper",
-        "libmediametrics",
-        "libmediautils",
         "libbase",
         "libbinder",
         "libcutils",
         "liblog",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
         "libutils",
-        "aaudio-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
         "packagemanager_aidl-cpp",
     ],
 
     static_libs: [
         "libaudioflinger",
-    ]
+    ],
 }
 
 cc_library_static {
@@ -109,8 +111,8 @@
     name: "libaaudioservice",
 
     defaults: [
-        "libaaudioservice_dependencies",
         "latest_android_media_audio_common_types_cpp_shared",
+        "libaaudioservice_dependencies",
     ],
 
     srcs: [
@@ -136,15 +138,15 @@
     ],
 
     cflags: [
-        "-Wthread-safety",
-        "-Wno-unused-parameter",
         "-Wall",
         "-Werror",
+        "-Wno-unused-parameter",
+        "-Wthread-safety",
     ],
 
     export_shared_lib_headers: [
-        "libaaudio_internal",
         "framework-permission-aidl-cpp",
+        "libaaudio_internal",
     ],
 
     header_libs: [
@@ -152,14 +154,16 @@
     ],
 
     include_dirs: [
-        "frameworks/av/media/libnbaio/include_mono",
         "frameworks/av/media/libnbaio/include",
+        "frameworks/av/media/libnbaio/include_mono",
     ],
 
+    export_include_dirs: ["."],
+
     tidy: true,
     tidy_checks: tidy_errors,
     tidy_checks_as_errors: tidy_errors,
     tidy_flags: [
         "-format-style=file",
-    ]
+    ],
 }
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 0230935..97825b3 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -19,6 +19,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -36,21 +37,22 @@
         "oboeservice_fuzzer.cpp",
     ],
     shared_libs: [
+        "aaudio-aidl-cpp",
+        "com.android.media.aaudio-aconfig-cc",
+        "framework-permission-aidl-cpp",
         "libaaudio_internal",
         "libaudioclient",
+        "libaudioclient_aidl_conversion",
         "libaudioflinger",
         "libaudioutils",
-        "libmedia_helper",
-        "libmediametrics",
-        "libmediautils",
         "libbase",
         "libbinder",
         "libcutils",
         "liblog",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
         "libutils",
-        "aaudio-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
     ],
     static_libs: [
         "libaaudioservice",
diff --git a/services/tuner/TunerDemux.cpp b/services/tuner/TunerDemux.cpp
index 92fa970..a80a88e 100644
--- a/services/tuner/TunerDemux.cpp
+++ b/services/tuner/TunerDemux.cpp
@@ -50,7 +50,9 @@
 }
 
 TunerDemux::~TunerDemux() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mDemux = nullptr;
     mTunerService = nullptr;
 }
@@ -125,6 +127,7 @@
 }
 
 ::ndk::ScopedAStatus TunerDemux::close() {
+    isClosed = true;
     return mDemux->close();
 }
 
diff --git a/services/tuner/TunerDemux.h b/services/tuner/TunerDemux.h
index 0c71987..17dd7e0 100644
--- a/services/tuner/TunerDemux.h
+++ b/services/tuner/TunerDemux.h
@@ -64,6 +64,7 @@
     shared_ptr<IDemux> mDemux;
     int mDemuxId;
     shared_ptr<TunerService> mTunerService;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
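
The TunerDemux change above and each of the Tuner* wrapper changes below follow the same guarded-close shape: close() records an isClosed flag, and the destructor only calls close() when the client has not already closed explicitly, so the underlying HAL object's close() runs exactly once. A minimal standalone sketch of the pattern, with a hypothetical IWidget/WidgetWrapper pair standing in for the real HAL interfaces such as IDemux or IFrontend:

    #include <memory>

    // Hypothetical HAL-like interface; the real wrappers hold IDemux, IDvr, etc.
    struct IWidget {
        virtual ~IWidget() = default;
        virtual void close() = 0;
    };

    class WidgetWrapper {
    public:
        explicit WidgetWrapper(std::shared_ptr<IWidget> widget)
            : mWidget(std::move(widget)) {}

        ~WidgetWrapper() {
            // Close on destruction only if the client did not already call close().
            if (!isClosed) {
                close();
            }
            mWidget = nullptr;
        }

        void close() {
            isClosed = true;
            mWidget->close();
        }

    private:
        std::shared_ptr<IWidget> mWidget;
        bool isClosed = false;
    };

With this shape, a client that calls close() and later drops the wrapper triggers a single close() on the HAL side instead of two. As in the diff itself, the flag is a plain bool with no locking, so the sketch does not address concurrent close() calls.
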
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index ffe0be9..c1214bd 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -41,7 +41,9 @@
 }
 
 TunerDescrambler::~TunerDescrambler() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mDescrambler = nullptr;
 }
 
@@ -75,6 +77,7 @@
 }
 
 ::ndk::ScopedAStatus TunerDescrambler::close() {
+    isClosed = true;
     return mDescrambler->close();
 }
 
diff --git a/services/tuner/TunerDescrambler.h b/services/tuner/TunerDescrambler.h
index b1d5fb9..434fc5d 100644
--- a/services/tuner/TunerDescrambler.h
+++ b/services/tuner/TunerDescrambler.h
@@ -48,6 +48,7 @@
 
 private:
     shared_ptr<IDescrambler> mDescrambler;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerDvr.cpp b/services/tuner/TunerDvr.cpp
index fcee966..0e1b0fa 100644
--- a/services/tuner/TunerDvr.cpp
+++ b/services/tuner/TunerDvr.cpp
@@ -37,7 +37,9 @@
 }
 
 TunerDvr::~TunerDvr() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mDvr = nullptr;
 }
 
@@ -92,6 +94,7 @@
 }
 
 ::ndk::ScopedAStatus TunerDvr::close() {
+    isClosed = true;
     return mDvr->close();
 }
 
diff --git a/services/tuner/TunerDvr.h b/services/tuner/TunerDvr.h
index 2330e7b..1fb7a5c 100644
--- a/services/tuner/TunerDvr.h
+++ b/services/tuner/TunerDvr.h
@@ -77,6 +77,7 @@
 private:
     shared_ptr<IDvr> mDvr;
     DvrType mType;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index 478e7ea..84a2b4e 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -47,7 +47,9 @@
         mTunerService(tuner) {}
 
 TunerFilter::~TunerFilter() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     freeSharedFilterToken("");
     {
         Mutex::Autolock _l(mLock);
@@ -266,6 +268,7 @@
     mStarted = false;
     mShared = false;
     mClientPid = -1;
+    isClosed = true;
 
     return res;
 }
diff --git a/services/tuner/TunerFilter.h b/services/tuner/TunerFilter.h
index f6178c4..06735aa 100644
--- a/services/tuner/TunerFilter.h
+++ b/services/tuner/TunerFilter.h
@@ -116,6 +116,7 @@
     shared_ptr<FilterCallback> mFilterCallback;
     Mutex mLock;
     shared_ptr<TunerService> mTunerService;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerFrontend.cpp b/services/tuner/TunerFrontend.cpp
index 1e93d95..081596a 100644
--- a/services/tuner/TunerFrontend.cpp
+++ b/services/tuner/TunerFrontend.cpp
@@ -37,7 +37,9 @@
 }
 
 TunerFrontend::~TunerFrontend() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mFrontend = nullptr;
     mId = -1;
 }
@@ -89,6 +91,7 @@
 }
 
 ::ndk::ScopedAStatus TunerFrontend::close() {
+    isClosed = true;
     return mFrontend->close();
 }
 
diff --git a/services/tuner/TunerFrontend.h b/services/tuner/TunerFrontend.h
index da471fb..9612124 100644
--- a/services/tuner/TunerFrontend.h
+++ b/services/tuner/TunerFrontend.h
@@ -83,6 +83,7 @@
 private:
     int mId;
     shared_ptr<IFrontend> mFrontend;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
index 2fb6135..d27a978 100644
--- a/services/tuner/TunerLnb.cpp
+++ b/services/tuner/TunerLnb.cpp
@@ -36,7 +36,9 @@
 }
 
 TunerLnb::~TunerLnb() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mLnb = nullptr;
     mId = -1;
 }
@@ -70,6 +72,7 @@
 }
 
 ::ndk::ScopedAStatus TunerLnb::close() {
+    isClosed = true;
     return mLnb->close();
 }
 
diff --git a/services/tuner/TunerLnb.h b/services/tuner/TunerLnb.h
index 72988a6..b0222d7 100644
--- a/services/tuner/TunerLnb.h
+++ b/services/tuner/TunerLnb.h
@@ -66,6 +66,7 @@
 private:
     int mId;
     shared_ptr<ILnb> mLnb;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
index 385a063..7a4e200 100644
--- a/services/tuner/TunerTimeFilter.cpp
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -35,7 +35,9 @@
 }
 
 TunerTimeFilter::~TunerTimeFilter() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mTimeFilter = nullptr;
 }
 
@@ -64,6 +66,7 @@
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::close() {
+    isClosed = true;
     return mTimeFilter->close();
 }
 
diff --git a/services/tuner/TunerTimeFilter.h b/services/tuner/TunerTimeFilter.h
index 31a47cd..7e40ebe 100644
--- a/services/tuner/TunerTimeFilter.h
+++ b/services/tuner/TunerTimeFilter.h
@@ -45,6 +45,7 @@
 
 private:
     shared_ptr<ITimeFilter> mTimeFilter;
+    bool isClosed = false;
 };
 
 }  // namespace tuner