Merge "Updated color_conversion_fuzzer fuzz_config in Android.bp file" into main
diff --git a/Android.bp b/Android.bp
index 302e250..7a2bb9b 100644
--- a/Android.bp
+++ b/Android.bp
@@ -52,8 +52,8 @@
         "aidl/android/media/VolumeShaperOperationFlag.aidl",
         "aidl/android/media/VolumeShaperState.aidl",
     ],
-    imports: [
-        "android.media.audio.common.types-V2",
+    defaults: [
+        "latest_android_media_audio_common_types_import_interface",
     ],
     backend: {
         cpp: {
@@ -113,8 +113,8 @@
     srcs: [
         "aidl/android/media/audio/IHalAdapterVendorExtension.aidl",
     ],
-    imports: [
-        "android.hardware.audio.core-V1",
+    defaults: [
+        "latest_android_hardware_audio_core_import_interface",
     ],
     backend: {
         // The C++ backend is disabled transitively due to use of FMQ by the audio core HAL.
diff --git a/camera/Android.bp b/camera/Android.bp
index b3f70f4..a3fd7f9 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -144,6 +144,7 @@
     srcs: [
         "aidl/android/hardware/CameraExtensionSessionStats.aidl",
         "aidl/android/hardware/ICameraService.aidl",
+        "aidl/android/hardware/CameraIdRemapping.aidl",
         "aidl/android/hardware/ICameraServiceListener.aidl",
         "aidl/android/hardware/ICameraServiceProxy.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
diff --git a/camera/Android.mk b/camera/Android.mk
deleted file mode 100644
index d9068c0..0000000
--- a/camera/Android.mk
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2010 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-include $(call all-subdir-makefiles)
diff --git a/camera/aidl/android/hardware/CameraIdRemapping.aidl b/camera/aidl/android/hardware/CameraIdRemapping.aidl
new file mode 100644
index 0000000..453f696
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraIdRemapping.aidl
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/**
+ * Specifies a remapping of Camera Ids.
+ *
+ * Example: For a given package, a remapping of camera id0 to id1 specifies
+ * that any operation intended for id0 should instead be performed on id1.
+ *
+ * @hide
+ */
+parcelable CameraIdRemapping {
+    /**
+     * Specifies remapping of Camera Ids per package.
+     */
+    parcelable PackageIdRemapping {
+        /** Package Name (e.g. com.android.xyz). */
+        @utf8InCpp String packageName;
+        /**
+         * Ordered list of Camera Ids to replace. Only Camera Ids present in this list will be
+         * affected.
+         */
+        @utf8InCpp List<String> cameraIdsToReplace;
+        /**
+         *  Ordered list of updated Camera Ids, where updatedCameraIds[i] corresponds to
+         *  the updated camera id for cameraIdsToReplace[i].
+         */
+        @utf8InCpp List<String> updatedCameraIds;
+    }
+
+    /**
+     * List of Camera Id remappings to perform.
+     */
+    List<PackageIdRemapping> packageIdRemappings;
+}
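
A rough client-side sketch of how this parcelable could be populated, assuming the cpp backend generates android::hardware::CameraIdRemapping and maps @utf8InCpp List<String> to std::vector<std::string> (both assumptions, not guaranteed by this change):

    // Sketch only: the generated header path and field types are assumed.
    #include <android/hardware/CameraIdRemapping.h>

    using android::hardware::CameraIdRemapping;

    CameraIdRemapping buildRemapping() {
        CameraIdRemapping remapping;
        CameraIdRemapping::PackageIdRemapping entry;
        entry.packageName = "com.android.xyz";   // hypothetical package
        entry.cameraIdsToReplace = {"0"};        // operations aimed at id "0"...
        entry.updatedCameraIds = {"1"};          // ...are redirected to id "1"
        remapping.packageIdRemappings = {entry};
        return remapping;
    }
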
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index ed37b2d..409a930 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -29,6 +29,7 @@
 import android.hardware.camera2.impl.CameraMetadataNative;
 import android.hardware.ICameraServiceListener;
 import android.hardware.CameraInfo;
+import android.hardware.CameraIdRemapping;
 import android.hardware.CameraStatus;
 import android.hardware.CameraExtensionSessionStats;
 
@@ -131,6 +132,22 @@
             int targetSdkVersion);
 
     /**
+     * Remap Camera Ids in the CameraService.
+     *
+     * Once this is in effect, all binder calls in the ICameraService that
+     * use logicalCameraId should consult remapping state to arrive at the
+     * correct cameraId to perform the operation on.
+     *
+     * Note: Before the new cameraIdRemapping state is applied, the previous
+     * state is cleared.
+     *
+     * @param cameraIdRemapping the camera ids to remap. Sending an unpopulated
+     *        cameraIdRemapping object will result in clearing of any previous
+     *        cameraIdRemapping state in the camera service.
+     */
+    void remapCameraIds(in CameraIdRemapping cameraIdRemapping);
+
+    /**
      * Remove listener for changes to camera device and flashlight state.
      */
     void removeListener(ICameraServiceListener listener);
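
A hedged sketch of driving the new call from a cpp-backend client; `cs` stands for an already-acquired ICameraService proxy and buildRemapping() is the hypothetical helper sketched above:

    // Sketch only: proxy acquisition and permission handling are omitted.
    android::binder::Status status = cs->remapCameraIds(buildRemapping());

    // Per the doc comment, an unpopulated object clears any previous remapping state.
    cs->remapCameraIds(android::hardware::CameraIdRemapping{});
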
diff --git a/camera/include/camera/camera2/SessionConfiguration.h b/camera/include/camera/camera2/SessionConfiguration.h
index 29913f6..73fafb4 100644
--- a/camera/include/camera/camera2/SessionConfiguration.h
+++ b/camera/include/camera/camera2/SessionConfiguration.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_HARDWARE_CAMERA2_SESSIONCONFIGURATION_H
 #define ANDROID_HARDWARE_CAMERA2_SESSIONCONFIGURATION_H
 
+#include "OutputConfiguration.h"
+
 #include <binder/Parcelable.h>
 
 namespace android {
@@ -25,8 +27,6 @@
 namespace camera2 {
 namespace params {
 
-class OutputConfiguration;
-
 class SessionConfiguration : public android::Parcelable {
 public:
 
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 24a11e3..d4dd546 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -111,6 +111,7 @@
 
 cc_library_shared {
     name: "libcamera2ndk_vendor",
+    cpp_std: "gnu++17",
     vendor: true,
     srcs: [
         "ndk_vendor/impl/ACameraDevice.cpp",
@@ -177,6 +178,7 @@
     shared_libs: [
         "libcamera2ndk_vendor",
         "libcamera_metadata",
+        "libhidlbase",
         "libmediandk",
         "libnativewindow",
         "libutils",
@@ -186,6 +188,7 @@
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
+        "android.hidl.token@1.0",
     ],
     cflags: [
         "-D__ANDROID_VNDK__",
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 365ac5c..61c7551 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -400,7 +400,6 @@
 
     camera_metadata_ro_entry rawEntry = static_cast<const CameraMetadata*>(mData.get())->find(tag);
     if (rawEntry.count == 0) {
-        ALOGE("%s: cannot find metadata tag %d", __FUNCTION__, tag);
         return ACAMERA_ERROR_METADATA_NOT_FOUND;
     }
     entry->tag = tag;
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index bd679e5..fe0ef67 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -584,6 +584,19 @@
      * <p>Only constrains auto-exposure (AE) algorithm, not
      * manual control of ACAMERA_SENSOR_EXPOSURE_TIME and
      * ACAMERA_SENSOR_FRAME_DURATION.</p>
+     * <p>Note that the actual achievable max framerate also depends on the minimum frame
+     * duration of the output streams. The max frame rate will be
+     * <code>min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations))</code>. For example,
+     * if the application sets this key to <code>{60, 60}</code>, but the maximum minFrameDuration among
+     * all configured streams is 33ms, the maximum framerate won't be 60fps, but will be
+     * 30fps.</p>
+     * <p>To start a CaptureSession with a target FPS range different from the
+     * capture request template's default value, the application
+     * is strongly recommended to call
+     * {@link ACameraDevice_createCaptureSessionWithSessionParameters }
+     * with the target fps range before creating the capture session. The aeTargetFpsRange is
+     * typically a session parameter. Specifying it at session creation time helps avoid
+     * session reconfiguration delays in cases like 60fps or high speed recording.</p>
      *
      * @see ACAMERA_SENSOR_EXPOSURE_TIME
      * @see ACAMERA_SENSOR_FRAME_DURATION
@@ -1128,6 +1141,12 @@
      * ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE field will return
      * OFF if the recording output is not stabilized, or if there are no output
      * Surface types that can be stabilized.</p>
+     * <p>The application is strongly recommended to call
+     * {@link ACameraDevice_createCaptureSessionWithSessionParameters }
+     * with the desired video stabilization mode before creating the capture session.
+     * Video stabilization mode is a session parameter on many devices. Specifying
+     * it at session creation time helps avoid reconfiguration delay caused by a difference
+     * between the default value and the first CaptureRequest.</p>
      * <p>If a camera device supports both this mode and OIS
      * (ACAMERA_LENS_OPTICAL_STABILIZATION_MODE), turning both modes on may
      * produce undesirable interaction, so it is recommended not to enable
@@ -5384,7 +5403,7 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
      * They can be queried through
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#get">CameraCharacteristics#get</a> with
-     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION)">CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION)</a>.
+     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION">CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION</a>.
      * Unless reported by both
      * <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html">StreamConfigurationMap</a>s, the outputs from
      * <code>android.scaler.streamConfigurationMapMaximumResolution</code> and
@@ -5399,13 +5418,12 @@
      * <ul>
      * <li>
      * <p>The mandatory stream combinations listed in
-     *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics/mandatoryMaximumResolutionStreamCombinations.html">mandatoryMaximumResolutionStreamCombinations</a>
-     *   would not apply.</p>
+     *   android.scaler.mandatoryMaximumResolutionStreamCombinations would not apply.</p>
      * </li>
      * <li>
      * <p>The bayer pattern of {@code RAW} streams when
      *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
-     *   is selected will be the one listed in <a href="https://developer.android.com/reference/android/sensor/info/binningFactor.html">binningFactor</a>.</p>
+     *   is selected will be the one listed in ACAMERA_SENSOR_INFO_BINNING_FACTOR.</p>
      * </li>
      * <li>
      * <p>The following keys will always be present:</p>
@@ -5419,6 +5437,7 @@
      * </ul>
      *
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_INFO_BINNING_FACTOR
      * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      */
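
Both doc additions above recommend supplying session parameters at session creation time. A minimal NDK sketch of that pattern, assuming the output container and state callbacks are already set up (error handling omitted):

    #include <camera/NdkCameraDevice.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Sketch only: 'device', 'outputs' and 'callbacks' are assumed to exist.
    void createRecordingSession(ACameraDevice* device,
                                ACaptureSessionOutputContainer* outputs,
                                ACameraCaptureSession_stateCallbacks* callbacks,
                                ACameraCaptureSession** session) {
        ACaptureRequest* sessionParams = nullptr;
        ACameraDevice_createCaptureRequest(device, TEMPLATE_RECORD, &sessionParams);

        // aeTargetFpsRange is typically a session parameter; set it up front.
        int32_t fpsRange[2] = {60, 60};
        ACaptureRequest_setEntry_i32(sessionParams, ACAMERA_CONTROL_AE_TARGET_FPS_RANGE,
                                     2, fpsRange);

        // Video stabilization mode is a session parameter on many devices as well.
        uint8_t stabMode = ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_ON;
        ACaptureRequest_setEntry_u8(sessionParams, ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE,
                                    1, &stabMode);

        ACameraDevice_createCaptureSessionWithSessionParameters(
                device, outputs, sessionParams, callbacks, session);
        ACaptureRequest_free(sessionParams);
    }
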
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 7f6ea9d..74c6cad 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -31,10 +31,13 @@
 #include <stdio.h>
 
 #include <android/log.h>
+#include <android/hidl/manager/1.2/IServiceManager.h>
+#include <android/hidl/token/1.0/ITokenManager.h>
 #include <camera/NdkCameraError.h>
 #include <camera/NdkCameraManager.h>
 #include <camera/NdkCameraDevice.h>
 #include <camera/NdkCameraCaptureSession.h>
+#include <hidl/ServiceManagement.h>
 #include <media/NdkImage.h>
 #include <media/NdkImageReader.h>
 #include <cutils/native_handle.h>
@@ -50,6 +53,8 @@
 static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;
 
 using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
+using android::hidl::manager::V1_0::IServiceManager;
+using android::hidl::token::V1_0::ITokenManager;
 using ConfiguredWindows = std::set<const native_handle_t *>;
 
 class CameraHelper {
@@ -981,11 +986,19 @@
 
 
 TEST_F(AImageReaderVendorTest, CreateWindowNativeHandle) {
+    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
+    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
+        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
+    }
     testBasicTakePictures(/*prepareSurfaces*/ false);
     testBasicTakePictures(/*prepareSurfaces*/ true);
 }
 
 TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
+    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
+    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
+        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
+    }
     for (auto & v2 : {true, false}) {
         testLogicalCameraPhysicalStream(false/*usePhysicalSettings*/, v2);
         testLogicalCameraPhysicalStream(true/*usePhysicalSettings*/, v2);
diff --git a/camera/tests/Android.bp b/camera/tests/Android.bp
new file mode 100644
index 0000000..65b8b41
--- /dev/null
+++ b/camera/tests/Android.bp
@@ -0,0 +1,52 @@
+// Copyright 2013 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    // See: http://go/android-license-faq
+    default_applicable_licenses: [
+        "frameworks_av_camera_license",
+    ],
+}
+
+cc_test {
+    name: "camera_client_test",
+    srcs: [
+        "VendorTagDescriptorTests.cpp",
+        "CameraBinderTests.cpp",
+        "CameraZSLTests.cpp",
+        "CameraCharacteristicsPermission.cpp",
+    ],
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libcutils",
+        "libcamera_metadata",
+        "libcamera_client",
+        "libgui",
+        "libsync",
+        "libui",
+        "libdl",
+        "libbinder",
+    ],
+    include_dirs: [
+        "system/media/private/camera/include",
+        "system/media/camera/tests",
+        "frameworks/av/services/camera/libcameraservice",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+    ],
+}
diff --git a/camera/tests/Android.mk b/camera/tests/Android.mk
deleted file mode 100644
index 7f8078e..0000000
--- a/camera/tests/Android.mk
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
-LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
-
-LOCAL_SRC_FILES:= \
-	VendorTagDescriptorTests.cpp \
-	CameraBinderTests.cpp \
-	CameraZSLTests.cpp \
-	CameraCharacteristicsPermission.cpp
-
-LOCAL_SHARED_LIBRARIES := \
-	liblog \
-	libutils \
-	libcutils \
-	libcamera_metadata \
-	libcamera_client \
-	libgui \
-	libsync \
-	libui \
-	libdl \
-	libbinder
-
-LOCAL_C_INCLUDES += \
-	system/media/private/camera/include \
-	system/media/camera/tests \
-	frameworks/av/services/camera/libcameraservice \
-
-LOCAL_CFLAGS += -Wall -Wextra -Werror
-
-LOCAL_MODULE:= camera_client_test
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/../NOTICE
-LOCAL_MODULE_TAGS := tests
-
-include $(BUILD_NATIVE_TEST)
diff --git a/drm/libmediadrm/fuzzer/Android.bp b/drm/libmediadrm/fuzzer/Android.bp
index deda9ef..adc33d5 100644
--- a/drm/libmediadrm/fuzzer/Android.bp
+++ b/drm/libmediadrm/fuzzer/Android.bp
@@ -65,8 +65,16 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-drm-team@google.com",
         ],
-        componentid: 155276,
+        componentid: 49079,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmediadrm",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/include/common_time/OWNERS b/include/common_time/OWNERS
deleted file mode 100644
index f9cb567..0000000
--- a/include/common_time/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-gkasten@google.com
diff --git a/include/private/media/OWNERS b/include/private/media/OWNERS
index 21723ba..10d06de 100644
--- a/include/private/media/OWNERS
+++ b/include/private/media/OWNERS
@@ -1,3 +1,4 @@
+# Bug component: 48436
 elaurent@google.com
-gkasten@google.com
 hunga@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/OWNERS b/media/OWNERS
index 4a25b68..976fb9e 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -10,11 +10,9 @@
 philburk@google.com
 pmclean@google.com
 quxiangfang@google.com
-rago@google.com
 robertshih@google.com
 taklee@google.com
 wonsik@google.com
-ytai@google.com
 
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/aconfig/Android.bp b/media/aconfig/Android.bp
index 4b489e2..96bf4f5 100644
--- a/media/aconfig/Android.bp
+++ b/media/aconfig/Android.bp
@@ -12,5 +12,11 @@
 
 cc_aconfig_library {
     name: "aconfig_mediacodec_flags_c_lib",
+    min_sdk_version: "30",
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
     aconfig_declarations: "aconfig_mediacodec_flags",
 }
diff --git a/media/aconfig/mediacodec_flags.aconfig b/media/aconfig/mediacodec_flags.aconfig
index 90ddf27..c82ad4d 100644
--- a/media/aconfig/mediacodec_flags.aconfig
+++ b/media/aconfig/mediacodec_flags.aconfig
@@ -13,3 +13,10 @@
   description: "Feature flags for media codec importance"
   bug: "297929011"
 }
+
+flag {
+  name: "aidl_hal"
+  namespace: "codec_fwk"
+  description: "Feature flags for enabling AIDL HAL handling"
+  bug: "251850069"
+}
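
Code guarded by the new flag would typically check it through the generated aconfig library; the header and namespace below are guessed from the usual aconfig naming and the package declared elsewhere in this file (not shown in the hunk), so treat them as hypothetical:

    // Hypothetical: the actual header/namespace follow the aconfig package in
    // mediacodec_flags.aconfig, which is outside this hunk.
    #include <com_android_media_codec_flags.h>

    bool useAidlHal() {
        return com::android::media::codec::flags::aidl_hal();
    }
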
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 5558259..bcb31f3 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -28,7 +28,6 @@
 
 #include "media/AidlConversionCppNdk.h"
 
-#include <media/ShmemCompat.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -562,10 +561,11 @@
                 GET_DEVICE_DESC_CONNECTION(IP_V4));
         append_AudioDeviceDescription(pairs,
                 AUDIO_DEVICE_IN_BUS, AUDIO_DEVICE_OUT_BUS,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE);
+                AudioDeviceType::IN_BUS, AudioDeviceType::OUT_BUS);
         append_AudioDeviceDescription(pairs,
                 AUDIO_DEVICE_IN_PROXY, AUDIO_DEVICE_OUT_PROXY,
-                AudioDeviceType::IN_AFE_PROXY, AudioDeviceType::OUT_AFE_PROXY);
+                AudioDeviceType::IN_AFE_PROXY, AudioDeviceType::OUT_AFE_PROXY,
+                GET_DEVICE_DESC_CONNECTION(VIRTUAL));
         append_AudioDeviceDescription(pairs,
                 AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_OUT_USB_HEADSET,
                 AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
@@ -1079,9 +1079,13 @@
         case Tag::ipv6: {
             const std::vector<int32_t>& ipv6 = aidl.address.get<AudioDeviceAddress::ipv6>();
             if (ipv6.size() != 8) return BAD_VALUE;
+// FIXME: Code warning found by clang-r510928
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wfortify-source"
             snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN,
                     "%04X:%04X:%04X:%04X:%04X:%04X:%04X:%04X",
                     ipv6[0], ipv6[1], ipv6[2], ipv6[3], ipv6[4], ipv6[5], ipv6[6], ipv6[7]);
+#pragma clang diagnostic pop
         } break;
         case Tag::alsa: {
             const std::vector<int32_t>& alsa = aidl.address.get<AudioDeviceAddress::alsa>();
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index d3a5755..07c59c7 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -56,6 +56,19 @@
 }
 
 cc_defaults {
+    name: "audio_aidl_conversion_common_default_cpp",
+    shared_libs: [
+        "libbinder",
+        "libshmemcompat",
+        "shared-file-region-aidl-cpp",
+        "framework-permission-aidl-cpp",
+    ],
+    export_shared_lib_headers: [
+        "shared-file-region-aidl-cpp",
+    ],
+}
+
+cc_defaults {
     name: "audio_aidl_conversion_common_default",
     export_include_dirs: ["include"],
     host_supported: true,
@@ -67,17 +80,12 @@
     ],
     shared_libs: [
         "libbase",
-        "libbinder",
         "liblog",
-        "libshmemcompat",
         "libstagefright_foundation",
         "libutils",
-        "shared-file-region-aidl-cpp",
-        "framework-permission-aidl-cpp",
     ],
     export_shared_lib_headers: [
         "libbase",
-        "shared-file-region-aidl-cpp",
     ],
     cflags: [
         "-Wall",
@@ -113,6 +121,7 @@
     ],
     defaults: [
         "audio_aidl_conversion_common_default",
+        "audio_aidl_conversion_common_default_cpp",
         "latest_android_media_audio_common_types_cpp_export_shared",
     ],
     min_sdk_version: "29",
@@ -223,6 +232,7 @@
     ],
     defaults: [
         "audio_aidl_conversion_common_default",
+        "audio_aidl_conversion_common_default_cpp",
         "audio_aidl_conversion_common_util_default",
         "latest_android_media_audio_common_types_cpp_shared",
         "latest_android_media_audio_common_types_ndk_shared",
diff --git a/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
index 656d76a..7cba011 100644
--- a/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
@@ -25,12 +25,12 @@
 #define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP
 #endif  // BACKEND_NDK_IMPL
 
+#include <functional>
 #include <limits>
 #include <type_traits>
 #include <utility>
 
 #include <android-base/expected.h>
-#include <binder/Status.h>
 
 #if defined(BACKEND_NDK_IMPL)
 #include <android/binder_auto_utils.h>
@@ -40,6 +40,7 @@
 namespace aidl {
 #else
 #include <binder/Enums.h>
+#include <binder/Status.h>
 #endif  // BACKEND_NDK_IMPL
 namespace android {
 
@@ -374,6 +375,30 @@
  * Note: for EX_TRANSACTION_FAILED and EX_SERVICE_SPECIFIC a more detailed error code
  * can be found from transactionError() or serviceSpecificErrorCode().
  */
+#if defined(BACKEND_NDK_IMPL)
+static inline ::android::status_t statusTFromExceptionCode(binder_exception_t exception) {
+    switch (exception) {
+        case EX_NONE:
+            return ::android::OK;
+        case EX_SECURITY:  // Java SecurityException, rethrows locally in Java
+            return ::android::PERMISSION_DENIED;
+        case EX_BAD_PARCELABLE:  // Java BadParcelableException, rethrows in Java
+        case EX_ILLEGAL_ARGUMENT:  // Java IllegalArgumentException, rethrows in Java
+        case EX_NULL_POINTER:  // Java NullPointerException, rethrows in Java
+            return ::android::BAD_VALUE;
+        case EX_ILLEGAL_STATE:  // Java IllegalStateException, rethrows in Java
+        case EX_UNSUPPORTED_OPERATION:  // Java UnsupportedOperationException, rethrows
+            return ::android::INVALID_OPERATION;
+        case EX_PARCELABLE:  // Java bootclass loader (not standard exception), rethrows
+        case EX_NETWORK_MAIN_THREAD:  // Java NetworkOnMainThreadException, rethrows
+        case EX_TRANSACTION_FAILED: // Native - see error code
+        case EX_SERVICE_SPECIFIC:   // Java ServiceSpecificException,
+                                            // rethrows in Java with integer error code
+            return ::android::UNKNOWN_ERROR;
+    }
+    return ::android::UNKNOWN_ERROR;
+}
+#else
 static inline ::android::status_t statusTFromExceptionCode(int32_t exceptionCode) {
     using namespace ::android::binder;
     switch (exceptionCode) {
@@ -398,6 +423,7 @@
     }
     return ::android::UNKNOWN_ERROR;
 }
+#endif  // BACKEND_NDK_IMPL
 
 /**
  * Return the equivalent Android ::android::status_t from a binder status.
@@ -410,6 +436,7 @@
  *
  * return_type method(type0 param0, ...)
  */
+#if !defined(BACKEND_NDK_IMPL)
 static inline ::android::status_t statusTFromBinderStatus(const ::android::binder::Status &status) {
     return status.isOk() ? ::android::OK // check ::android::OK,
         : status.serviceSpecificErrorCode() // service-side error, not standard Java exception
@@ -418,6 +445,7 @@
         ?: statusTFromExceptionCode(status.exceptionCode()); // a service-side error with a
                                                     // standard Java exception (fromExceptionCode)
 }
+#endif
 
 #if defined(BACKEND_NDK_IMPL)
 static inline ::android::status_t statusTFromBinderStatus(const ::ndk::ScopedAStatus &status) {
@@ -443,6 +471,7 @@
  * This is used for methods not returning an explicit status_t,
  * where Java callers expect an exception, not an integer return value.
  */
+#if !defined(BACKEND_NDK_IMPL)
 static inline ::android::binder::Status binderStatusFromStatusT(
         ::android::status_t status, const char *optionalMessage = nullptr) {
     const char * const emptyIfNull = optionalMessage == nullptr ? "" : optionalMessage;
@@ -470,6 +499,7 @@
     // throw a ServiceSpecificException.
     return Status::fromServiceSpecificError(status, emptyIfNull);
 }
+#endif
 
 } // namespace aidl_utils
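
A small sketch of how NDK-backend callers typically fold a ScopedAStatus into a status_t with these helpers; the ::aidl::android qualification and IHalInterface are assumptions for illustration:

    // Sketch only: IHalInterface is a hypothetical NDK-backend AIDL interface.
    ::android::status_t callHal(const std::shared_ptr<IHalInterface>& hal) {
        ndk::ScopedAStatus status = hal->someMethod();
        return ::aidl::android::aidl_utils::statusTFromBinderStatus(status);
    }
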
 
diff --git a/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
index 60727b4..f78243e 100644
--- a/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
+++ b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+#define LOG_TAG "AidlConversionNdkTests"
 #include <iostream>
 #include <type_traits>
 
diff --git a/media/audioserver/OWNERS b/media/audioserver/OWNERS
index f9cb567..f02cbc3 100644
--- a/media/audioserver/OWNERS
+++ b/media/audioserver/OWNERS
@@ -1 +1,5 @@
-gkasten@google.com
+# Bug component: 48436
+atneya@google.com
+hunga@google.com
+philburk@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 8a894f3..b911e11 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -25,5 +25,8 @@
         }
       ]
     }
+  ],
+  "postsubmit": [
+    { "name": "c2aidl_gtracker_test"}
   ]
 }
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 664647a..4b189b4 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -42,6 +42,10 @@
         "libnativewindow_headers",
     ],
 
+    static_libs: [
+        "libyuv_static", // for conversion routines
+    ],
+
     shared_libs: [
         "libcutils", // for properties
         "liblog", // for ALOG
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 55a1164..06a21f6 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -21,8 +21,10 @@
 #include <android/hardware_buffer.h>
 #include <cutils/properties.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
 
 #include <inttypes.h>
+#include <libyuv.h>
 
 #include <C2Config.h>
 #include <C2Debug.h>
@@ -32,6 +34,15 @@
 #include <SimpleC2Component.h>
 
 namespace android {
+
+// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
+#if LIBYUV_VERSION >= 1780
+#include <algorithm>
+#define HAVE_LIBYUV_I410_I210_TO_AB30 1
+#else
+#define HAVE_LIBYUV_I410_I210_TO_AB30 0
+#endif
+
 constexpr uint8_t kNeutralUVBitDepth8 = 128;
 constexpr uint16_t kNeutralUVBitDepth10 = 512;
 
@@ -506,6 +517,120 @@
     }
 }
 
+void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
+                                        const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
+                                        size_t srcVStride, size_t dstStride, size_t width,
+                                        size_t height,
+                                        std::shared_ptr<const C2ColorAspectsStruct> aspects,
+                                        CONV_FORMAT_T format) {
+    bool processed = false;
+#if HAVE_LIBYUV_I410_I210_TO_AB30
+    if (format == CONV_FORMAT_I444) {
+        libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dst,
+                                 dstStride, &libyuv::kYuvV2020Constants, width, height);
+        processed = true;
+    } else if (format == CONV_FORMAT_I422) {
+        libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dst,
+                                 dstStride, &libyuv::kYuvV2020Constants, width, height);
+        processed = true;
+    }
+#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
+    if (!processed) {
+        convertYUV420Planar16ToY410OrRGBA1010102(
+                (uint32_t*)dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                dstStride / sizeof(uint32_t), width, height,
+                std::static_pointer_cast<const C2ColorAspectsStruct>(aspects));
+    }
+}
+
+void convertPlanar16ToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize) {
+#if LIBYUV_VERSION >= 1779
+    if ((format == CONV_FORMAT_I444) || (format == CONV_FORMAT_I422)) {
+        // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010
+        // and libyuv::I210ToP010 when they are available. Note it may be safe to alias dstY
+        // in I010ToP010, but the libyuv API doesn't make any guarantees.
+        const size_t tmpSize = dstYStride * height + dstUStride * align(height, 2);
+        CHECK(tmpSize <= tmpFrameBufferSize);
+
+        uint16_t* const tmpY = tmpFrameBuffer;
+        uint16_t* const tmpU = tmpY + dstYStride * height;
+        uint16_t* const tmpV = tmpU + dstUStride * align(height, 2) / 2;
+        if (format == CONV_FORMAT_I444) {
+            libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                               dstYStride, tmpU, dstUStride, tmpV, dstUStride, width, height);
+        } else {
+            libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                               dstYStride, tmpU, dstUStride, tmpV, dstUStride, width, height);
+        }
+        libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride, dstY, dstYStride,
+                           dstUV, dstUStride, width, height);
+    } else {
+        convertYUV420Planar16ToP010(dstY, dstUV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                    srcVStride, dstYStride, dstUStride, width, height,
+                                    isMonochrome);
+    }
+#else   // LIBYUV_VERSION < 1779
+    convertYUV420Planar16ToP010(dstY, dstUV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                dstYStride, dstUStride, width, height, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+}
+
+void convertPlanar16ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize) {
+#if LIBYUV_VERSION >= 1779
+    if (format == CONV_FORMAT_I444) {
+        // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420
+        // when it's available.
+        const size_t tmpSize = dstYStride * height + dstUStride * align(height, 2);
+        CHECK(tmpSize <= tmpFrameBufferSize);
+
+        uint16_t* const tmpY = tmpFrameBuffer;
+        uint16_t* const tmpU = tmpY + dstYStride * height;
+        uint16_t* const tmpV = tmpU + dstUStride * align(height, 2) / 2;
+        libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY, dstYStride,
+                           tmpU, dstUStride, tmpV, dstVStride, width, height);
+        libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else if (format == CONV_FORMAT_I422) {
+        libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else {
+        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                    srcVStride, dstYStride, dstUStride, width, height,
+                                    isMonochrome);
+    }
+#else   // LIBYUV_VERSION < 1779
+    convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                srcVStride, dstYStride, dstUStride, width, height, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+}
+
+void convertPlanar8ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint8_t* srcY,
+                          const uint8_t* srcU, const uint8_t* srcV, size_t srcYStride,
+                          size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                          size_t dstUStride, size_t dstVStride, uint32_t width, uint32_t height,
+                          bool isMonochrome, CONV_FORMAT_T format) {
+    if (format == CONV_FORMAT_I444) {
+        libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else if (format == CONV_FORMAT_I422) {
+        libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY, dstYStride,
+                           dstU, dstUStride, dstV, dstVStride, width, height);
+    } else {
+        convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                   srcVStride, dstYStride, dstUStride, dstVStride, width, height,
+                                   isMonochrome);
+    }
+}
 std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
     std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
     mQueue.pop_front();
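
A minimal sketch of how a soft decoder might route an 8-bit I422 frame through the new helper; plane pointers and strides are assumed to come from the component's graphic block setup elsewhere:

    // Sketch only: buffer allocation and stride computation are omitted.
    void copyOutputToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
                          const uint8_t* srcY, const uint8_t* srcU, const uint8_t* srcV,
                          size_t srcYStride, size_t srcUStride, size_t srcVStride,
                          size_t dstYStride, size_t dstUStride, size_t dstVStride,
                          uint32_t width, uint32_t height) {
        // CONV_FORMAT_I422 takes the libyuv::I422ToI420 path added above;
        // CONV_FORMAT_I420 falls back to convertYUV420Planar8ToYV12.
        convertPlanar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV,
                             srcYStride, srcUStride, srcVStride,
                             dstYStride, dstUStride, dstVStride,
                             width, height, /*isMonochrome=*/false, CONV_FORMAT_I422);
    }
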
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index bc27474..b28c47e 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -31,6 +31,12 @@
 
 namespace android {
 
+typedef enum {
+    CONV_FORMAT_I420,
+    CONV_FORMAT_I422,
+    CONV_FORMAT_I444,
+} CONV_FORMAT_T;
+
 void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
                                 const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -66,6 +72,30 @@
                                         const uint32_t* srcRGBA, size_t srcRGBStride, size_t width,
                                         size_t height, C2Color::matrix_t colorMatrix,
                                         C2Color::range_t colorRange);
+void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
+                                        const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
+                                        size_t srcVStride, size_t dstStride, size_t width,
+                                        size_t height,
+                                        std::shared_ptr<const C2ColorAspectsStruct> aspects,
+                                        CONV_FORMAT_T format);
+
+void convertPlanar16ToP010(uint16_t* dstY, uint16_t* dstUV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize);
+void convertPlanar16ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint16_t* srcY,
+                           const uint16_t* srcU, const uint16_t* srcV, size_t srcYStride,
+                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                           size_t dstUStride, size_t dstVStride, size_t width, size_t height,
+                           bool isMonochrome, CONV_FORMAT_T format, uint16_t* tmpFrameBuffer,
+                           size_t tmpFrameBufferSize);
+void convertPlanar8ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV, const uint8_t* srcY,
+                          const uint8_t* srcU, const uint8_t* srcV, size_t srcYStride,
+                          size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                          size_t dstUStride, size_t dstVStride, uint32_t width, uint32_t height,
+                          bool isMonochrome, CONV_FORMAT_T format);
 
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
diff --git a/media/codec2/components/dav1d/Android.bp b/media/codec2/components/dav1d/Android.bp
new file mode 100644
index 0000000..d549ccb
--- /dev/null
+++ b/media/codec2/components/dav1d/Android.bp
@@ -0,0 +1,28 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libcodec2_soft_av1dec_dav1d",
+
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_all-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
+    ],
+
+    cflags: [
+        "-DCODECNAME=\"c2.android.av1-dav1d.decoder\"",
+        "-Wno-unused-variable",
+    ],
+
+    srcs: ["C2SoftDav1dDec.cpp", "C2SoftDav1dDump.cpp"],
+    static_libs: [
+        "libdav1d",
+    ],
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
new file mode 100644
index 0000000..76680a3
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -0,0 +1,1203 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftDav1dDec"
+#include <android-base/properties.h>
+#include <cutils/properties.h>
+#include <thread>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include <log/log.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include "C2SoftDav1dDec.h"
+
+namespace android {
+
+// The number of threads used for the dav1d decoder.
+static const int NUM_THREADS_DAV1D_DEFAULT = 0;
+static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";
+
+// codecname set and passed in as a compile flag from Android.bp
+constexpr char COMPONENT_NAME[] = CODECNAME;
+
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+
+constexpr uint32_t kOutputDelay = 4;
+
+class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
+
+        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                             .withConstValue(new C2ComponentAttributesSetting(
+                                     C2Component::ATTRIB_IS_TEMPORAL))
+                             .build());
+
+        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096),
+                                     C2F(mSize, height).inRange(2, 4096),
+                             })
+                             .withSetter(SizeSetter)
+                             .build());
+
+        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                             .withDefault(new C2StreamProfileLevelInfo::input(
+                                     0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
+                             .withFields({C2F(mProfileLevel, profile)
+                                                  .oneOf({C2Config::PROFILE_AV1_0,
+                                                          C2Config::PROFILE_AV1_1}),
+                                          C2F(mProfileLevel, level)
+                                                  .oneOf({
+                                                          C2Config::LEVEL_AV1_2,
+                                                          C2Config::LEVEL_AV1_2_1,
+                                                          C2Config::LEVEL_AV1_2_2,
+                                                          C2Config::LEVEL_AV1_2_3,
+                                                          C2Config::LEVEL_AV1_3,
+                                                          C2Config::LEVEL_AV1_3_1,
+                                                          C2Config::LEVEL_AV1_3_2,
+                                                          C2Config::LEVEL_AV1_3_3,
+                                                          C2Config::LEVEL_AV1_4,
+                                                          C2Config::LEVEL_AV1_4_1,
+                                                          C2Config::LEVEL_AV1_4_2,
+                                                          C2Config::LEVEL_AV1_4_3,
+                                                          C2Config::LEVEL_AV1_5,
+                                                          C2Config::LEVEL_AV1_5_1,
+                                                          C2Config::LEVEL_AV1_5_2,
+                                                          C2Config::LEVEL_AV1_5_3,
+                                                  })})
+                             .withSetter(ProfileLevelSetter, mSize)
+                             .build());
+
+        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoInput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoInput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoInputSetter)
+                             .build());
+
+        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoOutput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoOutput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoOutputSetter)
+                             .build());
+
+        // default static info
+        C2HdrStaticMetadataStruct defaultStaticInfo{};
+        helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+        addParameter(
+                DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+                        .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
+                        .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
+                                     C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
+                                     C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
+                                     C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
+                        .withSetter(HdrStaticInfoSetter)
+                        .build());
+
+        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+                             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 2048, 2),
+                                     C2F(mSize, height).inRange(2, 2048, 2),
+                             })
+                             .withSetter(MaxPictureSizeSetter, mSize)
+                             .build());
+
+        addParameter(
+                DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                        .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
+                        .withFields({
+                                C2F(mMaxInputSize, value).any(),
+                        })
+                        .calculatedAs(MaxInputSizeSetter, mMaxSize)
+                        .build());
+
+        C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
+        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+                C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+                {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+                             .withConstValue(defaultColorInfo)
+                             .build());
+
+        addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsTuning::output(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mDefaultColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mDefaultColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mDefaultColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mDefaultColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(DefaultColorAspectsSetter)
+                             .build());
+
+        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::input(
+                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mCodedColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mCodedColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mCodedColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mCodedColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(CodedColorAspectsSetter)
+                             .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                        .withDefault(new C2StreamColorAspectsInfo::output(
+                                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                        .withFields(
+                                {C2F(mColorAspects, range)
+                                         .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                                 C2F(mColorAspects, primaries)
+                                         .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                  C2Color::PRIMARIES_OTHER),
+                                 C2F(mColorAspects, transfer)
+                                         .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                  C2Color::TRANSFER_OTHER),
+                                 C2F(mColorAspects, matrix)
+                                         .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                  C2Color::MATRIX_OTHER)})
+                        .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+                        .build());
+
+        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        // If the surface color format isn't added to the supported formats, there is no way
+        // to know when the color format is configured to "surface". This is necessary to be
+        // able to choose a 10-bit format while decoding 10-bit clips in surface mode.
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        // TODO: support more formats?
+        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                             .withDefault(new C2StreamPixelFormatInfo::output(
+                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                             .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                             .build());
+
+        addParameter(
+                DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+                .withDefault(new C2PortActualDelayTuning::output(kOutputDelay))
+                .withFields({C2F(mActualOutputDelay, value).inRange(0, kOutputDelay)})
+                .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
+                .build());
+    }
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::output>& me) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+            me.set().width = oldMe.v.width;
+        }
+        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+            me.set().height = oldMe.v.height;
+        }
+        return res;
+    }
+
+    static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
+                                    const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        // TODO: get max width/height from the size's field helpers vs.
+        // hardcoding
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+        return C2R::Ok();
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
+                                  const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
+        (void)mayBlock;
+        // assume compression ratio of 2, but enforce a floor
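+        // Worked example for a 1920x1088 max picture:
+        //   ceil(1920/64) * ceil(1088/64) * 3072 = 30 * 17 * 3072 = 1,566,720 bytes,
+        // i.e. half of the 1920 * 1088 * 1.5 = 3,133,440 bytes needed for a raw 4:2:0 frame,
+        // clamped from below by kMinInputBufferSize.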
+        me.set().value =
+                c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
+                       kMinInputBufferSize);
+        return C2R::Ok();
+    }
+
+    static C2R DefaultColorAspectsSetter(bool mayBlock,
+                                         C2P<C2StreamColorAspectsTuning::output>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                  const C2P<C2StreamColorAspectsTuning::output>& def,
+                                  const C2P<C2StreamColorAspectsInfo::input>& coded) {
+        (void)mayBlock;
+        // take default values for all unspecified fields, and coded values for specified ones
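+        // Example: if the bitstream signals only the primaries and leaves range, transfer and
+        // matrix UNSPECIFIED, the result keeps the coded primaries and falls back to the
+        // framework-configured defaults for the other three fields.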
+        me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+        me.set().primaries =
+                coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
+        me.set().transfer =
+                coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
+        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+        return C2R::Ok();
+    }
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
+                                  const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        (void)size;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+        return mDefaultColorAspects;
+    }
+
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }
+
+    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    // unsafe getters
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
+    static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
+        (void)mayBlock;
+        if (me.v.mastering.red.x > 1) {
+            me.set().mastering.red.x = 1;
+        }
+        if (me.v.mastering.red.y > 1) {
+            me.set().mastering.red.y = 1;
+        }
+        if (me.v.mastering.green.x > 1) {
+            me.set().mastering.green.x = 1;
+        }
+        if (me.v.mastering.green.y > 1) {
+            me.set().mastering.green.y = 1;
+        }
+        if (me.v.mastering.blue.x > 1) {
+            me.set().mastering.blue.x = 1;
+        }
+        if (me.v.mastering.blue.y > 1) {
+            me.set().mastering.blue.y = 1;
+        }
+        if (me.v.mastering.white.x > 1) {
+            me.set().mastering.white.x = 1;
+        }
+        if (me.v.mastering.white.y > 1) {
+            me.set().mastering.white.y = 1;
+        }
+        if (me.v.mastering.maxLuminance > 65535.0) {
+            me.set().mastering.maxLuminance = 65535.0;
+        }
+        if (me.v.mastering.minLuminance > 6.5535) {
+            me.set().mastering.minLuminance = 6.5535;
+        }
+        if (me.v.maxCll > 65535.0) {
+            me.set().maxCll = 65535.0;
+        }
+        if (me.v.maxFall > 65535.0) {
+            me.set().maxFall = 65535.0;
+        }
+        return C2R::Ok();
+    }
+
+  private:
+    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+    std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+};
+
+C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
+                               const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl) {
+    mTimeStart = mTimeEnd = systemTime();
+}
+
+C2SoftDav1dDec::~C2SoftDav1dDec() {
+    onRelease();
+}
+
+c2_status_t C2SoftDav1dDec::onInit() {
+    return initDecoder() ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftDav1dDec::onStop() {
+    // TODO: b/277797541 - investigate if the decoder needs to be flushed.
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    return C2_OK;
+}
+
+void C2SoftDav1dDec::onReset() {
+    (void)onStop();
+    c2_status_t err = onFlush_sm();
+    if (err != C2_OK) {
+        ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
+        destroyDecoder();
+        if (!initDecoder()) {
+            ALOGE("Hard reset failed.");
+        }
+    }
+}
+
+void C2SoftDav1dDec::flushDav1d() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+
+        int res = 0;
+        while (true) {
+            memset(&p, 0, sizeof(p));
+
+            if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
+                if (res != DAV1D_ERR(EAGAIN)) {
+                    ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
+                    break;
+                } else {
+                    res = 0;
+                    break;
+                }
+            } else {
+                dav1d_picture_unref(&p);
+            }
+        }
+
+        dav1d_flush(mDav1dCtx);
+    }
+}
+
+void C2SoftDav1dDec::onRelease() {
+    destroyDecoder();
+}
+
+c2_status_t C2SoftDav1dDec::onFlush_sm() {
+    flushDav1d();
+
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+
+    return C2_OK;
+}
+
+static int GetCPUCoreCount() {
+    int cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+    // _SC_NPROC_ONLN must be defined...
+    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+    CHECK(cpuCoreCount >= 1);
+    ALOGV("Number of CPU cores: %d", cpuCoreCount);
+    return cpuCoreCount;
+}
+
+bool C2SoftDav1dDec::initDecoder() {
+#ifdef FILE_DUMP_ENABLE
+    mC2SoftDav1dDump.initDumping();
+#endif
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        mPixelFormatInfo = mIntf->getPixelFormat_l();
+    }
+
+    const char* version = dav1d_version();
+
+    Dav1dSettings lib_settings;
+    dav1d_default_settings(&lib_settings);
+    int cpu_count = GetCPUCoreCount();
+    lib_settings.n_threads = std::max(cpu_count / 2, 1);  // use up to half the cores by default.
+
+    int32_t numThreads =
+            android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
+    if (numThreads > 0) lib_settings.n_threads = numThreads;
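+    // The thread count therefore defaults to half of the online cores and can be overridden at
+    // runtime via the system property named by NUM_THREADS_DAV1D_PROPERTY, e.g. (assuming the
+    // property resolves to a settable debug.* key on the device):
+    //   adb shell setprop <num-threads-property> 2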
+
+    lib_settings.max_frame_delay = kOutputDelay;
+
+    int res = 0;
+    if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
+        ALOGE("dav1d_open failed. status: %d.", res);
+        return false;
+    } else {
+        ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
+    }
+
+    return true;
+}
+
+void C2SoftDav1dDec::destroyDecoder() {
+    if (mDav1dCtx) {
+        dav1d_close(&mDav1dCtx);
+        mDav1dCtx = nullptr;
+        mOutputBufferIndex = 0;
+        mInputBufferIndex = 0;
+    }
+#ifdef FILE_DUMP_ENABLE
+    mC2SoftDav1dDump.destroyDumping();
+#endif
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+    uint32_t flags = 0;
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+        ALOGV("signalling end_of_stream.");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
+void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                                const std::shared_ptr<C2GraphicBlock>& block,
+                                const Dav1dPicture &img) {
+    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        buffer->setInfo(mIntf->getColorAspects_l());
+    }
+
+    auto fillWork = [buffer, index, img, this](const std::unique_ptr<C2Work>& work) {
+        uint32_t flags = 0;
+        if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
+            (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
+            flags |= C2FrameData::FLAG_END_OF_STREAM;
+            ALOGV("signalling end_of_stream.");
+        }
+        getHDRStaticParams(&img, work);
+        getHDR10PlusInfoData(&img, work);
+
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+        fillWork(work);
+    } else {
+        finish(index, fillWork);
+    }
+}
+
+void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
+                             const std::shared_ptr<C2BlockPool>& pool) {
+    work->result = C2_OK;
+    work->workletsProcessed = 0u;
+    work->worklets.front()->output.configUpdate.clear();
+    work->worklets.front()->output.flags = work->input.flags;
+    if (mSignalledError || mSignalledOutputEos) {
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+    size_t inOffset = 0u;
+    size_t inSize = 0u;
+    C2ReadView rView = mDummyReadView;
+    if (!work->input.buffers.empty()) {
+        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+        inSize = rView.capacity();
+        if (inSize && rView.error()) {
+            ALOGE("read view map failed %d", rView.error());
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    }
+
+    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
+    if (inSize) {
+        mInputBufferIndex = in_frameIndex;
+
+        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
+
+        mTimeStart = systemTime();
+        nsecs_t delay = mTimeStart - mTimeEnd;
+
+        // Send the bitstream data (inputBuffer) to dav1d.
+        if (mDav1dCtx) {
+            int i_ret = 0;
+
+            Dav1dSequenceHeader seq;
+            int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
+            if (res == 0) {
+                ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
+                      seq.max_height, (long)in_frameIndex);
+                if (seq.max_width != mWidth || seq.max_height != mHeight) {
+                    drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
+                    mWidth = seq.max_width;
+                    mHeight = seq.max_height;
+
+                    C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
+                    std::vector<std::unique_ptr<C2SettingResult>> failures;
+                    c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+                    if (err == C2_OK) {
+                        work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
+                    } else {
+                        ALOGE("Config update size failed");
+                        mSignalledError = true;
+                        work->result = C2_CORRUPTED;
+                        work->workletsProcessed = 1u;
+                        return;
+                    }
+                }
+            }
+
+            // insert OBU TD if it is not present.
+            // TODO: b/286852962
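+            // A temporal delimiter OBU is two bytes: a header byte of 0x12 (obu_type = 2, i.e.
+            // OBU_TEMPORAL_DELIMITER, with obu_has_size_field set) followed by an obu_size of
+            // 0; these are exactly the two bytes prepended below when the input does not
+            // already start with a TD.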
+            uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
+            Dav1dData data;
+
+            uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
+                                                      : dav1d_data_create(&data, inSize + 2);
+            if (ptr == nullptr) {
+                ALOGE("dav1d_data_create failed!");
+                i_ret = -1;
+
+            } else {
+                data.m.timestamp = in_frameIndex;
+
+                int new_Size;
+                if (obu_type != DAV1D_OBU_TD) {
+                    new_Size = (int)(inSize + 2);
+
+                    // OBU TD
+                    ptr[0] = 0x12;
+                    ptr[1] = 0;
+
+                    memcpy(ptr + 2, bitstream, inSize);
+                } else {
+                    new_Size = (int)(inSize);
+                    // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
+                    // avoid memcopy operations.
+                    memcpy(ptr, bitstream, new_Size);
+                }
+
+                // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
+                //       "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
+                //       inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
+                //       ptr[3], ptr[4]);
+
+                // Dump the bitstream data (inputBuffer) if dumping is enabled.
+#ifdef FILE_DUMP_ENABLE
+                mC2SoftDav1dDump.dumpInput(ptr, new_Size);
+#endif
+
+                bool b_draining = false;
+                int res;
+
+                do {
+                    res = dav1d_send_data(mDav1dCtx, &data);
+                    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
+                        ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
+                        /* Bitstream decoding errors (typically DAV1D_ERR(EINVAL)) are assumed
+                         * to be recoverable. Other errors returned from this function are
+                         * either unexpected or considered critical failures.
+                         */
+                        i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
+                        break;
+                    }
+
+                    outputBuffer(pool, work);
+
+                } while (res == DAV1D_ERR(EAGAIN));
+
+                if (data.sz > 0) {
+                    ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
+                    dav1d_data_unref(&data);
+                }
+            }
+
+            mTimeEnd = systemTime();
+            nsecs_t decodeTime = mTimeEnd - mTimeStart;
+            // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
+
+            if (i_ret != 0) {
+                ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
+                work->result = C2_CORRUPTED;
+                work->workletsProcessed = 1u;
+                mSignalledError = true;
+                return;
+            }
+        }
+    }
+
+    if (end_of_stream) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        mSignalledOutputEos = true;
+    } else if (!inSize) {
+        fillEmptyWork(work);
+    }
+}
+
+void C2SoftDav1dDec::getHDRStaticParams(const Dav1dPicture* picture,
+                                        const std::unique_ptr<C2Work>& work) {
+    C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
+    bool infoPresent = false;
+
+    if (picture != nullptr) {
+        if (picture->mastering_display != nullptr) {
+            hdrStaticMetadataInfo.mastering.red.x =
+                    picture->mastering_display->primaries[0][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.red.y =
+                    picture->mastering_display->primaries[0][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.green.x =
+                    picture->mastering_display->primaries[1][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.green.y =
+                    picture->mastering_display->primaries[1][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.blue.x =
+                    picture->mastering_display->primaries[2][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.blue.y =
+                    picture->mastering_display->primaries[2][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.white.x =
+                    picture->mastering_display->white_point[0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.white.y =
+                    picture->mastering_display->white_point[1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.maxLuminance =
+                    picture->mastering_display->max_luminance / 256.0;
+            hdrStaticMetadataInfo.mastering.minLuminance =
+                    picture->mastering_display->min_luminance / 16384.0;
+
+            infoPresent = true;
+        }
+
+        if (picture->content_light != nullptr) {
+            hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
+            hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
+            infoPresent = true;
+        }
+    }
+
+    // if (infoPresent) {
+    //   ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
+    //   mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
+    //   mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
+    //   hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
+    //   hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
+    //   hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
+    //   hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
+    //   hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
+    //   hdrStaticMetadataInfo.maxCll,
+    //   hdrStaticMetadataInfo.maxFall,
+    //   mOutputBufferIndex);
+    // }
+
+    // config if static info has changed
+    if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
+        mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
+        work->worklets.front()->output.configUpdate.push_back(
+                C2Param::Copy(mHdrStaticMetadataInfo));
+    }
+}
+
+void C2SoftDav1dDec::getHDR10PlusInfoData(const Dav1dPicture* picture,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (picture != nullptr) {
+        if (picture->itut_t35 != nullptr) {
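+            // Re-serialize the ITU-T T.35 payload that carries the HDR10+ metadata: the
+            // country code, the extension byte only when the country code is 0xFF, and then
+            // the raw payload bytes, before wrapping it in a C2StreamHdr10PlusInfo update.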
+            std::vector<uint8_t> payload;
+            size_t payloadSize = picture->itut_t35->payload_size;
+            if (payloadSize > 0) {
+                payload.push_back(picture->itut_t35->country_code);
+                if (picture->itut_t35->country_code == 0xFF) {
+                    payload.push_back(picture->itut_t35->country_code_extension_byte);
+                }
+                payload.insert(payload.end(), picture->itut_t35->payload,
+                               picture->itut_t35->payload + picture->itut_t35->payload_size);
+            }
+
+            std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
+                    C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
+            if (!hdr10PlusInfo) {
+                ALOGE("Hdr10PlusInfo allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
+
+            // ALOGD("Received a hdr10PlusInfo from picture->itut_t32
+            // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
+            // picture->itut_t35->payload_size,
+            // picture->itut_t35->country_code,
+            // mOutputBufferIndex);
+
+            // config if hdr10Plus info has changed
+            if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
+                mHdr10PlusInfo = std::move(hdr10PlusInfo);
+                work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
+            }
+        }
+    }
+}
+
+void C2SoftDav1dDec::getVuiParams(const Dav1dPicture* picture) {
+    VuiColorAspects vuiColorAspects;
+
+    if (picture) {
+        vuiColorAspects.primaries = picture->seq_hdr->pri;
+        vuiColorAspects.transfer = picture->seq_hdr->trc;
+        vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
+        vuiColorAspects.fullRange = picture->seq_hdr->color_range;
+
+        // ALOGD("Received a vuiColorAspects from dav1d
+        //       (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
+        //               at mOutputBufferIndex = % d,
+        //       out_frameIndex = % ld.",
+        //                          vuiColorAspects.primaries,
+        //       vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
+        //       mOutputBufferIndex, picture->m.timestamp);
+    }
+
+    // convert vui aspects to C2 values if changed
+    if (!(vuiColorAspects == mBitstreamColorAspects)) {
+        mBitstreamColorAspects = vuiColorAspects;
+        ColorAspects sfAspects;
+        C2StreamColorAspectsInfo::input codedAspects = {0u};
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+                vuiColorAspects.fullRange, sfAspects);
+        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+        }
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+    }
+}
+
+void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
+    mSignalledError = true;
+    work->result = error;
+    work->workletsProcessed = 1u;
+}
+
+bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
+    if (size > mTmpFrameBufferSize) {
+        mTmpFrameBuffer.reset(new (std::nothrow) uint16_t[size]);
+        if (mTmpFrameBuffer == nullptr) {
+            mTmpFrameBufferSize = 0;
+            return false;
+        }
+        mTmpFrameBufferSize = size;
+    }
+    return true;
+}
+
+bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                                  const std::unique_ptr<C2Work>& work) {
+    if (!(work && pool)) return false;
+    if (mDav1dCtx == nullptr) return false;
+
+    // Get a decoded picture from dav1d, if the decoder has one available.
+    Dav1dPicture img;
+    memset(&img, 0, sizeof(img));
+
+    int res = 0;
+    res = dav1d_get_picture(mDav1dCtx, &img);
+    if (res == DAV1D_ERR(EAGAIN)) {
+        ALOGV("Not enough data to output a picture.");
+        return false;
+    } else if (res != 0) {
+        ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
+        return false;
+    }
+
+    getVuiParams(&img);
+
+    // out_frameIndex is the frame index that dav1d returns attached to the decoded picture.
+    int64_t out_frameIndex = img.m.timestamp;
+
+    const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;
+
+    int bitdepth = img.p.bpc;
+
+    std::shared_ptr<C2GraphicBlock> block;
+    uint32_t format = HAL_PIXEL_FORMAT_YV12;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
+    if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+        IntfImpl::Lock lock = mIntf->lock();
+        codedColorAspects = mIntf->getColorAspects_l();
+        bool allowRGBA1010102 = false;
+        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+            allowRGBA1010102 = true;
+        }
+        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+    }
+
+    if (mHalPixelFormat != format) {
+        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
+        } else {
+            ALOGE("Config update pixelFormat failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return false;
+        }
+        mHalPixelFormat = format;
+    }
+
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+    // We always create a graphic block that is width aligned to 16 and height
+    // aligned to 2. We set the correct "crop" value of the image in the call to
+    // createGraphicBuffer() by setting the correct image dimensions.
+    c2_status_t err =
+            pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
+
+    if (err != C2_OK) {
+        ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+        work->result = err;
+        return false;
+    }
+
+    C2GraphicView wView = block->map().get();
+
+    if (wView.error()) {
+        ALOGE("graphic view map failed %d", wView.error());
+        work->result = C2_CORRUPTED;
+        return false;
+    }
+
+    // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
+    //       block->height(), mWidth, mHeight, (int)out_frameIndex);
+
+    mOutputBufferIndex = out_frameIndex;
+
+    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
+
+    C2PlanarLayout layout = wView.layout();
+    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    CONV_FORMAT_T convFormat;
+    switch (img.p.layout) {
+        case DAV1D_PIXEL_LAYOUT_I444:
+            convFormat = CONV_FORMAT_I444;
+            break;
+        case DAV1D_PIXEL_LAYOUT_I422:
+            convFormat = CONV_FORMAT_I422;
+            break;
+        default:
+            convFormat = CONV_FORMAT_I420;
+            break;
+    }
+
+    if (bitdepth == 10) {
+        // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
+        // decompression to avoid color conversion.
+        const uint16_t* srcY = (const uint16_t*)img.data[0];
+        const uint16_t* srcU = (const uint16_t*)img.data[1];
+        const uint16_t* srcV = (const uint16_t*)img.data[2];
+        size_t srcYStride = img.stride[0] / 2;
+        size_t srcUStride = img.stride[1] / 2;
+        size_t srcVStride = img.stride[1] / 2;
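+        // dav1d reports strides in bytes (stride[0] for luma, stride[1] shared by both chroma
+        // planes); divide by 2 to get strides in 16-bit samples for the conversion helpers.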
+
+        if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+            if (isMonochrome) {
+                const size_t tmpSize = mWidth;
+                const bool needFill = tmpSize > mTmpFrameBufferSize;
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                srcU = srcV = mTmpFrameBuffer.get();
+                srcUStride = srcVStride = 0;
+                if (needFill) {
+                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
+                }
+            }
+            convertPlanar16ToY410OrRGBA1010102(
+                    dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                    dstYStride, mWidth, mHeight,
+                    std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects),
+                    convFormat);
+        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+            dstYStride /= 2;
+            dstUStride /= 2;
+            dstVStride /= 2;
+            size_t tmpSize = 0;
+            if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
+                (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
+                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+            }
+            convertPlanar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV, srcYStride,
+                                  srcUStride, srcVStride, dstYStride, dstUStride, dstVStride,
+                                  mWidth, mHeight, isMonochrome, convFormat, mTmpFrameBuffer.get(),
+                                  tmpSize);
+        } else {
+            size_t tmpSize = 0;
+            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+            }
+            convertPlanar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                  srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
+                                  isMonochrome, convFormat, mTmpFrameBuffer.get(), tmpSize);
+        }
+
+        // if(mOutputBufferIndex % 100 == 0)
+        ALOGV("output a 10bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        mC2SoftDav1dDump.dumpOutput<uint16_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                              mWidth, mHeight);
+#endif
+    } else {
+        const uint8_t* srcY = (const uint8_t*)img.data[0];
+        const uint8_t* srcU = (const uint8_t*)img.data[1];
+        const uint8_t* srcV = (const uint8_t*)img.data[2];
+
+        size_t srcYStride = img.stride[0];
+        size_t srcUStride = img.stride[1];
+        size_t srcVStride = img.stride[1];
+
+        // if(mOutputBufferIndex % 100 == 0)
+        ALOGV("output a 8bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        mC2SoftDav1dDump.dumpOutput<uint8_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                             mWidth, mHeight);
+#endif
+        convertPlanar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                             dstYStride, dstUStride, dstVStride, mWidth, mHeight, isMonochrome,
+                             convFormat);
+    }
+
+    finishWork(out_frameIndex, work, std::move(block), img);
+    dav1d_picture_unref(&img);
+    block = nullptr;
+    return true;
+}
+
+c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
+                                          const std::shared_ptr<C2BlockPool>& pool,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
+    while (outputBuffer(pool, work)) {
+    }
+
+    if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
+        fillEmptyWork(work);
+    }
+
+    return C2_OK;
+}
+
+c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftDav1dFactory : public C2ComponentFactory {
+  public:
+    C2SoftDav1dFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftDav1dDec(COMPONENT_NAME, id,
+                                   std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftDav1dFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
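+// Standard Codec2 plugin entry points: the platform component store loads this shared library
+// and resolves these symbols to create and destroy the dav1d component factory.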
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftDav1dFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
new file mode 100644
index 0000000..5d2a725
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_DAV1D_DEC_H_
+#define ANDROID_C2_SOFT_DAV1D_DEC_H_
+
+#include <inttypes.h>
+
+#include <memory>
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <C2Config.h>
+#include <SimpleC2Component.h>
+
+#include <dav1d/dav1d.h>
+#include <deque>
+#include <C2SoftDav1dDump.h>
+
+//#define FILE_DUMP_ENABLE 1
+
+namespace android {
+
+struct C2SoftDav1dDec : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftDav1dDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    ~C2SoftDav1dDec();
+
+    // Begin SimpleC2Component overrides.
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+    // End SimpleC2Component overrides.
+
+  private:
+    std::shared_ptr<IntfImpl> mIntf;
+
+    int mInputBufferIndex = 0;
+    int mOutputBufferIndex = 0;
+
+    Dav1dContext* mDav1dCtx = nullptr;
+
+    // configurations used by component in process
+    // (TODO: keep this in intf but make them internal only)
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+    uint32_t mHalPixelFormat;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    bool mSignalledOutputEos;
+    bool mSignalledError;
+    // Used during 10-bit I444/I422 to 10-bit P010 & 8-bit I420 conversions.
+    std::unique_ptr<uint16_t[]> mTmpFrameBuffer;
+    size_t mTmpFrameBufferSize = 0;
+
+    C2StreamHdrStaticMetadataInfo::output mHdrStaticMetadataInfo;
+    std::unique_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfo = nullptr;
+
+    // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+    // converting them to C2 values for each frame
+    struct VuiColorAspects {
+        uint8_t primaries;
+        uint8_t transfer;
+        uint8_t coeffs;
+        uint8_t fullRange;
+
+        // default color aspects
+        VuiColorAspects()
+            : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+              transfer(C2Color::TRANSFER_UNSPECIFIED),
+              coeffs(C2Color::MATRIX_UNSPECIFIED),
+              fullRange(C2Color::RANGE_UNSPECIFIED) {}
+
+        bool operator==(const VuiColorAspects& o) {
+            return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs &&
+                   fullRange == o.fullRange;
+        }
+    } mBitstreamColorAspects;
+
+    nsecs_t mTimeStart = 0;  // Time at the start of decode()
+    nsecs_t mTimeEnd = 0;    // Time at the end of decode()
+
+    bool initDecoder();
+    void getHDRStaticParams(const Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getHDR10PlusInfoData(const Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getVuiParams(const Dav1dPicture* picture);
+    void destroyDecoder();
+    void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2GraphicBlock>& block,
+                    const Dav1dPicture &img);
+    // Sets |work->result| and mSignalledError.
+    void setError(const std::unique_ptr<C2Work>& work, c2_status_t error);
+    bool allocTmpFrameBuffer(size_t size);
+    bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                      const std::unique_ptr<C2Work>& work);
+
+    c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
+
+    void flushDav1d();
+
+#ifdef FILE_DUMP_ENABLE
+    C2SoftDav1dDump mC2SoftDav1dDump;
+#endif
+
+    C2_DO_NOT_COPY(C2SoftDav1dDec);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_DAV1D_DEC_H_
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDump.cpp b/media/codec2/components/dav1d/C2SoftDav1dDump.cpp
new file mode 100644
index 0000000..ec8d6cd
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDump.cpp
@@ -0,0 +1,191 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftDav1dDump"
+#include "C2SoftDav1dDump.h"
+
+namespace android {
+
+// Flag to enable dumping the bitstream and the decoded pictures to files.
+static const bool ENABLE_DUMPING_FILES_DEFAULT = true;
+static const char ENABLE_DUMPING_FILES_PROPERTY[] = "debug.dav1d.enabledumping";
+
+// The number of frames to dump to a file
+static const int NUM_FRAMES_TO_DUMP_DEFAULT = INT_MAX;
+static const char NUM_FRAMES_TO_DUMP_PROPERTY[] = "debug.dav1d.numframestodump";
+
+// start dumping from this frame
+static const int STARTING_FRAME_TO_DUMP_DEFAULT = 0;
+static const char STARTING_FRAME_TO_DUMP_PROPERTY[] = "debug.dav1d.startingframetodump";
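+
+// Illustrative usage on a debuggable device (a sketch; assumes the properties above are
+// settable via adb):
+//   adb shell setprop debug.dav1d.enabledumping true
+//   adb shell setprop debug.dav1d.startingframetodump 0
+//   adb shell setprop debug.dav1d.numframestodump 30
+// Dump files are written under the DUMP_FILE_PATH prefix (/data/local/tmp/dump_*).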
+
+void C2SoftDav1dDump::initDumping() {
+    nsecs_t now = systemTime();
+    snprintf(mInDataFileName, kFileNameLength, "%s_%" PRId64 ".%s", DUMP_FILE_PATH, now,
+             INPUT_DATA_DUMP_EXT);
+    snprintf(mInSizeFileName, kFileNameLength, "%s_%" PRId64 ".%s", DUMP_FILE_PATH, now,
+             INPUT_SIZE_DUMP_EXT);
+    snprintf(mDav1dOutYuvFileName, kFileNameLength, "%s_%" PRId64 "x.%s", DUMP_FILE_PATH, now,
+             OUTPUT_YUV_DUMP_EXT);
+
+    mFramesToDump =
+            android::base::GetIntProperty(NUM_FRAMES_TO_DUMP_PROPERTY, NUM_FRAMES_TO_DUMP_DEFAULT);
+    mFirstFrameToDump = android::base::GetIntProperty(STARTING_FRAME_TO_DUMP_PROPERTY,
+                                                      STARTING_FRAME_TO_DUMP_DEFAULT);
+    bool enableDumping = android::base::GetBoolProperty(ENABLE_DUMPING_FILES_PROPERTY,
+                                                        ENABLE_DUMPING_FILES_DEFAULT);
+    ALOGD("enableDumping = %d, mFramesToDump = %d", enableDumping, mFramesToDump);
+
+    if (enableDumping) {
+        mInDataFile = fopen(mInDataFileName, "wb");
+        if (mInDataFile == nullptr) {
+            ALOGD("Could not open file %s", mInDataFileName);
+        }
+
+        mInSizeFile = fopen(mInSizeFileName, "wb");
+        if (mInSizeFile == nullptr) {
+            ALOGD("Could not open file %s", mInSizeFileName);
+        }
+
+        mDav1dOutYuvFile = fopen(mDav1dOutYuvFileName, "wb");
+        if (mDav1dOutYuvFile == nullptr) {
+            ALOGD("Could not open file %s", mDav1dOutYuvFileName);
+        }
+    }
+}
+
+void C2SoftDav1dDump::destroyDumping() {
+    if (mInDataFile != nullptr) {
+        fclose(mInDataFile);
+        mInDataFile = nullptr;
+    }
+
+    if (mInSizeFile != nullptr) {
+        fclose(mInSizeFile);
+        mInSizeFile = nullptr;
+    }
+
+    if (mDav1dOutYuvFile != nullptr) {
+        fclose(mDav1dOutYuvFile);
+        mDav1dOutYuvFile = nullptr;
+    }
+}
+
+void C2SoftDav1dDump::dumpInput(uint8_t* ptr, int size) {
+    if (mInDataFile) {
+        int ret = fwrite(ptr, 1, size, mInDataFile);
+
+        if (ret != size) {
+            ALOGE("Error in fwrite %s, requested %d, returned %d", mInDataFileName, size, ret);
+        }
+    }
+
+    // Dump the size per inputBuffer if dumping is enabled.
+    if (mInSizeFile) {
+        int ret = fwrite(&size, 1, 4, mInSizeFile);
+
+        if (ret != 4) {
+            ALOGE("Error in fwrite %s, requested %d, returned %d", mInSizeFileName, 4, ret);
+        }
+    }
+}
+
+template <typename T>
+void C2SoftDav1dDump::dumpOutput(const T* srcY, const T* srcU, const T* srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, int width, int height) {
+    mOutputCount++;
+    FILE* fp_out = mDav1dOutYuvFile;
+    int typeSize = sizeof(T);
+    if (fp_out && mOutputCount >= mFirstFrameToDump &&
+        mOutputCount <= (mFirstFrameToDump + mFramesToDump - 1)) {
+        for (int i = 0; i < height; i++) {
+            int ret =
+                    fwrite((uint8_t*)srcY + i * srcYStride * typeSize, 1, width * typeSize, fp_out);
+            if (ret != width * typeSize) {
+                ALOGE("Error in fwrite, requested %d, returned %d", width * typeSize, ret);
+                break;
+            }
+        }
+
+        for (int i = 0; i < height / 2; i++) {
+            int ret = fwrite((uint8_t*)srcU + i * srcUStride * typeSize, 1, width * typeSize / 2,
+                             fp_out);
+            if (ret != width * typeSize / 2) {
+                ALOGE("Error in fwrite, requested %d, returned %d", width * typeSize / 2, ret);
+                break;
+            }
+        }
+
+        for (int i = 0; i < height / 2; i++) {
+            int ret = fwrite((uint8_t*)srcV + i * srcVStride * typeSize, 1, width * typeSize / 2,
+                             fp_out);
+            if (ret != width * typeSize / 2) {
+                ALOGE("Error in fwrite, requested %d, returned %d", width * typeSize / 2, ret);
+                break;
+            }
+        }
+    }
+}
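+
+// The .yuv dump written above is headerless planar data with 4:2:0 subsampling; as an
+// illustration (assuming an 8-bit clip) it can be inspected on the host with something like:
+//   ffplay -f rawvideo -pixel_format yuv420p -video_size <width>x<height> <dump>.yuv
+// (use yuv420p10le for 10-bit output; the pixel format and size must match the decoded clip).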
+
+void C2SoftDav1dDump::writeDav1dOutYuvFile(const Dav1dPicture& p) {
+    if (mDav1dOutYuvFile != NULL) {
+        uint8_t* ptr;
+        const int hbd = p.p.bpc > 8;
+
+        ptr = (uint8_t*)p.data[0];
+        for (int y = 0; y < p.p.h; y++) {
+            int iSize = p.p.w << hbd;
+            int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
+            if (ret != iSize) {
+                ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName, iSize,
+                      ret);
+                break;
+            }
+
+            ptr += p.stride[0];
+        }
+
+        if (p.p.layout != DAV1D_PIXEL_LAYOUT_I400) {
+            // u/v
+            const int ss_ver = p.p.layout == DAV1D_PIXEL_LAYOUT_I420;
+            const int ss_hor = p.p.layout != DAV1D_PIXEL_LAYOUT_I444;
+            const int cw = (p.p.w + ss_hor) >> ss_hor;
+            const int ch = (p.p.h + ss_ver) >> ss_ver;
+            for (int pl = 1; pl <= 2; pl++) {
+                ptr = (uint8_t*)p.data[pl];
+                for (int y = 0; y < ch; y++) {
+                    int iSize = cw << hbd;
+                    int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
+                    if (ret != iSize) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName,
+                              iSize, ret);
+                        break;
+                    }
+                    ptr += p.stride[1];
+                }
+            }
+        }
+    }
+}
+
+template void C2SoftDav1dDump::dumpOutput<uint8_t>(const uint8_t* srcY, const uint8_t* srcU,
+                                                   const uint8_t* srcV, size_t srcYStride,
+                                                   size_t srcUStride, size_t srcVStride, int width,
+                                                   int height);
+template void C2SoftDav1dDump::dumpOutput<uint16_t>(const uint16_t* srcY, const uint16_t* srcU,
+                                                    const uint16_t* srcV, size_t srcYStride,
+                                                    size_t srcUStride, size_t srcVStride, int width,
+                                                    int height);
+}  // namespace android
\ No newline at end of file
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDump.h b/media/codec2/components/dav1d/C2SoftDav1dDump.h
new file mode 100644
index 0000000..ea7a48a
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDump.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_C2_SOFT_DAV1D_DUMP_H_
+#define ANDROID_C2_SOFT_DAV1D_DUMP_H_
+
+#include <cstdio>
+
+#include <android-base/properties.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <dav1d/dav1d.h>
+
+#define DUMP_FILE_PATH "/data/local/tmp/dump"
+#define INPUT_DATA_DUMP_EXT "av1"
+#define INPUT_SIZE_DUMP_EXT "size"
+#define OUTPUT_YUV_DUMP_EXT "yuv"
+
+namespace android {
+constexpr size_t kFileNameLength = 256;
+
+class C2SoftDav1dDump {
+  public:
+    void initDumping();
+    void destroyDumping();
+    void dumpInput(uint8_t* ptr, int new_size);
+    template <typename T>
+    void dumpOutput(const T* srcY, const T* srcU, const T* srcV, size_t srcYStride,
+                    size_t srcUStride, size_t srcVStride, int width, int height);
+    void writeDav1dOutYuvFile(const Dav1dPicture& p);
+
+  private:
+    int mFramesToDump = 0;
+    int mFirstFrameToDump = 0;
+    int mOutputCount = 0;
+
+    char mInDataFileName[kFileNameLength];
+    char mInSizeFileName[kFileNameLength];
+    char mDav1dOutYuvFileName[kFileNameLength];
+
+    FILE* mInDataFile = nullptr;
+    FILE* mInSizeFile = nullptr;
+    FILE* mDav1dOutYuvFile = nullptr;
+};
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_DAV1D_DUMP_H_
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 3e4247b..5141d65 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "C2SoftGav1Dec"
 #include "C2SoftGav1Dec.h"
 
+#include <android-base/properties.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 #include <Codec2BufferUtils.h>
@@ -39,6 +40,9 @@
 
 namespace android {
 
+// Property used to control the number of threads used in the gav1 decoder.
+constexpr char kNumThreadsProperty[] = "debug.c2.gav1.numthreads";
+
 // codecname set and passed in as a compile flag from Android.bp
 constexpr char COMPONENT_NAME[] = CODECNAME;
 
@@ -506,6 +510,10 @@
 
   libgav1::DecoderSettings settings = {};
   settings.threads = GetCPUCoreCount();
+  int32_t numThreads = android::base::GetIntProperty(kNumThreadsProperty, 0);
+  if (numThreads > 0 && numThreads < settings.threads) {
+    settings.threads = numThreads;
+  }
 
   ALOGV("Using libgav1 AV1 software decoder.");
   Libgav1StatusCode status = mCodecCtx->Init(&settings);
@@ -752,6 +760,19 @@
     return true;
 }
 
+bool C2SoftGav1Dec::fillMonochromeRow(int value) {
+    const size_t tmpSize = mWidth;
+    const bool needFill = tmpSize > mTmpFrameBufferSize;
+    if (!allocTmpFrameBuffer(tmpSize)) {
+        ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+        return false;
+    }
+    if (needFill) {
+        std::fill_n(mTmpFrameBuffer.get(), tmpSize, value);
+    }
+    return true;
+}
+
 bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                  const std::unique_ptr<C2Work> &work) {
   if (!(work && pool)) return false;
@@ -773,6 +794,16 @@
     return false;
   }
 
+#if LIBYUV_VERSION < 1871
+  if (buffer->bitdepth > 10) {
+    ALOGE("bitdepth %d is not supported", buffer->bitdepth);
+    mSignalledError = true;
+    work->workletsProcessed = 1u;
+    work->result = C2_CORRUPTED;
+    return false;
+  }
+#endif
+
   const int width = buffer->displayed_width[0];
   const int height = buffer->displayed_height[0];
   if (width != mWidth || height != mHeight) {
@@ -816,7 +847,7 @@
   std::shared_ptr<C2GraphicBlock> block;
   uint32_t format = HAL_PIXEL_FORMAT_YV12;
   std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
-  if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+  if (buffer->bitdepth >= 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
     IntfImpl::Lock lock = mIntf->lock();
     codedColorAspects = mIntf->getColorAspects_l();
     bool allowRGBA1010102 = false;
@@ -828,8 +859,9 @@
     format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
 #if !HAVE_LIBYUV_I410_I210_TO_AB30
     if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
-        (buffer->image_format != libgav1::kImageFormatYuv420)) {
-        ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+        (buffer->image_format != libgav1::kImageFormatYuv420) &&
+        (buffer->bitdepth == 10)) {
+        ALOGE("Only YUV420 output is supported for 10-bit when targeting RGBA_1010102");
       mSignalledError = true;
       work->result = C2_OMITTED;
       work->workletsProcessed = 1u;
@@ -837,6 +869,18 @@
     }
 #endif
   }
+  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_RGBA_1010102 &&
+      (buffer->image_format == libgav1::kImageFormatYuv422 ||
+       buffer->image_format == libgav1::kImageFormatYuv444)) {
+      // There are no 12-bit color conversion functions from YUV422/YUV444 to
+      // RGBA_1010102. Use 8-bit YV12 in this case.
+      format = HAL_PIXEL_FORMAT_YV12;
+  }
+  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+      // There are no 12-bit color conversion functions to P010. Use 8-bit YV12
+      // in this case.
+      format = HAL_PIXEL_FORMAT_YV12;
+  }
 
   if (mHalPixelFormat != format) {
     C2StreamPixelFormatInfo::output pixelFormat(0u, format);
@@ -890,7 +934,41 @@
   size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
   size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
-  if (buffer->bitdepth == 10) {
+  if (buffer->bitdepth == 12) {
+#if LIBYUV_VERSION >= 1871
+      const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
+      const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
+      const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
+      size_t srcYStride = buffer->stride[0] / 2;
+      size_t srcUStride = buffer->stride[1] / 2;
+      size_t srcVStride = buffer->stride[2] / 2;
+      if (isMonochrome) {
+          if (!fillMonochromeRow(2048)) {
+              setError(work, C2_NO_MEMORY);
+              return false;
+          }
+          srcU = srcV = mTmpFrameBuffer.get();
+          srcUStride = srcVStride = 0;
+      }
+      if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+          libyuv::I012ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                   dstY, dstYStride, &libyuv::kYuvV2020Constants,
+                                   mWidth, mHeight);
+      } else if (isMonochrome || buffer->image_format == libgav1::kImageFormatYuv420) {
+          libyuv::I012ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      } else if (buffer->image_format == libgav1::kImageFormatYuv444) {
+          libyuv::I412ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      } else {
+          libyuv::I212ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      }
+#endif  // LIBYUV_VERSION >= 1871
+  } else if (buffer->bitdepth == 10) {
     const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
     const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
     const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
@@ -915,18 +993,12 @@
 #endif  // HAVE_LIBYUV_I410_I210_TO_AB30
         if (!processed) {
             if (isMonochrome) {
-                const size_t tmpSize = mWidth;
-                const bool needFill = tmpSize > mTmpFrameBufferSize;
-                if (!allocTmpFrameBuffer(tmpSize)) {
-                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                if (!fillMonochromeRow(512)) {
                     setError(work, C2_NO_MEMORY);
                     return false;
                 }
                 srcU = srcV = mTmpFrameBuffer.get();
                 srcUStride = srcVStride = 0;
-                if (needFill) {
-                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
-                }
             }
             convertYUV420Planar16ToY410OrRGBA1010102(
                     (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
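
The 12-bit path above picks the output pixel format defensively: whenever libyuv has no direct 12-bit conversion (YUV422/YUV444 to RGBA_1010102, or any 12-bit source to P010), the decoder falls back to 8-bit YV12. The standalone sketch below restates that decision; the enum stand-ins and the helper name are illustrative and not part of the patch.

    // Sketch only: mirrors the fallback rules added in this change.
    enum class ImageFormat { kYuv420, kYuv422, kYuv444 };   // stand-in for libgav1 image formats
    enum HalFormat { YV12, YCBCR_P010, RGBA_1010102 };      // stand-in for the HAL constants

    static HalFormat pickOutputFormatFor12Bit(HalFormat requested, ImageFormat src) {
        if (requested == RGBA_1010102 &&
            (src == ImageFormat::kYuv422 || src == ImageFormat::kYuv444)) {
            // No 12-bit YUV422/YUV444 -> RGBA_1010102 conversion exists; use 8-bit YV12.
            return YV12;
        }
        if (requested == YCBCR_P010) {
            // No 12-bit -> P010 conversion exists either; use 8-bit YV12.
            return YV12;
        }
        return requested;   // otherwise keep the requested format
    }
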
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index c3b27ea..0e09fcc 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -105,6 +105,7 @@
   // Sets |work->result| and mSignalledError. Returns false.
   void setError(const std::unique_ptr<C2Work> &work, c2_status_t error);
   bool allocTmpFrameBuffer(size_t size);
+  bool fillMonochromeRow(int value);
   bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
                     const std::unique_ptr<C2Work>& work);
   c2_status_t drainInternal(uint32_t drainMode,
diff --git a/media/codec2/components/raw/C2SoftRawDec.cpp b/media/codec2/components/raw/C2SoftRawDec.cpp
index a03d4e2..ea13071 100644
--- a/media/codec2/components/raw/C2SoftRawDec.cpp
+++ b/media/codec2/components/raw/C2SoftRawDec.cpp
@@ -65,7 +65,7 @@
         addParameter(
                 DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
                 .withDefault(new C2StreamChannelCountInfo::output(0u, 2))
-                .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+                .withFields({C2F(mChannelCount, value).inRange(1, 12)})
                 .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
                 .build());
 
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index 7e1df91..a04fc41 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -1046,13 +1046,9 @@
      *                      (unexpected)
      */
     virtual c2_status_t fetchLinearBlock(
-            uint32_t capacity __unused, C2MemoryUsage usage __unused,
+            uint32_t capacity, C2MemoryUsage usage,
             std::shared_ptr<C2LinearBlock> *block /* nonnull */,
-            C2Fence *fence /* nonnull */) {
-        *block = nullptr;
-        (void) fence;
-        return C2_OMITTED;
-    }
+            C2Fence *fence /* nonnull */);
 
     /**
      * Blocking fetch for 2D graphic block. Obtains a 2D graphic writable block of given |capacity|
@@ -1096,14 +1092,10 @@
      *                      (unexpected)
      */
     virtual c2_status_t fetchGraphicBlock(
-            uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
-            C2MemoryUsage usage __unused,
+            uint32_t width, uint32_t height, uint32_t format,
+            C2MemoryUsage usage,
             std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
-            C2Fence *fence /* nonnull */) {
-        *block = nullptr;
-        (void) fence;
-        return C2_OMITTED;
-    }
+            C2Fence *fence /* nonnull */);
 protected:
     C2BlockPool() = default;
 };
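
The removed inline bodies presumably move into a translation unit; a minimal sketch of an out-of-line default with the same behavior as the deleted code (the actual definition is not shown in this patch):

    // Sketch: out-of-line default equivalent to the removed inline body.
    c2_status_t C2BlockPool::fetchLinearBlock(
            uint32_t capacity, C2MemoryUsage usage,
            std::shared_ptr<C2LinearBlock> *block, C2Fence *fence) {
        (void)capacity; (void)usage; (void)fence;
        *block = nullptr;
        return C2_OMITTED;
    }
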
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 9a3399d..785cdf2 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -160,6 +160,10 @@
     kParamIndexSecureMode,
     kParamIndexEncryptedBuffer, // info-buffer, used with SM_READ_PROTECTED_WITH_ENCRYPTED
 
+    /* multiple access unit support */
+    kParamIndexLargeFrame,
+    kParamIndexAccessUnitInfos, // struct
+
     // deprecated
     kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
 
@@ -1114,6 +1118,36 @@
 constexpr char C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE[] = "input.buffers.max-size";
 constexpr char C2_PARAMKEY_OUTPUT_MAX_BUFFER_SIZE[] = "output.buffers.max-size";
 
+/**
+ * Large frame struct
+ *
+ * This structure describes the size limits for large frames (frames with multiple
+ * access units).
+ */
+struct C2LargeFrameStruct {
+    uint32_t maxSize;         ///< maximum size of the buffer in bytes
+    uint32_t thresholdSize;   ///< size threshold for the buffer in bytes. The buffer is considered
+                              ///< full as soon as its size reaches or surpasses this limit.
+    C2LargeFrameStruct()
+        : maxSize(0),
+          thresholdSize(0) {}
+
+    C2LargeFrameStruct(uint32_t maxSize_, uint32_t thresholdSize_)
+        : maxSize(maxSize_), thresholdSize(thresholdSize_) {}
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(LargeFrame)
+    C2FIELD(maxSize, "max-size")
+    C2FIELD(thresholdSize, "threshold-size")
+};
+
+/**
+ * This tuning controls the size limits for large output frames for the component.
+ * The default value for this tuning is platform specific.
+ */
+typedef C2StreamParam<C2Tuning, C2LargeFrameStruct, kParamIndexLargeFrame>
+        C2LargeFrame;
+constexpr char C2_PARAMKEY_OUTPUT_LARGE_FRAME[] = "output.large-frame";
+
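
For illustration, a component client could request large-frame limits through the regular config path. This is a sketch only: the helper name and size values are made up, and it assumes an already-connected C2ComponentInterface with C2Component.h and C2Config.h included.

    // Sketch: ask for up to 512 KiB output buffers, treated as full at 256 KiB.
    c2_status_t configureLargeFrames(const std::shared_ptr<C2ComponentInterface> &intf) {
        C2LargeFrame::output largeFrame(0u /* stream */,
                                        512 * 1024 /* maxSize */,
                                        256 * 1024 /* thresholdSize */);
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        return intf->config_vb({&largeFrame}, C2_MAY_BLOCK, &failures);
    }
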
 /* ---------------------------------------- misc. state ---------------------------------------- */
 
 /**
@@ -2146,6 +2180,49 @@
         C2StreamAudioFrameSizeInfo;
 constexpr char C2_PARAMKEY_AUDIO_FRAME_SIZE[] = "raw.audio-frame-size";
 
+/**
+ * Information for an access unit in a large frame (containing multiple access units)
+ */
+struct C2AccessUnitInfosStruct {
+
+    inline C2AccessUnitInfosStruct() {
+        memset(this, 0, sizeof(*this));
+    }
+
+    inline C2AccessUnitInfosStruct(
+            uint32_t flags_,
+            uint32_t size_,
+            int64_t timestamp_)
+        : flags(flags_),
+          size(size_),
+          timestamp(timestamp_) { }
+
+    uint32_t flags; ///< flags for the access unit
+    uint32_t size; ///< size of the access unit in bytes
+    int64_t timestamp; ///< timestamp of the access unit in microseconds
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(AccessUnitInfos)
+    C2FIELD(flags, "flags")
+    C2FIELD(size, "size")
+    C2FIELD(timestamp, "timestamp")
+};
+
+/**
+ * Multiple access unit support (e.g., large audio frames)
+ *
+ * If supported by a component, multiple access units may be contained
+ * in a single work item. For now this is only defined for linear buffers.
+ * The metadata indicates the access-unit boundaries within a single buffer:
+ * the boundary of each access unit is marked by its size, and the next
+ * access unit immediately follows it.
+ */
+typedef C2StreamParam<C2Info, C2SimpleArrayStruct<C2AccessUnitInfosStruct>,
+                kParamIndexAccessUnitInfos>
+        C2AccessUnitInfos;
+
+constexpr char C2_PARAMKEY_INPUT_ACCESS_UNIT_INFOS[] = "input.access-unit-infos";
+constexpr char C2_PARAMKEY_OUTPUT_ACCESS_UNIT_INFOS[] = "output.access-unit-infos";
+
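
Since the access units are packed back to back and each entry records its size, a consumer can split a large linear buffer roughly as sketched below. This is illustrative only; it relies on the flexCount()/m.values accessors that flexible params expose elsewhere in this header, and the function and callback names are made up.

    // Sketch: walk the access units described by an attached C2AccessUnitInfos::output.
    // (assumes C2Config.h is included)
    template <typename F>  // F: void(const uint8_t *au, uint32_t size, int64_t timestampUs, uint32_t flags)
    void forEachAccessUnit(const uint8_t *data, size_t capacity,
                           const C2AccessUnitInfos::output &infos, F &&consume) {
        size_t offset = 0;
        for (size_t i = 0; i < infos.flexCount(); ++i) {
            const C2AccessUnitInfosStruct &au = infos.m.values[i];
            if (offset + au.size > capacity) {
                break;  // malformed metadata; do not read past the buffer
            }
            consume(data + offset, au.size, au.timestamp, au.flags);
            offset += au.size;  // the next access unit immediately follows
        }
    }
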
 /* --------------------------------------- AAC components --------------------------------------- */
 
 /**
diff --git a/media/codec2/core/include/C2ParamDef.h b/media/codec2/core/include/C2ParamDef.h
index d578820..86dfe65 100644
--- a/media/codec2/core/include/C2ParamDef.h
+++ b/media/codec2/core/include/C2ParamDef.h
@@ -404,6 +404,7 @@
     /// Specialization for an input port parameter.
     struct input : public T, public S,
             public _C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_INPUT> {
+        using T::operator!=;
         _C2_CORE_INDEX_OVERRIDE(ParamIndex)
         /// Wrapper around base structure's constructor.
         template<typename ...Args>
@@ -416,6 +417,7 @@
     /// Specialization for an output port parameter.
     struct output : public T, public S,
             public _C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_OUTPUT> {
+        using T::operator!=;
         _C2_CORE_INDEX_OVERRIDE(ParamIndex)
         /// Wrapper around base structure's constructor.
         template<typename ...Args>
@@ -470,6 +472,7 @@
     /// Specialization for an input port parameter.
     struct input : public T,
             public _C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_INPUT> {
+        using T::operator!=;
     private:
         /// Wrapper around base structure's constructor while also specifying port/direction.
         template<typename ...Args>
@@ -486,6 +489,7 @@
     /// Specialization for an output port parameter.
     struct output : public T,
             public _C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_OUTPUT> {
+        using T::operator!=;
     private:
         /// Wrapper around base structure's constructor while also specifying port/direction.
         template<typename ...Args>
@@ -549,6 +553,7 @@
     struct input : public T, public S,
             public _C2StructCheck<S, ParamIndex,
                     T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_INPUT> {
+        using T::operator!=;
         _C2_CORE_INDEX_OVERRIDE(ParamIndex)
 
         /// Default constructor. Stream-ID is undefined.
@@ -567,6 +572,7 @@
     struct output : public T, public S,
             public _C2StructCheck<S, ParamIndex,
                     T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_OUTPUT> {
+        using T::operator!=;
         _C2_CORE_INDEX_OVERRIDE(ParamIndex)
 
         /// Default constructor. Stream-ID is undefined.
@@ -634,6 +640,7 @@
     struct input : public T,
             public _C2FlexStructCheck<S, ParamIndex,
                     T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_INPUT> {
+        using T::operator!=;
     private:
         /// Default constructor. Stream-ID is undefined.
         inline input(size_t flexCount) : T(_Type::CalcSize(flexCount), input::PARAM_TYPE) { }
@@ -656,6 +663,7 @@
     struct output : public T,
             public _C2FlexStructCheck<S, ParamIndex,
                     T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_OUTPUT> {
+        using T::operator!=;
     private:
         /// Default constructor. Stream-ID is undefined.
         inline output(size_t flexCount) : T(_Type::CalcSize(flexCount), output::PARAM_TYPE) { }
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index dd68e7e..b387b2c 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -41,8 +41,17 @@
 
     fuzz_config: {
         cc: [
-            "wonsik@google.com",
+            "android-fwk-video@google.com",
         ],
+        componentid: 1344,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libcodec2",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index a6a6b77..48b6e21 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -6,7 +6,10 @@
 // use libcodec2-aidl-client-defaults instead
 cc_library {
     name: "libcodec2_aidl_client",
-    min_sdk_version: "31",
+
+    defaults: [
+        "libcodec2_hal_selection",
+    ],
 
     srcs: [
         "BufferTypes.cpp",
@@ -21,7 +24,7 @@
     shared_libs: [
         "android.hardware.common-V2-ndk",
         "android.hardware.media.bufferpool@2.0",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
         "android.hardware.media.c2-V1-ndk",
         "libbinder_ndk",
         "libbase",
@@ -54,12 +57,15 @@
 // use libcodec2-aidl-defaults instead
 cc_library {
     name: "libcodec2_aidl",
-    min_sdk_version: "31",
+    min_sdk_version: "30",
     vendor_available: true,
     apex_available: [
         "//apex_available:platform",
         "com.android.media.swcodec",
-        "test_com.android.media.swcodec",
+    ],
+
+    defaults: [
+        "libcodec2_hal_selection",
     ],
 
     srcs: [
@@ -78,7 +84,7 @@
 
     shared_libs: [
         "android.hardware.common-V2-ndk",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
         "android.hardware.media.c2-V1-ndk",
         "libbase",
         "libbinder_ndk",
@@ -132,7 +138,7 @@
 // public dependency for Codec 2.0 HAL service implementations
 cc_defaults {
     name: "libcodec2-aidl-defaults",
-    min_sdk_version: "31",
+    min_sdk_version: "30",
     defaults: ["libcodec2-impl-defaults"],
 
     shared_libs: [
@@ -145,7 +151,6 @@
 // public dependency for Codec 2.0 HAL client
 cc_defaults {
     name: "libcodec2-aidl-client-defaults",
-    min_sdk_version: "31",
     defaults: ["libcodec2-impl-defaults"],
 
     shared_libs: [
@@ -163,18 +168,15 @@
     defaults: [
         "libcodec2-aidl-defaults",
         "service_fuzzer_defaults",
+        "libcodec2-runtime-libs",
     ],
     shared_libs: [
         "libcodec2_vndk",
-
-        "libcodec2_soft_avcdec",
-        "libcodec2_soft_avcenc",
-        "libcodec2_soft_aacdec",
-        "libcodec2_soft_aacenc",
     ],
     fuzz_config: {
         cc: [
             "wonsik@google.com",
         ],
+        triage_assignee: "waghpawan@google.com",
     },
 }
diff --git a/media/codec2/hal/aidl/BufferTypes.cpp b/media/codec2/hal/aidl/BufferTypes.cpp
index b1af579..bc4948b 100644
--- a/media/codec2/hal/aidl/BufferTypes.cpp
+++ b/media/codec2/hal/aidl/BufferTypes.cpp
@@ -201,7 +201,7 @@
 
 template<>
 void SetHandle(BaseBlock *block, const C2Handle *handle) {
-    block->set<BaseBlock::nativeBlock>(makeToAidl(handle));
+    block->set<BaseBlock::nativeBlock>(dupToAidl(handle));
 }
 
 template<>
diff --git a/media/codec2/hal/aidl/Component.cpp b/media/codec2/hal/aidl/Component.cpp
index 2e0859b..4c2d5d3 100644
--- a/media/codec2/hal/aidl/Component.cpp
+++ b/media/codec2/hal/aidl/Component.cpp
@@ -292,48 +292,51 @@
     static constexpr IComponent::BlockPoolAllocator::Tag ALLOCATOR_ID =
         IComponent::BlockPoolAllocator::allocatorId;
     static constexpr IComponent::BlockPoolAllocator::Tag IGBA =
-        IComponent::BlockPoolAllocator::igba;
+        IComponent::BlockPoolAllocator::allocator;
     c2_status_t status = C2_OK;
+    ::android::C2PlatformAllocatorDesc allocatorParam;
     switch (allocator.getTag()) {
-        case ALLOCATOR_ID:
-#ifdef __ANDROID_APEX__
-            status = ::android::CreateCodec2BlockPool(
-                    static_cast<::android::C2PlatformAllocatorStore::id_t>(
-                            allocator.get<ALLOCATOR_ID>()),
-                    mComponent,
-                    &c2BlockPool);
-#else
-            status = ComponentStore::GetFilterWrapper()->createBlockPool(
-                    static_cast<::android::C2PlatformAllocatorStore::id_t>(
-                            allocator.get<ALLOCATOR_ID>()),
-                    mComponent,
-                    &c2BlockPool);
-#endif
-            if (status != C2_OK) {
-                blockPool = nullptr;
-            }
-            break;
-        case IGBA:
-            // FIXME
-            break;
+        case ALLOCATOR_ID: {
+            allocatorParam.allocatorId =
+                    allocator.get<IComponent::BlockPoolAllocator::allocatorId>();
+        }
+        break;
+        case IGBA: {
+            allocatorParam.allocatorId = ::android::C2PlatformAllocatorStore::IGBA;
+            allocatorParam.igba =
+                    allocator.get<IComponent::BlockPoolAllocator::allocator>().igba;
+            allocatorParam.waitableFd.reset(
+                    allocator.get<IComponent::BlockPoolAllocator::allocator>()
+                    .waitableFd.dup().release());
+        }
+        break;
         default:
-            break;
+            return ScopedAStatus::fromServiceSpecificError(C2_CORRUPTED);
     }
-    if (blockPool) {
+#ifdef __ANDROID_APEX__
+    status = ::android::CreateCodec2BlockPool(
+            allocatorParam,
+            mComponent,
+            &c2BlockPool);
+#else
+    status = ComponentStore::GetFilterWrapper()->createBlockPool(
+            allocatorParam,
+            mComponent,
+            &c2BlockPool);
+#endif
+    if (status != C2_OK) {
+        return ScopedAStatus::fromServiceSpecificError(status);
+    }
+    {
         mBlockPoolsMutex.lock();
         mBlockPools.emplace(c2BlockPool->getLocalId(), c2BlockPool);
         mBlockPoolsMutex.unlock();
-    } else if (status == C2_OK) {
-        status = C2_CORRUPTED;
     }
 
     blockPool->blockPoolId = c2BlockPool ? c2BlockPool->getLocalId() : 0;
     blockPool->configurable = SharedRefBase::make<CachedConfigurable>(
             std::make_unique<BlockPoolIntf>(c2BlockPool));
-    if (status == C2_OK) {
-        return ScopedAStatus::ok();
-    }
-    return ScopedAStatus::fromServiceSpecificError(status);
+    return ScopedAStatus::ok();
 }
 
 ScopedAStatus Component::destroyBlockPool(int64_t blockPoolId) {
@@ -400,6 +403,22 @@
     return ScopedAStatus::fromServiceSpecificError(Status::OMITTED);
 }
 
+ScopedAStatus Component::connectToInputSurface(
+        const std::shared_ptr<IInputSurface>& inputSurface,
+        std::shared_ptr<IInputSurfaceConnection> *connection) {
+    // TODO
+    (void)inputSurface;
+    (void)connection;
+    return ScopedAStatus::fromServiceSpecificError(Status::OMITTED);
+}
+
+ScopedAStatus Component::asInputSink(
+        std::shared_ptr<IInputSink> *sink) {
+    // TODO
+    (void)sink;
+    return ScopedAStatus::fromServiceSpecificError(Status::OMITTED);
+}
+
 void Component::initListener(const std::shared_ptr<Component>& self) {
     if (__builtin_available(android __ANDROID_API_T__, *)) {
         std::shared_ptr<C2Component::Listener> c2listener =
@@ -411,7 +430,7 @@
 
         mDeathRecipient = ::ndk::ScopedAIBinder_DeathRecipient(
                 AIBinder_DeathRecipient_new(OnBinderDied));
-        mDeathContext = new DeathContext{weak_from_this()};
+        mDeathContext = new DeathContext{ref<Component>()};
         AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), OnBinderUnlinked);
         AIBinder_linkToDeath(mListener->asBinder().get(), mDeathRecipient.get(), mDeathContext);
     } else {
diff --git a/media/codec2/hal/aidl/ComponentStore.cpp b/media/codec2/hal/aidl/ComponentStore.cpp
index 2489683..f0a1490 100644
--- a/media/codec2/hal/aidl/ComponentStore.cpp
+++ b/media/codec2/hal/aidl/ComponentStore.cpp
@@ -216,7 +216,7 @@
 #endif
         onInterfaceLoaded(c2component->intf());
         std::shared_ptr<Component> comp =
-            SharedRefBase::make<Component>(c2component, listener, shared_from_this(), pool);
+            SharedRefBase::make<Component>(c2component, listener, ref<ComponentStore>(), pool);
         *component = comp;
         if (!component) {
             status = C2_CORRUPTED;
@@ -273,6 +273,13 @@
     return ScopedAStatus::ok();
 }
 
+ScopedAStatus ComponentStore::createInputSurface(
+        std::shared_ptr<IInputSurface> *inputSurface) {
+    // TODO
+    (void)inputSurface;
+    return ScopedAStatus::fromServiceSpecificError(Status::OMITTED);
+}
+
 void ComponentStore::onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf) {
     // invalidate unsupported struct descriptors if a new interface is loaded as it may have
     // exposed new descriptors
diff --git a/media/codec2/hal/aidl/Configurable.cpp b/media/codec2/hal/aidl/Configurable.cpp
index 0326263..2daaac2 100644
--- a/media/codec2/hal/aidl/Configurable.cpp
+++ b/media/codec2/hal/aidl/Configurable.cpp
@@ -19,6 +19,7 @@
 #include <android-base/logging.h>
 
 #include <android/binder_auto_utils.h>
+#include <android-base/hex.h>
 #include <codec2/aidl/Configurable.h>
 #include <codec2/aidl/ParamTypes.h>
 
@@ -61,7 +62,7 @@
 ScopedAStatus CachedConfigurable::query(
         const std::vector<int32_t>& indices,
         bool mayBlock,
-        Params* params) {
+        QueryResult *queryResult) {
     typedef C2Param::Index Index;
     std::vector<Index> c2heapParamIndices(
             (Index*)indices.data(),
@@ -72,13 +73,11 @@
             mayBlock ? C2_MAY_BLOCK : C2_DONT_BLOCK,
             &c2heapParams);
 
-    if (!CreateParamsBlob(params, c2heapParams)) {
+    if (!CreateParamsBlob(&(queryResult->params), c2heapParams)) {
         LOG(WARNING) << "query -- invalid output params.";
     }
-    if (c2res == C2_OK) {
-        return ScopedAStatus::ok();
-    }
-    return ScopedAStatus::fromServiceSpecificError(c2res);
+    queryResult->status.status = c2res;
+    return ScopedAStatus::ok();
 }
 
 ScopedAStatus CachedConfigurable::config(
@@ -115,10 +114,8 @@
     if (!CreateParamsBlob(&result->params, c2params)) {
         LOG(DEBUG) << "config -- invalid output params.";
     }
-    if (c2res == C2_OK) {
-        return ScopedAStatus::ok();
-    }
-    return ScopedAStatus::fromServiceSpecificError(c2res);
+    result->status.status = c2res;
+    return ScopedAStatus::ok();
 }
 
 ScopedAStatus CachedConfigurable::querySupportedParams(
@@ -139,8 +136,6 @@
                 LOG(WARNING) << "querySupportedParams -- invalid output params.";
                 break;
             }
-        } else {
-            res = Status::BAD_INDEX;
         }
     }
     paramDesc->resize(dstIx);
@@ -153,7 +148,7 @@
 ScopedAStatus CachedConfigurable::querySupportedValues(
         const std::vector<FieldSupportedValuesQuery>& fields,
         bool mayBlock,
-        std::vector<FieldSupportedValuesQueryResult>* result) {
+        QuerySupportedValuesResult *queryValues) {
     std::vector<C2FieldSupportedValuesQuery> c2fields;
     {
         // C2FieldSupportedValuesQuery objects are restricted in that some
@@ -173,22 +168,20 @@
     c2_status_t c2res = mIntf->querySupportedValues(
             c2fields,
             mayBlock ? C2_MAY_BLOCK : C2_DONT_BLOCK);
-    result->resize(fields.size());
+    queryValues->values.resize(fields.size());
     size_t dstIx = 0;
     for (const C2FieldSupportedValuesQuery &res : c2fields) {
-        if (ToAidl(&(*result)[dstIx], res)) {
+        if (ToAidl(&(queryValues->values[dstIx]), res)) {
             ++dstIx;
         } else {
-            result->resize(dstIx);
+            queryValues->values.resize(dstIx);
             c2res = C2_CORRUPTED;
             LOG(WARNING) << "querySupportedValues -- invalid output params.";
             break;
         }
     }
-    if (c2res == C2_OK) {
-        return ScopedAStatus::ok();
-    }
-    return ScopedAStatus::fromServiceSpecificError(c2res);
+    queryValues->status.status = c2res;
+    return ScopedAStatus::ok();
 }
 
 }  // namespace utils
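
With these changes the binder transaction reports success even when the underlying C2 call fails; the c2_status_t now travels in-band inside the result parcelable. A client-side sketch of the new query contract follows (the helper name is illustrative, the surrounding AIDL types are assumed to be in scope, and C2_CORRUPTED as the transport-failure placeholder is an assumption):

    // Sketch: read the C2 status out of QueryResult instead of the binder status.
    c2_status_t queryParams(const std::shared_ptr<IConfigurable> &configurable,
                            const std::vector<int32_t> &indices, Params *outParams) {
        QueryResult result;
        ::ndk::ScopedAStatus txn = configurable->query(indices, true /* mayBlock */, &result);
        if (!txn.isOk()) {
            return C2_CORRUPTED;  // transport-level failure (placeholder status)
        }
        *outParams = result.params;
        // The component-level status is carried in the parcelable.
        return static_cast<c2_status_t>(result.status.status);
    }
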
diff --git a/media/codec2/hal/aidl/ParamTypes.cpp b/media/codec2/hal/aidl/ParamTypes.cpp
index 7026f4c..5ad0810 100644
--- a/media/codec2/hal/aidl/ParamTypes.cpp
+++ b/media/codec2/hal/aidl/ParamTypes.cpp
@@ -18,7 +18,12 @@
 #define LOG_TAG "Codec2-AIDL-ParamTypes"
 #include <android-base/logging.h>
 
+#include <android/binder_manager.h>
+// NOTE: due to a dependency from mainline modules, libsysprop cannot be used here
+// #include <android/sysprop/MediaProperties.sysprop.h>
+#include <android-base/properties.h>
 #include <codec2/aidl/ParamTypes.h>
+#include <codec2/common/HalSelection.h>
 #include <codec2/common/ParamTypes.h>
 
 #include "ParamTypes-specialization.h"
@@ -157,8 +162,9 @@
 namespace c2 {
 namespace utils {
 
-// TODO: read it from aconfig flags
-bool IsEnabled() { return false; }
+bool IsSelected() {
+    return ::android::IsCodec2AidlHalSelected();
+}
 
 const char* asString(Status status, const char* def) {
     return asString(static_cast<c2_status_t>(status.status), def);
diff --git a/media/codec2/hal/aidl/fuzzer.cpp b/media/codec2/hal/aidl/fuzzer.cpp
index c1a2762..111ef26 100644
--- a/media/codec2/hal/aidl/fuzzer.cpp
+++ b/media/codec2/hal/aidl/fuzzer.cpp
@@ -29,6 +29,7 @@
     std::shared_ptr<C2ComponentStore> store = ::android::GetCodec2PlatformComponentStore();
     std::shared_ptr<ComponentStore> binder = SharedRefBase::make<ComponentStore>(store);
 
+    signal(SIGPIPE, SIG_IGN);
     fuzzService(binder->asBinder().get(), FuzzedDataProvider(data, size));
 
     return 0;
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/Component.h b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
index d5ea92b..94b760f 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/Component.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
@@ -27,6 +27,9 @@
 #include <aidl/android/hardware/media/c2/IComponentInterface.h>
 #include <aidl/android/hardware/media/c2/IComponentListener.h>
 #include <aidl/android/hardware/media/c2/IComponentStore.h>
+#include <aidl/android/hardware/media/c2/IInputSink.h>
+#include <aidl/android/hardware/media/c2/IInputSurface.h>
+#include <aidl/android/hardware/media/c2/IInputSurfaceConnection.h>
 
 #include <C2Component.h>
 #include <C2Buffer.h>
@@ -46,8 +49,7 @@
 
 struct ComponentStore;
 
-struct Component : public BnComponent,
-                   public std::enable_shared_from_this<Component> {
+struct Component : public BnComponent {
     Component(
             const std::shared_ptr<C2Component>&,
             const std::shared_ptr<IComponentListener>& listener,
@@ -72,6 +74,11 @@
     ::ndk::ScopedAStatus configureVideoTunnel(
             int32_t avSyncHwId,
             common::NativeHandle* handle) override;
+    ::ndk::ScopedAStatus connectToInputSurface(
+            const std::shared_ptr<IInputSurface>& inputSurface,
+            std::shared_ptr<IInputSurfaceConnection> *connection) override;
+    ::ndk::ScopedAStatus asInputSink(
+            std::shared_ptr<IInputSink> *sink) override;
 
 protected:
     c2_status_t mInit;
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
index 7fc5d2f..0698b0f 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
@@ -23,6 +23,7 @@
 
 #include <aidl/android/hardware/media/bufferpool2/IClientManager.h>
 #include <aidl/android/hardware/media/c2/BnComponentStore.h>
+#include <aidl/android/hardware/media/c2/IInputSurface.h>
 
 #include <C2Component.h>
 #include <C2Param.h>
@@ -50,8 +51,7 @@
 
 using ::aidl::android::hardware::media::bufferpool2::IClientManager;
 
-struct ComponentStore : public BnComponentStore,
-                        public std::enable_shared_from_this<ComponentStore> {
+struct ComponentStore : public BnComponentStore {
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
@@ -86,6 +86,8 @@
             std::shared_ptr<IComponentInterface> *intf) override;
     virtual ::ndk::ScopedAStatus listComponents(
             std::vector<IComponentStore::ComponentTraits>* traits) override;
+    virtual ::ndk::ScopedAStatus createInputSurface(
+            std::shared_ptr<IInputSurface> *inputSurface) override;
     virtual ::ndk::ScopedAStatus getStructDescriptors(
             const std::vector<int32_t>& indices,
             std::vector<StructDescriptor> *descs) override;
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/Configurable.h b/media/codec2/hal/aidl/include/codec2/aidl/Configurable.h
index 6cc2c1b..96d3516 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/Configurable.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/Configurable.h
@@ -111,7 +111,7 @@
     virtual ::ndk::ScopedAStatus query(
             const std::vector<int32_t>& indices,
             bool mayBlock,
-            Params* params) override;
+            QueryResult* result) override;
 
     virtual ::ndk::ScopedAStatus config(
             const ::aidl::android::hardware::media::c2::Params& params,
@@ -126,7 +126,7 @@
     virtual ::ndk::ScopedAStatus querySupportedValues(
             const std::vector<FieldSupportedValuesQuery>& fields,
             bool mayBlock,
-            std::vector<FieldSupportedValuesQueryResult>* result) override;
+            QuerySupportedValuesResult* result) override;
 
 protected:
     // Common Codec2.0 interface wrapper
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h b/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h
index 3f82ee3..7c31a06 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ParamTypes.h
@@ -37,8 +37,8 @@
 namespace c2 {
 namespace utils {
 
-// Returns true iff AIDL c2 HAL is enabled
-bool IsEnabled();
+// Returns true iff AIDL c2 HAL is selected for the system
+bool IsSelected();
 
 // Make asString() and operator<< work with Status as well as c2_status_t.
 C2_DECLARE_AS_STRING_AND_DEFINE_STREAM_OUT(Status);
diff --git a/media/codec2/hal/client/Android.bp b/media/codec2/hal/client/Android.bp
index 0b5b940..af6f4ae 100644
--- a/media/codec2/hal/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -43,7 +43,7 @@
         "android.hardware.media.c2@1.0",
         "android.hardware.media.c2@1.1",
         "android.hardware.media.c2@1.2",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
         "android.hardware.media.c2-V1-ndk",
         "libbase",
         "libbinder",
diff --git a/media/codec2/hal/client/GraphicBufferAllocator.cpp b/media/codec2/hal/client/GraphicBufferAllocator.cpp
index 7045537..8f489ec 100644
--- a/media/codec2/hal/client/GraphicBufferAllocator.cpp
+++ b/media/codec2/hal/client/GraphicBufferAllocator.cpp
@@ -13,6 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-GraphicBufferAllocator"
+
 
 #include <gui/IProducerListener.h>
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 573ded8..01b0678 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -13,6 +13,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GraphicsTracker"
 #include <fcntl.h>
 #include <unistd.h>
 
@@ -73,11 +75,19 @@
 }
 
 GraphicsTracker::BufferItem::BufferItem(
-        uint32_t generation,
-        AHardwareBuffer_Desc *desc, AHardwareBuffer *pBuf) :
+        uint32_t generation, AHardwareBuffer *pBuf, uint64_t usage) :
         mInit{true}, mGeneration{generation}, mSlot{-1},
-        mBuf{pBuf}, mUsage{::android::AHardwareBuffer_convertToGrallocUsageBits(desc->usage)},
+        mBuf{pBuf}, mUsage{usage},
         mFence{Fence::NO_FENCE} {
+    if (__builtin_available(android __ANDROID_API_T__, *)) {
+        int ret = AHardwareBuffer_getId(mBuf, &mId);
+        if (ret != ::android::OK) {
+            mInit = false;
+            mBuf = nullptr;
+            return;
+        }
+    }
+    AHardwareBuffer_acquire(mBuf);
 }
 
 GraphicsTracker::BufferItem::~BufferItem() {
@@ -86,7 +96,8 @@
     }
 }
 
-sp<GraphicBuffer> GraphicsTracker::BufferItem::updateBuffer(
+
+std::shared_ptr<GraphicsTracker::BufferItem> GraphicsTracker::BufferItem::migrateBuffer(
         uint64_t newUsage, uint32_t newGeneration) {
     if (!mInit) {
         return nullptr;
@@ -111,21 +122,28 @@
         return nullptr;
     }
 
-    GraphicBuffer *gb = ::android::AHardwareBuffer_to_GraphicBuffer(newBuf);
-    if (!gb) {
-        AHardwareBuffer_release(newBuf);
+    std::shared_ptr<BufferItem> newBuffer =
+            std::make_shared<BufferItem>(newGeneration, newBuf, newUsage);
+    AHardwareBuffer_release(newBuf);
+    return newBuffer;
+}
+
+sp<GraphicBuffer> GraphicsTracker::BufferItem::getGraphicBuffer() {
+    if (!mInit) {
         return nullptr;
     }
-
-    gb->setGenerationNumber(newGeneration);
-    mUsage = newUsage;
-    mGeneration = newGeneration;
-    AHardwareBuffer_release(mBuf);
-    // acquire is already done when creating.
-    mBuf = newBuf;
+    GraphicBuffer *gb = ::android::AHardwareBuffer_to_GraphicBuffer(mBuf);
+    if (!gb) {
+        return nullptr;
+    }
+    gb->setGenerationNumber(mGeneration);
     return gb;
 }
 
+GraphicsTracker::BufferCache::~BufferCache() {
+    ALOGV("BufferCache destruction: generation(%d), igbp(%d)", mGeneration, (bool)mIgbp);
+}
+
 void GraphicsTracker::BufferCache::waitOnSlot(int slot) {
     // TODO: log
     CHECK(0 <= slot && slot < kNumSlots);
@@ -138,6 +156,7 @@
 
 void GraphicsTracker::BufferCache::blockSlot(int slot) {
     CHECK(0 <= slot && slot < kNumSlots);
+    ALOGV("block slot %d", slot);
     BlockedSlot *p = &mBlockedSlots[slot];
     std::unique_lock<std::mutex> l(p->l);
     p->blocked = true;
@@ -145,6 +164,7 @@
 
 void GraphicsTracker::BufferCache::unblockSlot(int slot) {
     CHECK(0 <= slot && slot < kNumSlots);
+    ALOGV("unblock slot %d", slot);
     BlockedSlot *p = &mBlockedSlots[slot];
     std::unique_lock<std::mutex> l(p->l);
     p->blocked = false;
@@ -153,20 +173,17 @@
 }
 
 GraphicsTracker::GraphicsTracker(int maxDequeueCount)
-    : mMaxDequeue{maxDequeueCount}, mMaxDequeueRequested{maxDequeueCount},
+    : mBufferCache(new BufferCache()), mMaxDequeue{maxDequeueCount},
     mMaxDequeueCommitted{maxDequeueCount},
-    mMaxDequeueRequestedSeqId{0UL}, mMaxDequeueCommittedSeqId{0ULL},
     mDequeueable{maxDequeueCount},
     mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
     mInConfig{false}, mStopped{false} {
     if (maxDequeueCount < kMaxDequeueMin) {
         mMaxDequeue = kMaxDequeueMin;
-        mMaxDequeueRequested = kMaxDequeueMin;
         mMaxDequeueCommitted = kMaxDequeueMin;
         mDequeueable = kMaxDequeueMin;
     } else if(maxDequeueCount > kMaxDequeueMax) {
         mMaxDequeue = kMaxDequeueMax;
-        mMaxDequeueRequested = kMaxDequeueMax;
         mMaxDequeueCommitted = kMaxDequeueMax;
         mDequeueable = kMaxDequeueMax;
     }
@@ -176,36 +193,36 @@
     mReadPipeFd.reset(pipefd[0]);
     mWritePipeFd.reset(pipefd[1]);
 
-    mEventQueueThread = std::thread([this](){processEvent();});
+    // ctor does not require lock to be held.
+    writeIncDequeueableLocked(mDequeueable);
 
     CHECK(ret >= 0);
-    CHECK(mEventQueueThread.joinable());
 }
 
 GraphicsTracker::~GraphicsTracker() {
     stop();
-    if (mEventQueueThread.joinable()) {
-        std::unique_lock<std::mutex> l(mEventLock);
-        l.unlock();
-        mEventCv.notify_one();
-        mEventQueueThread.join();
-    }
 }
 
 bool GraphicsTracker::adjustDequeueConfLocked(bool *updateDequeue) {
     // TODO: can't we adjust during config? not committing it may safe?
     *updateDequeue = false;
-    if (!mInConfig && mMaxDequeueRequested < mMaxDequeue) {
-        int delta = mMaxDequeue - mMaxDequeueRequested;
+    if (!mInConfig && mMaxDequeueRequested.has_value() && mMaxDequeueRequested < mMaxDequeue) {
+        int delta = mMaxDequeue - mMaxDequeueRequested.value();
+        int drained = 0;
         // Since we are supposed to increase mDequeuable by one already
         int adjustable = mDequeueable + 1;
         if (adjustable >= delta) {
-            mMaxDequeue = mMaxDequeueRequested;
+            mMaxDequeue = mMaxDequeueRequested.value();
             mDequeueable -= (delta - 1);
+            drained = delta - 1;
         } else {
             mMaxDequeue -= adjustable;
+            drained = mDequeueable;
             mDequeueable = 0;
         }
+        if (drained > 0) {
+            drainDequeueableLocked(drained);
+        }
         if (mMaxDequeueRequested == mMaxDequeue && mMaxDequeueRequested != mMaxDequeueCommitted) {
             *updateDequeue = true;
         }
@@ -216,6 +233,7 @@
 
 c2_status_t GraphicsTracker::configureGraphics(
         const sp<IGraphicBufferProducer>& igbp, uint32_t generation) {
+    // TODO: wait until operations on the previous IGBP are completed.
     std::shared_ptr<BufferCache> prevCache;
     int prevDequeueCommitted;
 
@@ -235,14 +253,28 @@
     if (igbp) {
         ret = igbp->getUniqueId(&bqId);
     }
-    if (ret != ::android::OK || prevCache->mGeneration == generation || prevCache->mBqId == bqId) {
+    if (ret != ::android::OK ||
+            prevCache->mGeneration == generation) {
+        ALOGE("new surface configure fail due to wrong or same bqId or same generation:"
+              "igbp(%d:%llu -> %llu), gen(%lu -> %lu)", (bool)igbp,
+              (unsigned long long)prevCache->mBqId, (unsigned long long)bqId,
+              (unsigned long)prevCache->mGeneration, (unsigned long)generation);
+        std::unique_lock<std::mutex> l(mLock);
+        mInConfig = false;
         return C2_BAD_VALUE;
     }
-    ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
-    if (ret != ::android::OK) {
-        // TODO: sort out the error from igbp and return an error accordingly.
-        return C2_CORRUPTED;
+    if (igbp) {
+        ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
+        if (ret != ::android::OK) {
+            ALOGE("new surface maxDequeueBufferCount configure fail");
+            // TODO: sort out the error from igbp and return an error accordingly.
+            std::unique_lock<std::mutex> l(mLock);
+            mInConfig = false;
+            return C2_CORRUPTED;
+        }
     }
+    ALOGD("new surface configured with id:%llu gen:%lu maxDequeue:%d",
+          (unsigned long long)bqId, (unsigned long)generation, prevDequeueCommitted);
     std::shared_ptr<BufferCache> newCache = std::make_shared<BufferCache>(bqId, generation, igbp);
     {
         std::unique_lock<std::mutex> l(mLock);
@@ -264,59 +296,74 @@
     // (Sometimes maxDequeueCount cannot be committed if the number of
     // dequeued buffer count is bigger.)
     int maxDequeueToCommit;
-    // max dequeue count which is committed to IGBP currently
-    // (actually mMaxDequeueCommitted, but needs to be read outside lock.)
-    int curMaxDequeueCommitted;
     std::unique_lock<std::mutex> cl(mConfigLock);
     {
         std::unique_lock<std::mutex> l(mLock);
-        if (mMaxDequeueRequested == maxDequeueCount) {
+        if (mMaxDequeueRequested.has_value()) {
+            if (mMaxDequeueRequested == maxDequeueCount) {
+                ALOGD("maxDequeueCount requested with %d already", maxDequeueCount);
+                return C2_OK;
+            }
+        } else if (mMaxDequeue == maxDequeueCount) {
+            ALOGD("maxDequeueCount is already %d", maxDequeueCount);
             return C2_OK;
         }
         mInConfig = true;
         mMaxDequeueRequested = maxDequeueCount;
         cache = mBufferCache;
-        curMaxDequeueCommitted = mMaxDequeueCommitted;
         if (mMaxDequeue <= maxDequeueCount) {
             maxDequeueToCommit = maxDequeueCount;
         } else {
             // Since mDequeuable is decreasing,
             // a delievered ready to allocate event may not be fulfilled.
             // Another waiting via a waitable object may be necessary in the case.
-            int delta = mMaxDequeue - maxDequeueCount;
-            if (delta <= mDequeueable) {
-                maxDequeueToCommit = maxDequeueCount;
-                mDequeueable -= delta;
-            } else {
-                maxDequeueToCommit = mMaxDequeue - mDequeueable;
-                mDequeueable = 0;
+            int delta = std::min(mMaxDequeue - maxDequeueCount, mDequeueable);
+            maxDequeueToCommit = mMaxDequeue - delta;
+            mDequeueable -= delta;
+            if (delta > 0) {
+                drainDequeueableLocked(delta);
             }
         }
     }
 
     bool committed = true;
-    if (cache->mIgbp && maxDequeueToCommit != curMaxDequeueCommitted) {
+    if (cache->mIgbp && maxDequeueToCommit != mMaxDequeueCommitted) {
         ::android::status_t ret = cache->mIgbp->setMaxDequeuedBufferCount(maxDequeueToCommit);
         committed = (ret == ::android::OK);
-        if (!committed) {
+        if (committed) {
+            ALOGD("maxDequeueCount committed to IGBP: %d", maxDequeueToCommit);
+        } else {
             // This should not happen.
-            ALOGE("dequeueCount failed with error(%d)", (int)ret);
+            ALOGE("maxdequeueCount update to IGBP failed with error(%d)", (int)ret);
         }
     }
 
+    int oldMaxDequeue = 0;
+    int requested = 0;
     {
         std::unique_lock<std::mutex> l(mLock);
         mInConfig = false;
+        oldMaxDequeue = mMaxDequeue;
+        mMaxDequeue = maxDequeueToCommit; // we already drained dequeueable
         if (committed) {
+            clearCacheIfNecessaryLocked(cache, maxDequeueToCommit);
             mMaxDequeueCommitted = maxDequeueToCommit;
-            int delta = mMaxDequeueCommitted - mMaxDequeue;
+            if (mMaxDequeueRequested == mMaxDequeueCommitted &&
+                  mMaxDequeueRequested == mMaxDequeue) {
+                mMaxDequeueRequested.reset();
+            }
+            if (mMaxDequeueRequested.has_value()) {
+                requested = mMaxDequeueRequested.value();
+            }
+            int delta = mMaxDequeueCommitted - oldMaxDequeue;
             if (delta > 0) {
                 mDequeueable += delta;
-                l.unlock();
-                writeIncDequeueable(delta);
+                writeIncDequeueableLocked(delta);
             }
         }
     }
+    ALOGD("maxDqueueCount change %d -> %d: pending: %d",
+          oldMaxDequeue, maxDequeueToCommit, requested);
 
     if (!committed) {
         return C2_CORRUPTED;
@@ -327,115 +374,124 @@
 void GraphicsTracker::updateDequeueConf() {
     std::shared_ptr<BufferCache> cache;
     int dequeueCommit;
+    ALOGV("trying to update max dequeue count");
     std::unique_lock<std::mutex> cl(mConfigLock);
     {
         std::unique_lock<std::mutex> l(mLock);
-        if (mMaxDequeue == mMaxDequeueRequested && mMaxDequeueCommitted != mMaxDequeueRequested) {
-            dequeueCommit = mMaxDequeue;
-            mInConfig = true;
-            cache = mBufferCache;
-        } else {
+        if (!mMaxDequeueRequested.has_value() || mMaxDequeue != mMaxDequeueRequested) {
             return;
         }
+        if (mMaxDequeueCommitted == mMaxDequeueRequested) {
+            // already committed; this should not normally happen.
+            mMaxDequeueRequested.reset();
+            return;
+        }
+        dequeueCommit = mMaxDequeue;
+        mInConfig = true;
+        cache = mBufferCache;
     }
     bool committed = true;
     if (cache->mIgbp) {
         ::android::status_t ret = cache->mIgbp->setMaxDequeuedBufferCount(dequeueCommit);
         committed = (ret == ::android::OK);
-        if (!committed) {
+        if (committed) {
+            ALOGD("delayed maxDequeueCount update to IGBP: %d", dequeueCommit);
+        } else {
             // This should not happen.
-            ALOGE("dequeueCount failed with error(%d)", (int)ret);
+            ALOGE("delayed maxdequeueCount update to IGBP failed with error(%d)", (int)ret);
         }
     }
-    int cleared = 0;
     {
         // cache == mCache here, since we locked config.
         std::unique_lock<std::mutex> l(mLock);
         mInConfig = false;
         if (committed) {
-            if (cache->mIgbp && dequeueCommit < mMaxDequeueCommitted) {
-                // we are shrinking # of buffers, so clearing the cache.
-                for (auto it = cache->mBuffers.begin(); it != cache->mBuffers.end();) {
-                    uint64_t bid = it->second->mId;
-                    if (mDequeued.count(bid) == 0 || mDeallocating.count(bid) > 0) {
-                        ++cleared;
-                        it = cache->mBuffers.erase(it);
-                    } else {
-                        ++it;
-                    }
-                }
-            }
+            clearCacheIfNecessaryLocked(cache, dequeueCommit);
             mMaxDequeueCommitted = dequeueCommit;
         }
+        mMaxDequeueRequested.reset();
     }
-    if (cleared > 0) {
-        ALOGD("%d buffers are cleared from cache, due to IGBP capacity change", cleared);
-    }
+}
 
+void GraphicsTracker::clearCacheIfNecessaryLocked(const std::shared_ptr<BufferCache> &cache,
+                                            int maxDequeueCommitted) {
+    int cleared = 0;
+    size_t origCacheSize = cache->mBuffers.size();
+    if (cache->mIgbp && maxDequeueCommitted < mMaxDequeueCommitted) {
+        // we are shrinking the # of buffers in this case, so evict the
+        // previously cached buffers.
+        for (auto it = cache->mBuffers.begin(); it != cache->mBuffers.end();) {
+            uint64_t bid = it->second->mId;
+            if (mDequeued.count(bid) == 0 || mDeallocating.count(bid) > 0) {
+                ++cleared;
+                it = cache->mBuffers.erase(it);
+            } else {
+                ++it;
+            }
+        }
+    }
+    ALOGD("Cache size %zu -> %zu: maybe_cleared(%d), dequeued(%zu)",
+          origCacheSize, cache->mBuffers.size(), cleared, mDequeued.size());
+}
+
+int GraphicsTracker::getCurDequeueable() {
+    std::unique_lock<std::mutex> l(mLock);
+    return mDequeueable;
 }
 
 void GraphicsTracker::stop() {
-    bool expected = false;
-    std::unique_lock<std::mutex> l(mEventLock);
-    bool updated = mStopped.compare_exchange_strong(expected, true);
-    if (updated) {
-        int writeFd = mWritePipeFd.release();
+    // TODO: wait until all operations on the current IGBP
+    // have completed.
+    std::unique_lock<std::mutex> l(mLock);
+    if (mStopped) {
+        return;
+    }
+    mStopped = true;
+    int writeFd = mWritePipeFd.release();
+    if (writeFd >= 0) {
         ::close(writeFd);
     }
 }
 
-void GraphicsTracker::writeIncDequeueable(int inc) {
+void GraphicsTracker::writeIncDequeueableLocked(int inc) {
     CHECK(inc > 0 && inc < kMaxDequeueMax);
     thread_local char buf[kMaxDequeueMax];
-    int diff = 0;
-    {
-        std::unique_lock<std::mutex> l(mEventLock);
-        if (mStopped) {
-            return;
-        }
-        CHECK(mWritePipeFd.get() >= 0);
-        int ret = ::write(mWritePipeFd.get(), buf, inc);
-        if (ret == inc) {
-            return;
-        }
-        diff = ret < 0 ? inc : inc - ret;
-
-        // Partial write or EINTR. This will not happen in a real scenario.
-        mIncDequeueable += diff;
-        if (mIncDequeueable > 0) {
-            l.unlock();
-            mEventCv.notify_one();
-            ALOGW("updating dequeueable to pipefd pending");
-        }
+    if (mStopped) { // reading end closed;
+        return;
     }
+    int writeFd = mWritePipeFd.get();
+    if (writeFd < 0) {
+        // initialization failed and the fd is not valid.
+        return;
+    }
+    int ret = ::write(writeFd, buf, inc);
+    // Since this is non-blocking i/o, it never returns EINTR.
+    //
+    // ::write() to a pipe is guaranteed to succeed atomically when writing less
+    // than PIPE_BUF bytes. The pipe/fifo buffer is at least 4K while our total
+    // max pending count is 64, so it never returns EAGAIN here either.
+    // See pipe(7) for further information.
+    //
+    // Other errors are serious: we can no longer keep mDequeueable in sync with
+    // the number of pending bytes in the pipe/fifo, so it is better to abort here.
+    // TODO: do not abort here. (b/318717399)
+    CHECK(ret == inc);
 }
 
-void GraphicsTracker::processEvent() {
-    // This is for partial/failed writes to the writing end.
-    // This may not happen in the real scenario.
+void GraphicsTracker::drainDequeueableLocked(int dec) {
+    CHECK(dec > 0 && dec < kMaxDequeueMax);
     thread_local char buf[kMaxDequeueMax];
-    while (true) {
-        std::unique_lock<std::mutex> l(mEventLock);
-        if (mStopped) {
-            break;
-        }
-        if (mIncDequeueable > 0) {
-            int inc = mIncDequeueable > kMaxDequeueMax ? kMaxDequeueMax : mIncDequeueable;
-            int ret = ::write(mWritePipeFd.get(), buf, inc);
-            int written = ret <= 0 ? 0 : ret;
-            mIncDequeueable -= written;
-            if (mIncDequeueable > 0) {
-                l.unlock();
-                if (ret < 0) {
-                    ALOGE("write to writing end failed %d", errno);
-                } else {
-                    ALOGW("partial write %d(%d)", inc, written);
-                }
-                continue;
-            }
-        }
-        mEventCv.wait(l);
+    if (mStopped) {
+        return;
     }
+    int readFd = mReadPipeFd.get();
+    if (readFd < 0) {
+        // initialization failed and the fd is not valid.
+        return;
+    }
+    int ret = ::read(readFd, buf, dec);
+    // TODO: do not abort here. (b/318717399)
+    CHECK(ret == dec);
 }
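
The two helpers above use the pipe itself as the counter: each byte written corresponds to one dequeueable buffer, and the read end doubles as the pollable fd handed out via getWaitableFd(). A reduced, standalone sketch of the pattern (not the tracker code; struct and method names are made up, and errors are collapsed into asserts):

    // Sketch: a pipe used as a counting semaphore with a pollable read end.
    #include <fcntl.h>
    #include <unistd.h>
    #include <cassert>

    struct PipeCounter {
        int readFd = -1;
        int writeFd = -1;

        bool init() {
            int fds[2];
            if (::pipe2(fds, O_CLOEXEC | O_NONBLOCK) != 0) return false;
            readFd = fds[0];
            writeFd = fds[1];
            return true;
        }

        // Signal `inc` newly available items, one byte per item. The write is
        // atomic as long as `inc` stays below PIPE_BUF (see pipe(7)).
        void post(int inc) {
            char buf[64] = {};
            assert(inc > 0 && inc <= static_cast<int>(sizeof(buf)));
            ssize_t written = ::write(writeFd, buf, static_cast<size_t>(inc));
            assert(written == inc);
            (void)written;
        }

        // Reclaim `dec` items that were posted but should no longer be consumable.
        void drain(int dec) {
            char buf[64];
            assert(dec > 0 && dec <= static_cast<int>(sizeof(buf)));
            ssize_t drained = ::read(readFd, buf, static_cast<size_t>(dec));
            assert(drained == dec);
            (void)drained;
        }

        // readFd can be handed to poll()/epoll(); readable means at least one item is available.
    };
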
 
 c2_status_t GraphicsTracker::getWaitableFd(int *pipeFd) {
@@ -470,6 +526,7 @@
         }
         if (ret == 0) {
             // writing end is closed
+            ALOGE("writing end for the waitable object seems to be closed");
             return C2_BAD_STATE;
         }
         mDequeueable--;
@@ -492,6 +549,7 @@
             CHECK(it != cache->mBuffers.end());
             it->second->mFence = fence;
             *pBuffer = it->second;
+            ALOGV("an allocated buffer already cached, updated Fence");
         } else if (cache.get() == mBufferCache.get() && mBufferCache->mIgbp) {
             // Cache the buffer if it is allocated from the current IGBP
             CHECK(slot >= 0);
@@ -499,6 +557,7 @@
             if (!ret.second) {
                 ret.first->second = *pBuffer;
             }
+            ALOGV("an allocated buffer not cached from the current IGBP");
         }
         uint64_t bid = (*pBuffer)->mId;
         auto mapRet = mDequeued.emplace(bid, *pBuffer);
@@ -508,8 +567,7 @@
             return;
         }
         mDequeueable++;
-        l.unlock();
-        writeIncDequeueable(1);
+        writeIncDequeueableLocked(1);
     }
 }
 
@@ -520,7 +578,7 @@
 // retrieved by commitAllocate();
 c2_status_t GraphicsTracker::_allocate(const std::shared_ptr<BufferCache> &cache,
                                       uint32_t width, uint32_t height, PixelFormat format,
-                                      int64_t usage,
+                                      uint64_t usage,
                                       bool *cached,
                                       int *rSlotId,
                                       sp<Fence> *rFence,
@@ -545,11 +603,21 @@
             return ret == ::android::NO_MEMORY ? C2_NO_MEMORY : C2_CORRUPTED;
         }
         *cached = false;
-        *buffer = std::make_shared<BufferItem>(generation, &desc, buf);
+        *rSlotId = -1;
+        *rFence = Fence::NO_FENCE;
+        *buffer = std::make_shared<BufferItem>(generation, buf, usage);
+        AHardwareBuffer_release(buf); // remove the acquire count added by
+                                      // AHardwareBuffer_allocate().
         if (!*buffer) {
-            AHardwareBuffer_release(buf);
+            ALOGE("direct allocation of AHB successful, but failed to create BufferItem");
             return C2_NO_MEMORY;
         }
+        if (!(*buffer)->mInit) {
+            ALOGE("direct allocation of AHB successful, but BufferItem init failed");
+            buffer->reset();
+            return C2_CORRUPTED;
+        }
+        ALOGV("allocate: direct allocate without igbp");
         return C2_OK;
     }
 
@@ -578,19 +646,29 @@
         sp<GraphicBuffer> realloced;
         status = igbp->requestBuffer(slotId, &realloced);
         if (status != ::android::OK) {
+            ALOGE("allocate by dequeueBuffer() successful, but requestBuffer() failed %d",
+                  status);
             igbp->cancelBuffer(slotId, fence);
             return C2_CORRUPTED;
         }
         *buffer = std::make_shared<BufferItem>(generation, slotId, realloced, fence);
+        if (!*buffer) {
+            ALOGE("allocate by dequeueBuffer() successful, but creating BufferItem failed");
+            igbp->cancelBuffer(slotId, fence);
+            return C2_NO_MEMORY;
+        }
         if (!(*buffer)->mInit) {
+            ALOGE("allocate by dequeueBuffer() successful, but BufferItem init failed");
             buffer->reset();
             igbp->cancelBuffer(slotId, fence);
             return C2_CORRUPTED;
         }
         *cached = false;
-        return C2_OK;
+    } else {
+        *cached = true;
     }
-    *cached = true;
+    ALOGV("allocate: a new allocated buffer from igbp cached %d, slot: %d",
+          *cached, slotId);
     *rSlotId = slotId;
     *rFence = fence;
     return C2_OK;
@@ -600,6 +678,7 @@
         uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
         AHardwareBuffer **buf, sp<Fence> *rFence) {
     if (mStopped.load() == true) {
+        ALOGE("cannot allocate due to being stopped");
         return C2_BAD_STATE;
     }
     std::shared_ptr<BufferCache> cache;
@@ -607,6 +686,7 @@
     if (res != C2_OK) {
         return res;
     }
+    ALOGV("allocatable or dequeueable");
 
     bool cached = false;
     int slotId;
@@ -616,6 +696,8 @@
     res = _allocate(cache, width, height, format, usage, &cached, &slotId, &fence, &buffer);
     commitAllocate(res, cache, cached, slotId, fence, &buffer, &updateDequeue);
     if (res == C2_OK) {
+        ALOGV("allocated a buffer width:%u height:%u pixelformat:%d usage:%llu",
+              width, height, format, (unsigned long long)usage);
         *buf = buffer->mBuf;
         *rFence = buffer->mFence;
         // *buf should be valid even if buffer is dtor-ed.
@@ -660,22 +742,25 @@
             return C2_OK;
         }
         mDequeueable++;
-        l.unlock();
-        writeIncDequeueable(1);
+        writeIncDequeueableLocked(1);
     }
     return C2_OK;
 }
 
 void GraphicsTracker::commitDeallocate(
-        std::shared_ptr<BufferCache> &cache, int slotId, uint64_t bid) {
-    std::lock_guard<std::mutex> l(mLock);
+        std::shared_ptr<BufferCache> &cache, int slotId, uint64_t bid, bool *updateDequeue) {
+    std::unique_lock<std::mutex> l(mLock);
     size_t del1 = mDequeued.erase(bid);
     size_t del2 = mDeallocating.erase(bid);
     CHECK(del1 > 0 && del2 > 0);
-    mDequeueable++;
     if (cache) {
         cache->unblockSlot(slotId);
     }
+    if (adjustDequeueConfLocked(updateDequeue)) {
+        return;
+    }
+    mDequeueable++;
+    writeIncDequeueableLocked(1);
 }
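
The hunks above fold the old unlock-then-write sequence into writeIncDequeueableLocked(), so the dequeueable count and the waitable fd are updated under the same lock. A minimal sketch of that pattern, assuming a plain POSIX pipe and placeholder names in place of the real GraphicsTracker members:

    #include <cstdint>
    #include <mutex>
    #include <unistd.h>

    struct DequeueCounter {                 // placeholder, not the real GraphicsTracker
        std::mutex mLock;
        int mDequeueable = 0;
        int mWritePipeFd = -1;              // writing end of the waitable pipe

        // Called with mLock held: make the pipe readable once per freed buffer so a
        // poller on the reading end wakes up, without dropping the lock in between.
        void writeIncDequeueableLocked(int inc) {
            const uint8_t token = 0;
            for (int i = 0; i < inc; ++i) {
                (void)::write(mWritePipeFd, &token, 1);
            }
        }

        void onBufferFreed() {
            std::lock_guard<std::mutex> l(mLock);
            mDequeueable++;
            writeIncDequeueableLocked(1);
        }
    };

    int main() {
        int fds[2];
        if (::pipe(fds) != 0) return 1;
        DequeueCounter counter;
        counter.mWritePipeFd = fds[1];
        counter.onBufferFreed();            // one token becomes readable on fds[0]
        ::close(fds[0]);
        ::close(fds[1]);
        return 0;
    }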
 
 
@@ -701,12 +786,16 @@
     // cache->mIgbp is not null, if completed is false.
     (void)cache->mIgbp->cancelBuffer(slotId, rFence);
 
-    commitDeallocate(cache, slotId, bid);
+    commitDeallocate(cache, slotId, bid, &updateDequeue);
+    if (updateDequeue) {
+        updateDequeueConf();
+    }
     return C2_OK;
 }
 
 c2_status_t GraphicsTracker::requestRender(uint64_t bid, std::shared_ptr<BufferCache> *cache,
                                           std::shared_ptr<BufferItem> *pBuffer,
+                                          bool *fromCache,
                                           bool *updateDequeue) {
     std::unique_lock<std::mutex> l(mLock);
     if (mDeallocating.find(bid) != mDeallocating.end()) {
@@ -727,8 +816,7 @@
             return C2_BAD_STATE;
         }
         mDequeueable++;
-        l.unlock();
-        writeIncDequeueable(1);
+        writeIncDequeueableLocked(1);
         return C2_BAD_STATE;
     }
     std::shared_ptr<BufferItem> buffer = it->second;
@@ -737,38 +825,42 @@
         auto it = mBufferCache->mBuffers.find(buffer->mSlot);
         CHECK(it != mBufferCache->mBuffers.end() && it->second.get() == buffer.get());
         mBufferCache->blockSlot(buffer->mSlot);
+        *fromCache = true;
+    } else {
+        *fromCache = false;
     }
     *pBuffer = buffer;
     mDeallocating.emplace(bid);
     return C2_OK;
 }
 
-void GraphicsTracker::commitRender(uint64_t origBid,
-                                  const std::shared_ptr<BufferCache> &cache,
+void GraphicsTracker::commitRender(const std::shared_ptr<BufferCache> &cache,
                                   const std::shared_ptr<BufferItem> &buffer,
+                                  const std::shared_ptr<BufferItem> &oldBuffer,
+                                  bool bufferReplaced,
                                   bool *updateDequeue) {
     std::unique_lock<std::mutex> l(mLock);
-    uint64_t bid = buffer->mId;
+    uint64_t origBid = oldBuffer ? oldBuffer->mId : buffer->mId;
 
-    if (cache.get() != mBufferCache.get()) {
+    if (cache) {
+        cache->unblockSlot(buffer->mSlot);
+        if (oldBuffer) {
+            // migrated; register the new buffer with the cache.
+            cache->mBuffers.emplace(buffer->mSlot, buffer);
+        }
+    }
+    mDeallocating.erase(origBid);
+    mDequeued.erase(origBid);
+
+    if (cache.get() != mBufferCache.get() || bufferReplaced) {
         // Surface changed, no need to wait for buffer being released.
-        mDeallocating.erase(bid);
-        mDequeued.erase(bid);
         if (adjustDequeueConfLocked(updateDequeue)) {
             return;
         }
         mDequeueable++;
-        l.unlock();
-        writeIncDequeueable(1);
+        writeIncDequeueableLocked(1);
         return;
     }
-
-    if (origBid != bid) {
-        // migration happened, need to register the buffer to Cache
-        mBufferCache->mBuffers.emplace(buffer->mSlot, buffer);
-    }
-    mDeallocating.erase(bid);
-    mDequeued.erase(bid);
 }
 
 c2_status_t GraphicsTracker::render(const C2ConstGraphicBlock& blk,
@@ -780,59 +872,73 @@
         ALOGE("retrieving AHB-ID for GraphicBlock failed");
         return C2_CORRUPTED;
     }
+    std::shared_ptr<_C2BlockPoolData> poolData =
+            _C2BlockFactory::GetGraphicBlockPoolData(blk);
+    _C2BlockFactory::DisownIgbaBlock(poolData);
     std::shared_ptr<BufferCache> cache;
     std::shared_ptr<BufferItem> buffer;
+    std::shared_ptr<BufferItem> oldBuffer;
     bool updateDequeue = false;
-    res = requestRender(bid, &cache, &buffer, &updateDequeue);
+    bool fromCache = false;
+    res = requestRender(bid, &cache, &buffer, &fromCache, &updateDequeue);
     if (res != C2_OK) {
         if (updateDequeue) {
             updateDequeueConf();
         }
         return res;
     }
-    ::android::status_t migrateRes = ::android::OK;
-    ::android::status_t renderRes = ::android::OK;
-    if (cache->mGeneration != buffer->mGeneration) {
+    int cacheSlotId = fromCache ? buffer->mSlot : -1;
+    ALOGV("render prepared: igbp(%d) slot(%d)", bool(cache->mIgbp), cacheSlotId);
+    if (!fromCache) {
+        // The buffer does not come from the current cache.
+        // The buffer needs to be migrated (attached).
         uint64_t newUsage = 0ULL;
-        int slotId = -1;;
 
         (void) cache->mIgbp->getConsumerUsage(&newUsage);
-        sp<GraphicBuffer> gb = buffer->updateBuffer(newUsage, cache->mGeneration);
-        if (gb) {
-            migrateRes = cache->mIgbp->attachBuffer(&(buffer->mSlot), gb);
-        } else {
-            ALOGW("realloc-ing a new buffer for migration failed");
-            migrateRes = ::android::INVALID_OPERATION;
-        }
-    }
-    if (migrateRes == ::android::OK) {
-        renderRes = cache->mIgbp->queueBuffer(buffer->mSlot, input, output);
-        if (renderRes != ::android::OK) {
-            CHECK(renderRes != ::android::BAD_VALUE);
-        }
-    }
-    if (migrateRes != ::android::OK || renderRes != ::android::OK) {
-        // since it is not renderable, just de-allocate
-        if (migrateRes != ::android::OK) {
+        std::shared_ptr<BufferItem> newBuffer =
+                buffer->migrateBuffer(newUsage, cache->mGeneration);
+        sp<GraphicBuffer> gb = newBuffer ? newBuffer->getGraphicBuffer() : nullptr;
+
+        if (!gb) {
+            ALOGE("render: realloc-ing a new buffer for migration failed");
             std::shared_ptr<BufferCache> nullCache;
-            commitDeallocate(nullCache, -1, bid);
-        } else {
-            (void) cache->mIgbp->cancelBuffer(buffer->mSlot, input.fence);
-            commitDeallocate(cache, buffer->mSlot, bid);
+            commitDeallocate(nullCache, -1, bid, &updateDequeue);
+            if (updateDequeue) {
+                updateDequeueConf();
+            }
+            return C2_REFUSED;
         }
-        ALOGE("migration error(%d), render error(%d)", (int)migrateRes, (int)renderRes);
+        if (cache->mIgbp->attachBuffer(&(newBuffer->mSlot), gb) != ::android::OK) {
+            ALOGE("render: attaching a new buffer to IGBP failed");
+            std::shared_ptr<BufferCache> nullCache;
+            commitDeallocate(nullCache, -1, bid, &updateDequeue);
+            if (updateDequeue) {
+                updateDequeueConf();
+            }
+            return C2_REFUSED;
+        }
+        cache->waitOnSlot(newBuffer->mSlot);
+        cache->blockSlot(newBuffer->mSlot);
+        oldBuffer = buffer;
+        buffer = newBuffer;
+    }
+    ::android::status_t renderRes = cache->mIgbp->queueBuffer(buffer->mSlot, input, output);
+    ALOGV("render done: migration(%d), render(err = %d)", !fromCache, renderRes);
+    if (renderRes != ::android::OK) {
+        CHECK(renderRes != ::android::BAD_VALUE);
+        ALOGE("render: failed to queueBuffer() err = %d", renderRes);
+        (void) cache->mIgbp->cancelBuffer(buffer->mSlot, input.fence);
+        commitDeallocate(cache, buffer->mSlot, bid, &updateDequeue);
+        if (updateDequeue) {
+            updateDequeueConf();
+        }
         return C2_REFUSED;
     }
 
-    updateDequeue = false;
-    commitRender(bid, cache, buffer, &updateDequeue);
+    commitRender(cache, buffer, oldBuffer, output->bufferReplaced, &updateDequeue);
     if (updateDequeue) {
         updateDequeueConf();
     }
-    if (output->bufferReplaced) {
-        // in case of buffer drop during render
-        onReleased(cache->mGeneration);
-    }
     return C2_OK;
 }
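
For reference, a stripped-down sketch of the reworked render() control flow: migrate (attach) only when the buffer is not from the current cache, then queue it, and fall back to deallocation on any failure. All types and helpers below are placeholders, not the real IGBP/BufferItem API:

    #include <cstdio>
    #include <memory>

    struct Buffer { int slot = -1; };
    static bool attachToSurface(Buffer&) { return true; }        // stand-in for IGBP::attachBuffer()
    static bool queueToSurface(const Buffer&) { return true; }   // stand-in for IGBP::queueBuffer()

    static bool renderSketch(std::shared_ptr<Buffer> buffer, bool fromCache) {
        std::shared_ptr<Buffer> oldBuffer;
        if (!fromCache) {
            // re-create the buffer with the surface's usage/generation (migrateBuffer()).
            auto migrated = std::make_shared<Buffer>(*buffer);
            if (!attachToSurface(*migrated)) {
                return false;                 // commitDeallocate(nullCache, ...) + C2_REFUSED
            }
            oldBuffer = buffer;
            buffer = migrated;
        }
        if (!queueToSurface(*buffer)) {
            return false;                     // cancelBuffer() + commitDeallocate() + C2_REFUSED
        }
        // commitRender(cache, buffer, oldBuffer, bufferReplaced, ...) runs on success.
        std::printf("queued slot %d (migrated=%d)\n", buffer->slot, oldBuffer != nullptr);
        return true;
    }

    int main() {
        renderSketch(std::make_shared<Buffer>(), /*fromCache=*/true);
        renderSketch(std::make_shared<Buffer>(), /*fromCache=*/false);
        return 0;
    }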
 
@@ -843,8 +949,7 @@
         if (mBufferCache->mGeneration == generation) {
             if (!adjustDequeueConfLocked(&updateDequeue)) {
                 mDequeueable++;
-                l.unlock();
-                writeIncDequeueable(1);
+                writeIncDequeueableLocked(1);
             }
         }
     }
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index e3f8b1c..85b5ec8 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -16,8 +16,11 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2Client"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <android-base/logging.h>
+#include <utils/Trace.h>
 
+#include <codec2/aidl/GraphicBufferAllocator.h>
 #include <codec2/hidl/client.h>
 #include <C2Debug.h>
 #include <C2BufferPriv.h>
@@ -45,6 +48,7 @@
 #include <aidl/android/hardware/media/c2/StructDescriptor.h>
 
 #include <aidlcommonsupport/NativeHandle.h>
+#include <android/api-level.h>
 #include <android/binder_auto_utils.h>
 #include <android/binder_ibinder.h>
 #include <android/binder_manager.h>
@@ -72,6 +76,7 @@
 #include <limits>
 #include <map>
 #include <mutex>
+#include <optional>
 #include <sstream>
 #include <thread>
 #include <type_traits>
@@ -94,6 +99,9 @@
         V2_0::utils::H2BGraphicBufferProducer;
 using ::android::hardware::media::c2::V1_2::SurfaceSyncObj;
 
+using AidlGraphicBufferAllocator = ::aidl::android::hardware::media::c2::
+        implementation::GraphicBufferAllocator;
+
 namespace bufferpool2_aidl = ::aidl::android::hardware::media::bufferpool2;
 namespace bufferpool_hidl = ::android::hardware::media::bufferpool::V2_0;
 namespace c2_aidl = ::aidl::android::hardware::media::c2;
@@ -627,21 +635,22 @@
     if (heapParams) {
         heapParams->reserve(heapParams->size() + numIndices);
     }
-    c2_aidl::Params result;
+    c2_aidl::IConfigurable::QueryResult result;
     ndk::ScopedAStatus transStatus = mBase->query(indices, (mayBlock == C2_MAY_BLOCK), &result);
     c2_status_t status = GetC2Status(transStatus, "query");
     if (status != C2_OK) {
         return status;
     }
+    status = static_cast<c2_status_t>(result.status.status);
 
     std::vector<C2Param*> paramPointers;
-    if (!c2_aidl::utils::ParseParamsBlob(&paramPointers, result)) {
+    if (!c2_aidl::utils::ParseParamsBlob(&paramPointers, result.params)) {
         LOG(ERROR) << "query -- error while parsing params.";
         return C2_CORRUPTED;
     }
     size_t i = 0;
-    for (auto it = paramPointers.begin();
-            it != paramPointers.end(); ) {
+    size_t numUpdatedStackParams = 0;
+    for (auto it = paramPointers.begin(); it != paramPointers.end(); ) {
         C2Param* paramPointer = *it;
         if (numStackIndices > 0) {
             --numStackIndices;
@@ -668,7 +677,9 @@
                 status = C2_BAD_INDEX;
                 continue;
             }
-            if (!stackParams[i++]->updateFrom(*paramPointer)) {
+            if (stackParams[i++]->updateFrom(*paramPointer)) {
+                ++numUpdatedStackParams;
+            } else {
                 LOG(WARNING) << "query -- param update failed: "
                                 "index = "
                              << paramPointer->index() << ".";
@@ -688,6 +699,13 @@
         }
         ++it;
     }
+    size_t numQueried = numUpdatedStackParams;
+    if (heapParams) {
+        numQueried += heapParams->size();
+    }
+    if (status == C2_OK && indices.size() != numQueried) {
+        status = C2_BAD_INDEX;
+    }
     return status;
 }
 
@@ -706,6 +724,7 @@
     if (status != C2_OK) {
         return status;
     }
+    status = static_cast<c2_status_t>(result.status.status);
     size_t i = failures->size();
     failures->resize(i + result.failures.size());
     for (const c2_aidl::SettingResult& sf : result.failures) {
@@ -756,21 +775,23 @@
         }
     }
 
-    std::vector<c2_aidl::FieldSupportedValuesQueryResult> result;
+    c2_aidl::IConfigurable::QuerySupportedValuesResult result;
+
     ndk::ScopedAStatus transStatus = mBase->querySupportedValues(
             inFields, (mayBlock == C2_MAY_BLOCK), &result);
     c2_status_t status = GetC2Status(transStatus, "querySupportedValues");
     if (status != C2_OK) {
         return status;
     }
-    if (result.size() != fields.size()) {
+    status = static_cast<c2_status_t>(result.status.status);
+    if (result.values.size() != fields.size()) {
         LOG(ERROR) << "querySupportedValues -- "
                       "input and output lists "
                       "have different sizes.";
         return C2_CORRUPTED;
     }
     for (size_t i = 0; i < fields.size(); ++i) {
-        if (!c2_aidl::utils::FromAidl(&fields[i], inFields[i], result[i])) {
+        if (!c2_aidl::utils::FromAidl(&fields[i], inFields[i], result.values[i])) {
             LOG(ERROR) << "querySupportedValues -- "
                           "invalid returned value.";
             return C2_CORRUPTED;
@@ -1039,6 +1060,85 @@
     }
 };
 
+// The class holds GraphicBufferAllocator and the associated id of
+// HAL side BlockPool.
+// This is tightly coupled with BlockPool creation and destruction.
+// The life cycle inside the class is as follows.
+//
+// On a createBlockPool client request:
+//    1. this::create() creates a GraphicBufferAllocator and sets it as
+//        the current one.
+//    2. C2AIDL_HAL::createBlockPool() creates a C2BlockPool using
+//        the GraphicBufferAllocator created in #1.
+//    3. this::setCurrentId() associates the id returned in #2 with the current allocator.
+//
+// On a destroyBlockPool client request:
+//    1. C2AIDL_HAL::destroyBlockPool() destroys the block pool
+//       in the HAL process.
+//    2. this::remove() destroys the GraphicBufferAllocator which is associated
+//       with the C2BlockPool in #1.
+//
+struct Codec2Client::Component::GraphicBufferAllocators {
+private:
+    std::optional<C2BlockPool::local_id_t> mCurrentId;
+    std::shared_ptr<AidlGraphicBufferAllocator> mCurrent;
+
+    // A new BlockPool is created before the old BlockPool is destroyed.
+    // This holds a reference to the old BlockPool's allocator from the time a new
+    // BlockPool is created until destruction of the old BlockPool is explicitly requested.
+    std::map<C2BlockPool::local_id_t, std::shared_ptr<AidlGraphicBufferAllocator>> mOlds;
+    std::mutex mMutex;
+
+public:
+    // Creates a GraphicBufferAllocator which will be passed to the HAL
+    // for creating a C2BlockPool. The created GraphicBufferAllocator
+    // will be returned by current() afterwards.
+    std::shared_ptr<AidlGraphicBufferAllocator> create() {
+        std::unique_lock<std::mutex> l(mMutex);
+        if (mCurrent) {
+            // If this is not stopped.
+            mCurrent->reset();
+            if (mCurrentId.has_value()) {
+                mOlds.emplace(mCurrentId.value(), mCurrent);
+            }
+            mCurrentId.reset();
+            mCurrent.reset();
+        }
+        // TODO: integrate initial value with CCodec/CCodecBufferChannel
+        mCurrent =
+                AidlGraphicBufferAllocator::CreateGraphicBufferAllocator(3 /* maxDequeueCount */);
+        ALOGD("GraphicBufferAllocator created");
+        return mCurrent;
+    }
+
+    // Associates the blockpool id returned from the HAL with the
+    // current GraphicBufferAllocator.
+    void setCurrentId(C2BlockPool::local_id_t id) {
+        std::unique_lock<std::mutex> l(mMutex);
+        CHECK(!mCurrentId.has_value());
+        mCurrentId = id;
+    }
+
+    // Returns the current GraphicBufferAllocator.
+    std::shared_ptr<AidlGraphicBufferAllocator> current() {
+        std::unique_lock<std::mutex> l(mMutex);
+        return mCurrent;
+    }
+
+    // Removes the GraphicBufferAllocator associated with given \p id.
+    void remove(C2BlockPool::local_id_t id) {
+        std::unique_lock<std::mutex> l(mMutex);
+        mOlds.erase(id);
+        if (mCurrentId == id) {
+            if (mCurrent) {
+                mCurrent->reset();
+                mCurrent.reset();
+            }
+            mCurrentId.reset();
+        }
+    }
+};
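
A small usage sketch of the lifecycle described in the comment above, with stand-ins for the AIDL allocator and the HAL calls (all names below are placeholders):

    #include <cstdint>
    #include <cstdio>
    #include <memory>

    struct Gba {};                                        // stand-in for AidlGraphicBufferAllocator
    struct Registry {                                     // stand-in for GraphicBufferAllocators
        std::shared_ptr<Gba> create() { return std::make_shared<Gba>(); }
        void setCurrentId(uint64_t) {}
        void remove(uint64_t) {}
    };
    static uint64_t halCreateBlockPool(const std::shared_ptr<Gba>&) { return 7; } // returns blockPoolId
    static void halDestroyBlockPool(uint64_t) {}

    int main() {
        Registry allocators;
        // createBlockPool(): steps 1-3 of the lifecycle comment.
        std::shared_ptr<Gba> gba = allocators.create();
        uint64_t poolId = halCreateBlockPool(gba);
        allocators.setCurrentId(poolId);
        // destroyBlockPool(): steps 1-2 of the lifecycle comment.
        halDestroyBlockPool(poolId);
        allocators.remove(poolId);
        std::printf("block pool %llu created and destroyed\n", (unsigned long long)poolId);
        return 0;
    }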
+
 // Codec2Client
 Codec2Client::Codec2Client(sp<HidlBase> const& base,
                            sp<c2_hidl::IConfigurable> const& configurable,
@@ -1123,6 +1223,7 @@
                        << status << ".";
         }
         (*component)->mAidlBufferPoolSender->setReceiver(mAidlHostPoolManager);
+        aidlListener->component = *component;
         return status;
     }
 
@@ -1438,35 +1539,39 @@
 std::vector<std::string> Codec2Client::CacheServiceNames() {
     std::vector<std::string> names;
 
-    if (c2_aidl::utils::IsEnabled()) {
-        // Get AIDL service names
-        AServiceManager_forEachDeclaredInstance(
-                AidlBase::descriptor, &names, [](const char *name, void *context) {
-                    std::vector<std::string> *names = (std::vector<std::string> *)context;
-                    names->emplace_back(name);
-                });
-    }
-
-    // Get HIDL service names
-    using ::android::hardware::media::c2::V1_0::IComponentStore;
-    using ::android::hidl::manager::V1_2::IServiceManager;
-    while (true) {
-        sp<IServiceManager> serviceManager = IServiceManager::getService();
-        CHECK(serviceManager) << "Hardware service manager is not running.";
-
-        Return<void> transResult;
-        transResult = serviceManager->listManifestByInterface(
-                IComponentStore::descriptor,
-                [&names](
-                        hidl_vec<hidl_string> const& instanceNames) {
-                    names.insert(names.end(), instanceNames.begin(), instanceNames.end());
-                });
-        if (transResult.isOk()) {
-            break;
+    if (c2_aidl::utils::IsSelected()) {
+        if (__builtin_available(android __ANDROID_API_S__, *)) {
+            // Get AIDL service names
+            AServiceManager_forEachDeclaredInstance(
+                    AidlBase::descriptor, &names, [](const char *name, void *context) {
+                        std::vector<std::string> *names = (std::vector<std::string> *)context;
+                        names->emplace_back(name);
+                    });
+        } else {
+            LOG(FATAL) << "C2 AIDL cannot be selected on Android version older than 35";
         }
-        LOG(ERROR) << "Could not retrieve the list of service instances of "
-                   << IComponentStore::descriptor
-                   << ". Retrying...";
+    } else {
+        // Get HIDL service names
+        using ::android::hardware::media::c2::V1_0::IComponentStore;
+        using ::android::hidl::manager::V1_2::IServiceManager;
+        while (true) {
+            sp<IServiceManager> serviceManager = IServiceManager::getService();
+            CHECK(serviceManager) << "Hardware service manager is not running.";
+
+            Return<void> transResult;
+            transResult = serviceManager->listManifestByInterface(
+                    IComponentStore::descriptor,
+                    [&names](
+                            hidl_vec<hidl_string> const& instanceNames) {
+                        names.insert(names.end(), instanceNames.begin(), instanceNames.end());
+                    });
+            if (transResult.isOk()) {
+                break;
+            }
+            LOG(ERROR) << "Could not retrieve the list of service instances of "
+                       << IComponentStore::descriptor
+                       << ". Retrying...";
+        }
     }
     // Sort service names in each category.
     std::stable_sort(
@@ -1545,34 +1650,41 @@
     std::string const& name = GetServiceNames()[index];
     LOG(VERBOSE) << "Creating a Codec2 client to service \"" << name << "\"";
 
-    if (c2_aidl::utils::IsEnabled()) {
-        std::string instanceName =
-            ::android::base::StringPrintf("%s/%s", AidlBase::descriptor, name.c_str());
-        if (AServiceManager_isDeclared(instanceName.c_str())) {
-            std::shared_ptr<AidlBase> baseStore = AidlBase::fromBinder(
-                    ::ndk::SpAIBinder(AServiceManager_waitForService(instanceName.c_str())));
-            CHECK(baseStore) << "Codec2 AIDL service \"" << name << "\""
-                                " inaccessible for unknown reasons.";
-            LOG(VERBOSE) << "Client to Codec2 AIDL service \"" << name << "\" created";
-            std::shared_ptr<c2_aidl::IConfigurable> configurable;
-            ::ndk::ScopedAStatus transStatus = baseStore->getConfigurable(&configurable);
-            CHECK(transStatus.isOk()) << "Codec2 AIDL service \"" << name << "\""
-                                        "does not have IConfigurable.";
-            return std::make_shared<Codec2Client>(baseStore, configurable, index);
+    if (c2_aidl::utils::IsSelected()) {
+        if (__builtin_available(android __ANDROID_API_S__, *)) {
+            std::string instanceName =
+                ::android::base::StringPrintf("%s/%s", AidlBase::descriptor, name.c_str());
+            if (AServiceManager_isDeclared(instanceName.c_str())) {
+                std::shared_ptr<AidlBase> baseStore = AidlBase::fromBinder(
+                        ::ndk::SpAIBinder(AServiceManager_waitForService(instanceName.c_str())));
+                CHECK(baseStore) << "Codec2 AIDL service \"" << name << "\""
+                                    " inaccessible for unknown reasons.";
+                LOG(VERBOSE) << "Client to Codec2 AIDL service \"" << name << "\" created";
+                std::shared_ptr<c2_aidl::IConfigurable> configurable;
+                ::ndk::ScopedAStatus transStatus = baseStore->getConfigurable(&configurable);
+                CHECK(transStatus.isOk()) << "Codec2 AIDL service \"" << name << "\""
+                                            "does not have IConfigurable.";
+                return std::make_shared<Codec2Client>(baseStore, configurable, index);
+            } else {
+                LOG(ERROR) << "Codec2 AIDL service \"" << name << "\" is not declared";
+            }
+        } else {
+            LOG(FATAL) << "C2 AIDL cannot be selected on Android version older than 35";
         }
+    } else {
+        std::string instanceName = "android.hardware.media.c2/" + name;
+        sp<HidlBase> baseStore = HidlBase::getService(name);
+        CHECK(baseStore) << "Codec2 service \"" << name << "\""
+                            " inaccessible for unknown reasons.";
+        LOG(VERBOSE) << "Client to Codec2 service \"" << name << "\" created";
+        Return<sp<c2_hidl::IConfigurable>> transResult = baseStore->getConfigurable();
+        CHECK(transResult.isOk()) << "Codec2 service \"" << name << "\""
+                                    "does not have IConfigurable.";
+        sp<c2_hidl::IConfigurable> configurable =
+            static_cast<sp<c2_hidl::IConfigurable>>(transResult);
+        return std::make_shared<Codec2Client>(baseStore, configurable, index);
     }
-
-    std::string instanceName = "android.hardware.media.c2/" + name;
-    sp<HidlBase> baseStore = HidlBase::getService(name);
-    CHECK(baseStore) << "Codec2 service \"" << name << "\""
-                        " inaccessible for unknown reasons.";
-    LOG(VERBOSE) << "Client to Codec2 service \"" << name << "\" created";
-    Return<sp<c2_hidl::IConfigurable>> transResult = baseStore->getConfigurable();
-    CHECK(transResult.isOk()) << "Codec2 service \"" << name << "\""
-                                "does not have IConfigurable.";
-    sp<c2_hidl::IConfigurable> configurable =
-        static_cast<sp<c2_hidl::IConfigurable>>(transResult);
-    return std::make_shared<Codec2Client>(baseStore, configurable, index);
+    return nullptr;
 }
 
 c2_status_t Codec2Client::ForAllServices(
@@ -1938,7 +2050,7 @@
         },
         mAidlBase{base},
         mAidlBufferPoolSender{std::make_unique<AidlBufferPoolSender>()},
-        mOutputBufferQueue{std::make_unique<OutputBufferQueue>()} {
+        mGraphicBufferAllocators{std::make_unique<GraphicBufferAllocators>()} {
 }
 
 Codec2Client::Component::~Component() {
@@ -1953,11 +2065,42 @@
         std::shared_ptr<Codec2Client::Configurable>* configurable) {
     if (mAidlBase) {
         c2_aidl::IComponent::BlockPool aidlBlockPool;
-        ::ndk::ScopedAStatus transStatus = mAidlBase->createBlockPool(static_cast<int32_t>(id),
-                                                                      &aidlBlockPool);
-        c2_status_t status = GetC2Status(transStatus, "createBlockPool");
-        if (status != C2_OK) {
-            return status;
+        c2_status_t status = C2_OK;
+
+        // TODO: Temporary mapping for the current CCodecBufferChannel.
+        // Handle this properly and remove this temporary allocator mapping.
+        id = id == C2PlatformAllocatorStore::BUFFERQUEUE ?
+                C2PlatformAllocatorStore::IGBA : id;
+
+        if (id == C2PlatformAllocatorStore::IGBA)  {
+            std::shared_ptr<AidlGraphicBufferAllocator> gba =
+                    mGraphicBufferAllocators->create();
+            ::ndk::ScopedFileDescriptor waitableFd;
+            ::ndk::ScopedAStatus ret = gba->getWaitableFd(&waitableFd);
+            status = GetC2Status(ret, "Gba::getWaitableFd");
+            if (status != C2_OK) {
+                return status;
+            }
+            c2_aidl::IComponent::BlockPoolAllocator allocator;
+            allocator.set<c2_aidl::IComponent::BlockPoolAllocator::allocator>();
+            allocator.get<c2_aidl::IComponent::BlockPoolAllocator::allocator>().igba =
+                    c2_aidl::IGraphicBufferAllocator::fromBinder(gba->asBinder());
+            allocator.get<c2_aidl::IComponent::BlockPoolAllocator::allocator>().waitableFd =
+                    std::move(waitableFd);
+            ::ndk::ScopedAStatus transStatus = mAidlBase->createBlockPool(
+                    allocator, &aidlBlockPool);
+            status = GetC2Status(transStatus, "createBlockPool");
+            if (status != C2_OK) {
+                return status;
+            }
+            mGraphicBufferAllocators->setCurrentId(aidlBlockPool.blockPoolId);
+        } else {
+            ::ndk::ScopedAStatus transStatus = mAidlBase->createBlockPool(
+                    static_cast<int32_t>(id), &aidlBlockPool);
+            status = GetC2Status(transStatus, "createBlockPool");
+            if (status != C2_OK) {
+                return status;
+            }
         }
         *blockPoolId = aidlBlockPool.blockPoolId;
         *configurable = std::make_shared<Configurable>(aidlBlockPool.configurable);
@@ -1990,6 +2133,7 @@
 c2_status_t Codec2Client::Component::destroyBlockPool(
         C2BlockPool::local_id_t localId) {
     if (mAidlBase) {
+        mGraphicBufferAllocators->remove(localId);
         ::ndk::ScopedAStatus transStatus = mAidlBase->destroyBlockPool(localId);
         return GetC2Status(transStatus, "destroyBlockPool");
     }
@@ -2004,8 +2148,12 @@
 
 void Codec2Client::Component::handleOnWorkDone(
         const std::list<std::unique_ptr<C2Work>> &workItems) {
-    // Output bufferqueue-based blocks' lifetime management
-    mOutputBufferQueue->holdBufferQueueBlocks(workItems);
+    if (mAidlBase) {
+        holdIgbaBlocks(workItems);
+    } else {
+        // Output bufferqueue-based blocks' lifetime management
+        mOutputBufferQueue->holdBufferQueueBlocks(workItems);
+    }
 }
 
 c2_status_t Codec2Client::Component::queue(
@@ -2089,8 +2237,12 @@
         }
     }
 
-    // Output bufferqueue-based blocks' lifetime management
-    mOutputBufferQueue->holdBufferQueueBlocks(*flushedWork);
+    if (mAidlBase) {
+        holdIgbaBlocks(*flushedWork);
+    } else {
+        // Output bufferqueue-based blocks' lifetime management
+        mOutputBufferQueue->holdBufferQueueBlocks(*flushedWork);
+    }
 
     return status;
 }
@@ -2229,6 +2381,17 @@
         const sp<IGraphicBufferProducer>& surface,
         uint32_t generation,
         int maxDequeueCount) {
+    if (mAidlBase) {
+        std::shared_ptr<AidlGraphicBufferAllocator> gba =
+              mGraphicBufferAllocators->current();
+        if (!gba) {
+            LOG(ERROR) << "setOutputSurface for AIDL -- "
+                       "GraphicBufferAllocator was not created.";
+            return C2_CORRUPTED;
+        }
+        bool ret = gba->configure(surface, generation, maxDequeueCount);
+        return ret ? C2_OK : C2_CORRUPTED;
+    }
     uint64_t bqId = 0;
     sp<IGraphicBufferProducer> nullIgbp;
     sp<HGraphicBufferProducer2> nullHgbp;
@@ -2290,10 +2453,6 @@
     ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx%s",
             generation, (long long)consumerUsage, syncObj ? " sync" : "");
 
-    if (mAidlBase) {
-        // FIXME
-        return C2_OMITTED;
-    }
     Return<c2_hidl::Status> transStatus = syncObj ?
             mHidlBase1_2->setOutputSurfaceWithSyncObj(
                     static_cast<uint64_t>(blockPoolId),
@@ -2321,26 +2480,52 @@
         const C2ConstGraphicBlock& block,
         const QueueBufferInput& input,
         QueueBufferOutput* output) {
+    ScopedTrace trace(ATRACE_TAG,"Codec2Client::Component::queueToOutputSurface");
+    if (mAidlBase) {
+        std::shared_ptr<AidlGraphicBufferAllocator> gba =
+                mGraphicBufferAllocators->current();
+        if (gba) {
+            return gba->displayBuffer(block, input, output);
+        } else {
+            return C2_NOT_FOUND;
+        }
+    }
     return mOutputBufferQueue->outputBuffer(block, input, output);
 }
 
 void Codec2Client::Component::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+    if (mAidlBase) {
+        // TODO b/311348680
+        return;
+    }
     mOutputBufferQueue->pollForRenderedFrames(delta);
 }
 
 void Codec2Client::Component::setOutputSurfaceMaxDequeueCount(
         int maxDequeueCount) {
+    if (mAidlBase) {
+        std::shared_ptr<AidlGraphicBufferAllocator> gba =
+                mGraphicBufferAllocators->current();
+        if (gba) {
+            gba->updateMaxDequeueBufferCount(maxDequeueCount);
+        }
+        return;
+    }
     mOutputBufferQueue->updateMaxDequeueBufferCount(maxDequeueCount);
 }
 
 void Codec2Client::Component::stopUsingOutputSurface(
         C2BlockPool::local_id_t blockPoolId) {
-    std::scoped_lock lock(mOutputMutex);
-    mOutputBufferQueue->stop();
     if (mAidlBase) {
-        // FIXME
+        std::shared_ptr<AidlGraphicBufferAllocator> gba =
+                mGraphicBufferAllocators->current();
+        if (gba) {
+            gba->reset();
+        }
         return;
     }
+    std::scoped_lock lock(mOutputMutex);
+    mOutputBufferQueue->stop();
     Return<c2_hidl::Status> transStatus = mHidlBase1_0->setOutputSurface(
             static_cast<uint64_t>(blockPoolId), nullptr);
     if (!transStatus.isOk()) {
@@ -2356,6 +2541,52 @@
     mOutputBufferQueue->expireOldWaiters();
 }
 
+void Codec2Client::Component::onBufferReleasedFromOutputSurface(
+        uint32_t generation) {
+    if (mAidlBase) {
+        std::shared_ptr<AidlGraphicBufferAllocator> gba =
+                mGraphicBufferAllocators->current();
+        if (gba) {
+            gba->onBufferReleased(generation);
+        }
+        return;
+    }
+    mOutputBufferQueue->onBufferReleased(generation);
+}
+
+void Codec2Client::Component::holdIgbaBlocks(
+        const std::list<std::unique_ptr<C2Work>>& workList) {
+    if (!mAidlBase) {
+        return;
+    }
+    std::shared_ptr<AidlGraphicBufferAllocator> gba =
+            mGraphicBufferAllocators->current();
+    if (!gba) {
+        return;
+    }
+    std::shared_ptr<c2_aidl::IGraphicBufferAllocator> igba =
+            c2_aidl::IGraphicBufferAllocator::fromBinder(gba->asBinder());
+    for (const std::unique_ptr<C2Work>& work : workList) {
+        if (!work) [[unlikely]] {
+            continue;
+        }
+        for (const std::unique_ptr<C2Worklet>& worklet : work->worklets) {
+            if (!worklet) {
+                continue;
+            }
+            for (const std::shared_ptr<C2Buffer>& buffer : worklet->output.buffers) {
+                if (buffer) {
+                    for (const C2ConstGraphicBlock& block : buffer->data().graphicBlocks()) {
+                        std::shared_ptr<_C2BlockPoolData> poolData =
+                              _C2BlockFactory::GetGraphicBlockPoolData(block);
+                        _C2BlockFactory::RegisterIgba(poolData, igba);
+                    }
+                }
+            }
+        }
+    }
+}
+
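The traversal in holdIgbaBlocks() only registers blocks that survive every null check on the way down. A simplified, self-contained sketch of the same null-safe walk, using placeholder types instead of C2Work/C2Worklet/C2Buffer:

    #include <cstdio>
    #include <list>
    #include <memory>
    #include <vector>

    struct Block {};                                          // placeholder for C2ConstGraphicBlock
    struct Buffer { std::vector<Block> blocks; };             // placeholder for C2Buffer
    struct Worklet { std::vector<std::shared_ptr<Buffer>> outputBuffers; };
    struct Work { std::list<std::unique_ptr<Worklet>> worklets; };

    static void registerBlocks(const std::list<std::unique_ptr<Work>>& workList) {
        for (const auto& work : workList) {
            if (!work) continue;                              // skip null work items
            for (const auto& worklet : work->worklets) {
                if (!worklet) continue;
                for (const auto& buffer : worklet->outputBuffers) {
                    if (!buffer) continue;
                    for (const Block& block : buffer->blocks) {
                        (void)block;                          // RegisterIgba() would be called here
                        std::printf("registered one graphic block\n");
                    }
                }
            }
        }
    }

    int main() {
        std::list<std::unique_ptr<Work>> workList;
        workList.push_back(std::make_unique<Work>());
        registerBlocks(workList);                             // empty work: nothing registered
        return 0;
    }
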
 c2_status_t Codec2Client::Component::connectToInputSurface(
         const std::shared_ptr<InputSurface>& inputSurface,
         std::shared_ptr<InputSurfaceConnection>* connection) {
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index 1fd9049..dd6c869 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -27,6 +27,7 @@
 #include <mutex>
 #include <set>
 #include <thread>
+#include <optional>
 
 #include <C2Buffer.h>
 
@@ -153,6 +154,11 @@
     c2_status_t getWaitableFd(int *pipeFd);
 
     /**
+     * Get the current max allocatable/dequeueable buffer count without de-allocating.
+     */
+    int getCurDequeueable();
+
+    /**
     *  Ends the use of the class. After the call, allocate will fail.
      */
     void stop();
@@ -177,12 +183,15 @@
         // Create from an AHB (no slot information)
         // Should be attached to IGBP for rendering
         BufferItem(uint32_t generation,
-                   AHardwareBuffer_Desc *desc,
-                   AHardwareBuffer *pBuf);
+                   AHardwareBuffer *pBuf,
+                   uint64_t usage);
 
         ~BufferItem();
 
-        sp<GraphicBuffer> updateBuffer(uint64_t newUsage, uint32_t newGeneration);
+        std::shared_ptr<BufferItem> migrateBuffer(uint64_t newUsage, uint32_t newGeneration);
+
+        sp<GraphicBuffer> getGraphicBuffer();
+
     };
 
     struct BufferCache {
@@ -212,6 +221,8 @@
         BufferCache(uint64_t bqId, uint32_t generation, const sp<IGraphicBufferProducer>& igbp) :
             mBqId{bqId}, mGeneration{generation}, mIgbp{igbp} {}
 
+        ~BufferCache();
+
         void waitOnSlot(int slot);
 
         void blockSlot(int slot);
@@ -224,12 +235,14 @@
     std::map<uint64_t, std::shared_ptr<BufferItem>> mDequeued;
     std::set<uint64_t> mDeallocating;
 
+    // These member variables are read and modified as follows.
+    // 1. While mConfigLock is held:
+    //    set mInConfig to true (with mLock held) at the beginning, and
+    //    clear mInConfig (with mLock held) at the end.
+    // 2. While mLock is held and mInConfig is false.
     int mMaxDequeue;
-    int mMaxDequeueRequested;
     int mMaxDequeueCommitted;
-
-    uint32_t mMaxDequeueRequestedSeqId;
-    uint32_t mMaxDequeueCommittedSeqId;
+    std::optional<int> mMaxDequeueRequested;
 
     int mDequeueable;
 
@@ -261,13 +274,6 @@
     ::android::base::unique_fd mWritePipeFd;  // The writing end file descriptor
 
     std::atomic<bool> mStopped;
-    std::thread mEventQueueThread; // Thread to handle interrupted
-                                   // writes to the writing end.
-    std::mutex mEventLock;
-    std::condition_variable mEventCv;
-
-    bool mStopEventThread;
-    int mIncDequeueable; // pending # of write to increase dequeueable eventfd
 
 private:
     explicit GraphicsTracker(int maxDequeueCount);
@@ -279,6 +285,9 @@
     bool adjustDequeueConfLocked(bool *updateDequeueConf);
 
     void updateDequeueConf();
+    void clearCacheIfNecessaryLocked(
+            const std::shared_ptr<BufferCache> &cache,
+            int maxDequeueCommitted);
 
     c2_status_t requestAllocate(std::shared_ptr<BufferCache> *cache);
     c2_status_t requestDeallocate(uint64_t bid, const sp<Fence> &fence,
@@ -287,6 +296,7 @@
                                   sp<Fence> *rFence);
     c2_status_t requestRender(uint64_t bid, std::shared_ptr<BufferCache> *cache,
                               std::shared_ptr<BufferItem> *pBuffer,
+                              bool *fromCache,
                               bool *updateDequeue);
 
     void commitAllocate(c2_status_t res,
@@ -294,20 +304,23 @@
                         bool cached, int slotId, const sp<Fence> &fence,
                         std::shared_ptr<BufferItem> *buffer,
                         bool *updateDequeue);
-    void commitDeallocate(std::shared_ptr<BufferCache> &cache, int slotId, uint64_t bid);
-    void commitRender(uint64_t origBid,
-                      const std::shared_ptr<BufferCache> &cache,
+    void commitDeallocate(std::shared_ptr<BufferCache> &cache,
+                          int slotId, uint64_t bid,
+                          bool *updateDequeue);
+    void commitRender(const std::shared_ptr<BufferCache> &cache,
                       const std::shared_ptr<BufferItem> &buffer,
+                      const std::shared_ptr<BufferItem> &oldBuffer,
+                      bool bufferReplaced,
                       bool *updateDequeue);
 
     c2_status_t _allocate(
             const std::shared_ptr<BufferCache> &cache,
-            uint32_t width, uint32_t height, PixelFormat format, int64_t usage,
+            uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
             bool *cached, int *rSlotId, sp<Fence> *rFence,
             std::shared_ptr<BufferItem> *buffer);
 
-    void writeIncDequeueable(int inc);
-    void processEvent();
+    void writeIncDequeueableLocked(int inc);
+    void drainDequeueableLocked(int dec);
 };
 
 } // namespace aidl::android::hardware::media::c2::implementation
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 0c7dd77..3b7f7a6 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -474,6 +474,18 @@
     void stopUsingOutputSurface(
             C2BlockPool::local_id_t blockPoolId);
 
+    // Notify that a buffer has been released from the output surface.
+    void onBufferReleasedFromOutputSurface(
+            uint32_t generation);
+
+    // When the client receives \p workList and the blocks inside
+    // \p workList are IGBA-based graphic blocks, specify the owner
+    // as the current IGBA for future operations.
+    // Future operations could be rendering the blocks to the surface
+    // or deallocating the blocks back to the surface.
+    void holdIgbaBlocks(
+            const std::list<std::unique_ptr<C2Work>>& workList);
+
     // Connect to a given InputSurface.
     c2_status_t connectToInputSurface(
             const std::shared_ptr<InputSurface>& inputSurface,
@@ -513,6 +525,9 @@
     // In order to prevent the race condition mutex is added.
     std::mutex mOutputMutex;
 
+    struct GraphicBufferAllocators;
+    std::unique_ptr<GraphicBufferAllocators> mGraphicBufferAllocators;
+
     class AidlDeathManager;
     static AidlDeathManager *GetAidlDeathManager();
     std::optional<size_t> mAidlDeathSeq;
diff --git a/media/codec2/hal/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
index 2e89c3b..fda34a8 100644
--- a/media/codec2/hal/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -65,6 +65,10 @@
             const BnGraphicBufferProducer::QueueBufferInput& input,
             BnGraphicBufferProducer::QueueBufferOutput* output);
 
+    // Notify that a buffer has been released from the output surface. If the
+    // HAL version is 1.2, update the number of dequeueable/allocatable buffers.
+    void onBufferReleased(uint32_t generation);
+
     // Retrieve frame event history from the output surface.
     void pollForRenderedFrames(FrameEventHistoryDelta* delta);
 
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 4eebd1c..36322f5 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -16,7 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2-OutputBufferQueue"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <android-base/logging.h>
+#include <utils/Trace.h>
 
 #include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 #include <codec2/hidl/output.h>
@@ -139,11 +141,14 @@
                             "status = " << INVALID_OPERATION << ".";
             return INVALID_OPERATION;
         }
-        result = igbp->attachBuffer(bqSlot, graphicBuffer);
-        if (result == OK) {
-            syncVar->notifyDequeuedLocked();
-        }
+        syncVar->notifyDequeuedLocked();
         syncVar->unlock();
+        result = igbp->attachBuffer(bqSlot, graphicBuffer);
+        if (result != OK) {
+            syncVar->lock();
+            syncVar->notifyQueuedLocked();
+            syncVar->unlock();
+        }
     } else {
         result = igbp->attachBuffer(bqSlot, graphicBuffer);
     }
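
The reordering above announces the dequeue to the sync variable before attachBuffer() and hands the count back if the attach fails. A minimal sketch of that reserve-then-rollback pattern with generic stand-ins (not the real C2SurfaceSyncMemory API):

    #include <mutex>

    struct Sync {                          // stand-in for the sync-variable accounting
        std::mutex m;
        int dequeued = 0;
        void notifyDequeued() { std::lock_guard<std::mutex> l(m); ++dequeued; }
        void notifyQueued()   { std::lock_guard<std::mutex> l(m); --dequeued; }
    };

    static bool attach() { return true; }  // stand-in for igbp->attachBuffer()

    static bool attachWithAccounting(Sync& sync) {
        sync.notifyDequeued();             // reserve the dequeue count before attaching
        if (!attach()) {
            sync.notifyQueued();           // roll the reservation back on failure
            return false;
        }
        return true;
    }

    int main() {
        Sync sync;
        return attachWithAccounting(sync) ? 0 : 1;
    }
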
@@ -336,9 +341,25 @@
 }
 
 void OutputBufferQueue::stop() {
-    std::scoped_lock<std::mutex> l(mMutex);
-    mStopped = true;
-    mOwner.reset(); // destructor of the block will not triger IGBP::cancel()
+    std::shared_ptr<C2SurfaceSyncMemory> oldMem;
+    {
+        std::scoped_lock<std::mutex> l(mMutex);
+        if (mStopped) {
+            return;
+        }
+        mStopped = true;
+        mOwner.reset(); // destructor of the block will not trigger IGBP::cancel()
+        // basically configuring null surface
+        oldMem = mSyncMem;
+        mSyncMem.reset();
+        mIgbp.clear();
+        mGeneration = 0;
+        mBqId = 0;
+    }
+    {
+        std::scoped_lock<std::mutex> l(mOldMutex);
+        mOldMem = oldMem;
+    }
 }
 
 bool OutputBufferQueue::registerBuffer(const C2ConstGraphicBlock& block) {
@@ -388,6 +409,7 @@
     uint32_t generation;
     uint64_t bqId;
     int32_t bqSlot;
+    ScopedTrace trace(ATRACE_TAG,"Codec2-OutputBufferQueue::outputBuffer");
     bool display = V1_0::utils::displayBufferQueueBlock(block);
     if (!getBufferQueueAssignment(block, &generation, &bqId, &bqSlot) ||
         bqId == 0) {
@@ -416,13 +438,15 @@
 
         auto syncVar = syncMem ? syncMem->mem() : nullptr;
         if(syncVar) {
-            syncVar->lock();
             status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
                                          input, output);
             if (status == OK) {
-                syncVar->notifyQueuedLocked();
+                if (output->bufferReplaced) {
+                    syncVar->lock();
+                    syncVar->notifyQueuedLocked();
+                    syncVar->unlock();
+                }
             }
-            syncVar->unlock();
         } else {
             status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
                                          input, output);
@@ -471,13 +495,15 @@
     auto syncVar = syncMem ? syncMem->mem() : nullptr;
     status_t status = OK;
     if (syncVar) {
-        syncVar->lock();
         status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
                                                   input, output);
         if (status == OK) {
-            syncVar->notifyQueuedLocked();
+            if (output->bufferReplaced) {
+                syncVar->lock();
+                syncVar->notifyQueuedLocked();
+                syncVar->unlock();
+            }
         }
-        syncVar->unlock();
     } else {
         status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
                                                   input, output);
@@ -492,6 +518,30 @@
     return OK;
 }
 
+void OutputBufferQueue::onBufferReleased(uint32_t generation) {
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+    sp<IGraphicBufferProducer> outputIgbp;
+    uint32_t outputGeneration = 0;
+    {
+        std::unique_lock<std::mutex> l(mMutex);
+        if (mStopped) {
+            return;
+        }
+        outputIgbp = mIgbp;
+        outputGeneration = mGeneration;
+        syncMem = mSyncMem;
+    }
+
+    if (outputIgbp && generation == outputGeneration) {
+        auto syncVar = syncMem ? syncMem->mem() : nullptr;
+        if (syncVar) {
+            syncVar->lock();
+            syncVar->notifyQueuedLocked();
+            syncVar->unlock();
+        }
+    }
+}
+
 void OutputBufferQueue::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
     if (mIgbp) {
         mIgbp->getFrameTimestamps(delta);
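
The new onBufferReleased() snapshots the producer, generation, and sync memory under mMutex and only notifies the waiter after the lock is released. A compact sketch of that snapshot-under-lock pattern with placeholder types:

    #include <cstdint>
    #include <memory>
    #include <mutex>

    struct SyncVar {                       // stand-in for the C2 sync variable
        void lock() {}
        void notifyQueuedLocked() {}
        void unlock() {}
    };

    struct Queue {
        std::mutex mMutex;
        bool mStopped = false;
        uint32_t mGeneration = 0;
        std::shared_ptr<SyncVar> mSync;

        void onBufferReleased(uint32_t generation) {
            std::shared_ptr<SyncVar> sync;
            uint32_t curGeneration = 0;
            {
                std::lock_guard<std::mutex> l(mMutex);
                if (mStopped) return;
                curGeneration = mGeneration;
                sync = mSync;              // snapshot the shared state under the lock
            }
            if (sync && generation == curGeneration) {
                sync->lock();              // wake a waiter: one more buffer can be dequeued
                sync->notifyQueuedLocked();
                sync->unlock();
            }
        }
    };

    int main() {
        Queue q;
        q.mSync = std::make_shared<SyncVar>();
        q.onBufferReleased(0);             // generation matches, so the waiter is notified
        return 0;
    }
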
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index f0193d7..2aedd8b 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -28,3 +28,40 @@
         "libstagefright_foundation",
     ],
 }
+
+cc_library_static {
+    name: "libcodec2_hal_selection_static",
+    double_loadable: true,
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
+
+    srcs: [
+        "HalSelection.cpp",
+    ],
+
+    export_include_dirs: ["include/"],
+
+    shared_libs: [
+        "libbase",
+        "server_configurable_flags",
+    ],
+
+    static_libs: ["aconfig_mediacodec_flags_c_lib"],
+}
+
+cc_defaults {
+    name: "libcodec2_hal_selection",
+    static_libs: [
+        "aconfig_mediacodec_flags_c_lib",
+        "libcodec2_hal_selection_static",
+    ],
+    shared_libs: [
+        "libbase",
+        "server_configurable_flags",
+    ],
+}
diff --git a/media/codec2/hal/common/HalSelection.cpp b/media/codec2/hal/common/HalSelection.cpp
new file mode 100644
index 0000000..761a409
--- /dev/null
+++ b/media/codec2/hal/common/HalSelection.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-HalSelection"
+#include <android-base/logging.h>
+
+// NOTE: due to a dependency from mainline modules, libsysprop cannot be used here
+// #include <android/sysprop/MediaProperties.sysprop.h>
+#include <android-base/properties.h>
+#include <com_android_media_codec_flags.h>
+
+#include <codec2/common/HalSelection.h>
+
+namespace android {
+
+bool IsCodec2AidlHalSelected() {
+    if (!com::android::media::codec::flags::provider_->aidl_hal()) {
+        // Cannot select AIDL if not enabled
+        return false;
+    }
+#if 0
+    // NOTE: due to a dependency from mainline modules, libsysprop cannot be used here
+    using ::android::sysprop::MediaProperties::codec2_hal_selection;
+    using ::android::sysprop::MediaProperties::codec2_hal_selection_values;
+    constexpr codec2_hal_selection_values AIDL = codec2_hal_selection_values::AIDL;
+    constexpr codec2_hal_selection_values HIDL = codec2_hal_selection_values::HIDL;
+    codec2_hal_selection_values selection = codec2_hal_selection().value_or(HIDL);
+    switch (selection) {
+    case AIDL:
+        return true;
+    case HIDL:
+        return false;
+    default:
+        LOG(FATAL) << "Unexpected codec2 HAL selection value: " << (int)selection;
+    }
+#else
+    std::string selection = ::android::base::GetProperty("media.c2.hal.selection", "hidl");
+    if (selection == "aidl") {
+        return true;
+    } else if (selection == "hidl") {
+        return false;
+    } else {
+        LOG(FATAL) << "Unexpected codec2 HAL selection value: " << selection;
+    }
+#endif
+
+    return false;
+}
+
+}  // namespace android
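
A self-contained sketch of the same selection fallback, using an environment variable as a stand-in for the "media.c2.hal.selection" system property (the variable name and helper below are illustrative only):

    #include <cstdio>
    #include <cstdlib>
    #include <string>

    // Reads MEDIA_C2_HAL_SELECTION (a hypothetical stand-in for the system property)
    // and applies the same "hidl" default and "aidl"/"hidl" mapping as above.
    static bool isAidlSelectedSketch() {
        const char* value = std::getenv("MEDIA_C2_HAL_SELECTION");
        std::string selection = value ? value : "hidl";
        if (selection == "aidl") return true;
        if (selection == "hidl") return false;
        std::fprintf(stderr, "Unexpected codec2 HAL selection value: %s\n", selection.c_str());
        return false;
    }

    int main() {
        std::printf("AIDL selected: %d\n", isAidlSelectedSketch());
        return 0;
    }
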
diff --git a/media/codec2/hal/common/include/codec2/common/BufferTypes.h b/media/codec2/hal/common/include/codec2/common/BufferTypes.h
index afd2db0..af71122 100644
--- a/media/codec2/hal/common/include/codec2/common/BufferTypes.h
+++ b/media/codec2/hal/common/include/codec2/common/BufferTypes.h
@@ -183,7 +183,8 @@
                 baseBlocks, baseBlockIndices);
     }
     switch (blockPoolData->getType()) {
-    case _C2BlockPoolData::TYPE_BUFFERPOOL: {
+    case _C2BlockPoolData::TYPE_BUFFERPOOL:
+    case _C2BlockPoolData::TYPE_BUFFERPOOL2: {
             // BufferPoolData
             std::shared_ptr<typename BufferPoolTypes::BufferPoolData> bpData;
             if (!GetBufferPoolData<BufferPoolTypes>(blockPoolData, &bpData) || !bpData) {
@@ -194,28 +195,30 @@
                     index, bpData,
                     bufferPoolSender, baseBlocks, baseBlockIndices);
         }
-    case _C2BlockPoolData::TYPE_BUFFERQUEUE:
-        uint32_t gen;
-        uint64_t bqId;
-        int32_t bqSlot;
-        // Update handle if migration happened.
-        if (_C2BlockFactory::GetBufferQueueData(
-                blockPoolData, &gen, &bqId, &bqSlot)) {
-            android::MigrateNativeCodec2GrallocHandle(
-                    const_cast<native_handle_t*>(handle), gen, bqId, bqSlot);
+    case _C2BlockPoolData::TYPE_BUFFERQUEUE: {
+            uint32_t gen;
+            uint64_t bqId;
+            int32_t bqSlot;
+            // Update handle if migration happened.
+            if (_C2BlockFactory::GetBufferQueueData(
+                    blockPoolData, &gen, &bqId, &bqSlot)) {
+                android::MigrateNativeCodec2GrallocHandle(
+                        const_cast<native_handle_t*>(handle), gen, bqId, bqSlot);
+            }
+            return _addBaseBlock(
+                    index, handle,
+                    baseBlocks, baseBlockIndices);
         }
-        return _addBaseBlock(
-                index, handle,
-                baseBlocks, baseBlockIndices);
-    case _C2BlockPoolData::TYPE_AHWBUFFER:
-        AHardwareBuffer *pBuf;
-        if (!_C2BlockFactory::GetAHardwareBuffer(blockPoolData, &pBuf)) {
-            LOG(ERROR) << "AHardwareBuffer unavailable in a block.";
-            return false;
+    case _C2BlockPoolData::TYPE_AHWBUFFER: {
+            AHardwareBuffer *pBuf;
+            if (!_C2BlockFactory::GetAHardwareBuffer(blockPoolData, &pBuf)) {
+                LOG(ERROR) << "AHardwareBuffer unavailable in a block.";
+                return false;
+            }
+            return _addBaseBlock(
+                    index, pBuf,
+                    baseBlocks, baseBlockIndices);
         }
-        return _addBaseBlock(
-                index, pBuf,
-                baseBlocks, baseBlockIndices);
     default:
         LOG(ERROR) << "Unknown C2BlockPoolData type.";
         return false;
diff --git a/media/codec2/hal/common/include/codec2/common/HalSelection.h b/media/codec2/hal/common/include/codec2/common/HalSelection.h
new file mode 100644
index 0000000..7c77515
--- /dev/null
+++ b/media/codec2/hal/common/include/codec2/common/HalSelection.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HAL_SELECTION_H
+#define CODEC2_HAL_SELECTION_H
+
+namespace android {
+
+// Returns true iff AIDL c2 HAL is selected for the system
+bool IsCodec2AidlHalSelected();
+
+}  // namespace android
+
+#endif  // CODEC2_HAL_SELECTION_H
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 222c3d2..c7c04c5 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -14,10 +14,11 @@
  * limitations under the License.
  */
 
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 #define LOG_TAG "codec2_hidl_hal_audio_dec_test"
 
 #include <android-base/logging.h>
+#include <android/binder_process.h>
 #include <gtest/gtest.h>
 #include <hidl/GtestPrinter.h>
 #include <stdio.h>
@@ -27,6 +28,7 @@
 #include <C2BufferPriv.h>
 #include <C2Config.h>
 #include <C2Debug.h>
+#include <codec2/aidl/ParamTypes.h>
 #include <codec2/hidl/client.h>
 
 #include "media_c2_hidl_test_common.h"
@@ -88,7 +90,8 @@
 
         std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
         CHECK_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mLinearAllocator), C2_OK);
-        mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
+        mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++,
+                                                          getBufferPoolVer());
         ASSERT_NE(mLinearPool, nullptr);
 
         std::vector<std::unique_ptr<C2Param>> queried;
@@ -333,7 +336,9 @@
             ASSERT_TRUE(false) << "Wait for generating C2Work exceeded timeout";
         }
         int64_t timestamp = (*Info)[frameID].timestamp;
-        flags = ((*Info)[frameID].flags == FLAG_CONFIG_DATA) ? C2FrameData::FLAG_CODEC_CONFIG : 0;
+        flags = ((*Info)[frameID].vtsFlags & (1 << VTS_BIT_FLAG_CSD_FRAME))
+                        ? C2FrameData::FLAG_CODEC_CONFIG
+                        : 0;
         if (signalEOS && ((frameID == (int)Info->size() - 1) || (frameID == (offset + range - 1))))
             flags |= C2FrameData::FLAG_END_OF_STREAM;
 
@@ -528,14 +533,10 @@
 
     // request EOS for thumbnail
     // signal EOS flag with last frame
-    size_t i = -1;
-    uint32_t flags;
-    do {
-        i++;
-        flags = 0;
-        if (Info[i].flags) flags = 1u << (Info[i].flags - 1);
-
-    } while (!(flags & SYNC_FRAME));
+    size_t i;
+    for (i = 0; i < Info.size(); i++) {
+        if (Info[i].vtsFlags & (1 << VTS_BIT_FLAG_SYNC_FRAME)) break;
+    }
     std::ifstream eleStream;
     eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
@@ -640,14 +641,11 @@
     mFlushedIndices.clear();
     int index = numFramesFlushed;
     bool keyFrame = false;
-    uint32_t flags = 0;
     while (index < (int)Info.size()) {
-        if (Info[index].flags) flags = 1u << (Info[index].flags - 1);
-        if ((flags & SYNC_FRAME) == SYNC_FRAME) {
+        if (Info[index].vtsFlags & (1 << VTS_BIT_FLAG_SYNC_FRAME)) {
             keyFrame = true;
             break;
         }
-        flags = 0;
         eleStream.ignore(Info[index].bytesCount);
         index++;
     }
@@ -681,24 +679,24 @@
     int bytesCount = 0;
     uint32_t frameId = 0;
     uint32_t flags = 0;
+    uint32_t vtsFlags = 0;
     uint32_t timestamp = 0;
     bool codecConfig = false;
     // This test introduces empty CSD after every 20th frame
     // and empty input frames at an interval of 5 frames.
     while (1) {
         if (!(frameId % 5)) {
-            if (!(frameId % 20))
-                flags = 32;
-            else
-                flags = 0;
+            vtsFlags = !(frameId % 20) ? (1 << VTS_BIT_FLAG_CSD_FRAME) : 0;
             bytesCount = 0;
         } else {
             if (!(eleInfo >> bytesCount)) break;
             eleInfo >> flags;
+            vtsFlags = mapInfoFlagstoVtsFlags(flags);
+            ASSERT_NE(vtsFlags, 0xFF) << "unrecognized flag entry in info file: " << mInfoFile;
             eleInfo >> timestamp;
-            codecConfig = flags ? ((1 << (flags - 1)) & C2FrameData::FLAG_CODEC_CONFIG) != 0 : 0;
+            codecConfig = (vtsFlags & (1 << VTS_BIT_FLAG_CSD_FRAME)) != 0;
         }
-        Info.push_back({bytesCount, flags, timestamp});
+        Info.push_back({bytesCount, vtsFlags, timestamp});
         frameId++;
     }
     eleInfo.close();
@@ -864,5 +862,6 @@
     }
 
     ::testing::InitGoogleTest(&argc, argv);
+    ABinderProcess_startThreadPool();
     return RUN_ALL_TESTS();
 }
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index 327717b..f8c2903 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -14,10 +14,11 @@
  * limitations under the License.
  */
 
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 #define LOG_TAG "codec2_hidl_hal_audio_enc_test"
 
 #include <android-base/logging.h>
+#include <android/binder_process.h>
 #include <gtest/gtest.h>
 #include <hidl/GtestPrinter.h>
 #include <stdio.h>
@@ -69,7 +70,8 @@
 
         std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
         CHECK_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mLinearAllocator), C2_OK);
-        mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
+        mLinearPool = std::make_shared<C2PooledBlockPool>(
+                mLinearAllocator, mBlockPoolId++, getBufferPoolVer());
         ASSERT_NE(mLinearPool, nullptr);
 
         std::vector<std::unique_ptr<C2Param>> queried;
@@ -775,6 +777,7 @@
                 std::make_tuple(std::get<0>(params), std::get<1>(params), true, 2));
     }
 
+    ABinderProcess_startThreadPool();
     ::testing::InitGoogleTest(&argc, argv);
     return RUN_ALL_TESTS();
 }
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
index be4bafa..0f07077 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
@@ -11,6 +11,7 @@
     name: "VtsHalMediaC2V1_0CommonUtil",
     defaults: [
         "VtsHalTargetTestDefaults",
+        "libcodec2-aidl-client-defaults",
         "libcodec2-hidl-client-defaults",
     ],
 
@@ -29,6 +30,7 @@
     name: "VtsHalMediaC2V1_0Defaults",
     defaults: [
         "VtsHalTargetTestDefaults",
+        "libcodec2-aidl-client-defaults",
         "libcodec2-hidl-client-defaults",
     ],
 
@@ -38,6 +40,7 @@
     ],
 
     shared_libs: [
+        "libbinder_ndk",
         "libcodec2_client",
     ],
     test_suites: [
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index 1f1681d..a72f7bd 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -14,13 +14,14 @@
  * limitations under the License.
  */
 
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 #define LOG_TAG "media_c2_hidl_test_common"
 #include <stdio.h>
 
 #include "media_c2_hidl_test_common.h"
 
 #include <android/hardware/media/c2/1.0/IComponentStore.h>
+#include <codec2/aidl/ParamTypes.h>
 
 std::string sResourceDir = "";
 
@@ -44,6 +45,14 @@
     std::cerr << "\t -h,  --help:   Print usage \n";
 }
 
+C2PooledBlockPool::BufferPoolVer getBufferPoolVer() {
+    if (::aidl::android::hardware::media::c2::utils::IsSelected()) {
+        return C2PooledBlockPool::VER_AIDL2;
+    } else {
+        return C2PooledBlockPool::VER_HIDL;
+    }
+}
+
 void parseArgs(int argc, char** argv) {
     int arg;
     int option_index;
@@ -224,18 +233,24 @@
     int32_t numCsds = 0;
     int32_t bytesCount = 0;
     uint32_t flags = 0;
+    uint32_t vtsFlags = 0;
     uint32_t timestamp = 0;
     while (1) {
         if (!(eleInfo >> bytesCount)) break;
         eleInfo >> flags;
+        vtsFlags = mapInfoFlagstoVtsFlags(flags);
+        if (vtsFlags == 0xFF) {
+            ALOGE("unrecognized flag entry in info file %s", info.c_str());
+            return -1;
+        }
         eleInfo >> timestamp;
-        bool codecConfig = flags ? ((1 << (flags - 1)) & C2FrameData::FLAG_CODEC_CONFIG) != 0 : 0;
+        bool codecConfig = (vtsFlags & (1 << VTS_BIT_FLAG_CSD_FRAME)) != 0;
         if (codecConfig) numCsds++;
-        bool nonDisplayFrame = ((flags & FLAG_NON_DISPLAY_FRAME) != 0);
+        bool nonDisplayFrame = (vtsFlags & (1 << VTS_BIT_FLAG_NO_SHOW_FRAME)) != 0;
         if (timestampDevTest && !codecConfig && !nonDisplayFrame) {
             timestampUslist->push_back(timestamp);
         }
-        frameInfo->push_back({bytesCount, flags, timestamp});
+        frameInfo->push_back({bytesCount, vtsFlags, timestamp});
     }
     ALOGV("numCsds : %d", numCsds);
     eleInfo.close();
@@ -264,3 +279,11 @@
     ASSERT_EQ(flushedIndices.empty(), true);
     flushedWork.clear();
 }
+
+int mapInfoFlagstoVtsFlags(int infoFlags) {
+    if (infoFlags == 0) return 0;
+    else if (infoFlags == 0x1) return (1 << VTS_BIT_FLAG_SYNC_FRAME);
+    else if (infoFlags == 0x10) return (1 << VTS_BIT_FLAG_NO_SHOW_FRAME);
+    else if (infoFlags == 0x20) return (1 << VTS_BIT_FLAG_CSD_FRAME);
+    return 0xFF;
+}
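The common-utils change above stops shifting the raw info-file flag value at every use and instead maps it once onto named VTS bit positions. Below is a minimal standalone sketch of that mapping and of how callers test the resulting bits; the enum values mirror the patch, while mapInfoFlagsToVtsFlags and the main() driver here are illustrative only.

    #include <cstdio>

    // Bit positions mirroring the c2_vts_flags_t enum introduced in this patch.
    enum VtsFlagBit { kSyncFrame = 1, kNoShowFrame = 2, kCsdFrame = 3 };

    // Maps a legacy .info file flag value to a VTS bit mask; 0xFF marks unknown input.
    static int mapInfoFlagsToVtsFlags(int infoFlags) {
        if (infoFlags == 0x0) return 0;
        if (infoFlags == 0x1) return 1 << kSyncFrame;     // sync (key) frame
        if (infoFlags == 0x10) return 1 << kNoShowFrame;  // non-display frame
        if (infoFlags == 0x20) return 1 << kCsdFrame;     // codec config data
        return 0xFF;                                      // unrecognized entry
    }

    int main() {
        const int legacyFlags[] = {0x0, 0x1, 0x10, 0x20, 0x7};
        for (int f : legacyFlags) {
            int vts = mapInfoFlagsToVtsFlags(f);
            bool isCsd = vts != 0xFF && (vts & (1 << kCsdFrame));
            std::printf("info flag 0x%x -> vts 0x%x (csd=%d)\n", f, vts, isCsd);
        }
        return 0;
    }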
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index ecab0cb..eda7b99 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -17,8 +17,10 @@
 #ifndef MEDIA_C2_HIDL_TEST_COMMON_H
 #define MEDIA_C2_HIDL_TEST_COMMON_H
 
+#include <C2BufferPriv.h>
 #include <C2Component.h>
 #include <C2Config.h>
+#include <C2PlatformSupport.h>
 
 #include <codec2/hidl/client.h>
 #include <getopt.h>
@@ -27,9 +29,6 @@
 #include <chrono>
 #include <fstream>
 
-#define FLAG_NON_DISPLAY_FRAME (1 << 4)
-#define FLAG_CONFIG_DATA (1 << 5)
-
 #define MAX_RETRY 20
 #define TIME_OUT 400ms
 #define MAX_INPUT_BUFFERS 8
@@ -51,9 +50,15 @@
 // Component name prefix
 extern std::string sComponentNamePrefix;
 
+enum c2_vts_flags_t {
+    VTS_BIT_FLAG_SYNC_FRAME = 1,
+    VTS_BIT_FLAG_NO_SHOW_FRAME = 2,
+    VTS_BIT_FLAG_CSD_FRAME = 3,
+};
+
 struct FrameInfo {
     int bytesCount;
-    uint32_t flags;
+    uint32_t vtsFlags;
     int64_t timestamp;
 };
 
@@ -126,6 +131,8 @@
     std::function<void(std::list<std::unique_ptr<C2Work>>& workItems)> callBack;
 };
 
+C2PooledBlockPool::BufferPoolVer getBufferPoolVer();
+
 void parseArgs(int argc, char** argv);
 
 // Return all test parameters, a list of tuple of <instance, component>.
@@ -161,4 +168,7 @@
 void verifyFlushOutput(std::list<std::unique_ptr<C2Work>>& flushedWork,
                        std::list<std::unique_ptr<C2Work>>& workQueue,
                        std::list<uint64_t>& flushedIndices, std::mutex& queueLock);
+
+int mapInfoFlagstoVtsFlags(int infoFlags);
+
 #endif  // MEDIA_C2_HIDL_TEST_COMMON_H
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index d561adc..df89510 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "codec2_hidl_hal_video_dec_test"
 
 #include <android-base/logging.h>
+#include <android/binder_process.h>
 #include <gtest/gtest.h>
 #include <hidl/GtestPrinter.h>
 #include <stdio.h>
@@ -28,6 +29,7 @@
 #include <C2BufferPriv.h>
 #include <C2Config.h>
 #include <C2Debug.h>
+#include <codec2/common/HalSelection.h>
 #include <codec2/hidl/client.h>
 #include <gui/BufferQueue.h>
 #include <gui/IConsumerListener.h>
@@ -119,7 +121,8 @@
 
         std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
         CHECK_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mLinearAllocator), C2_OK);
-        mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
+        mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++,
+                                                          getBufferPoolVer());
         ASSERT_NE(mLinearPool, nullptr);
 
         std::vector<std::unique_ptr<C2Param>> queried;
@@ -405,30 +408,45 @@
                       surfaceMode_t surfMode) {
     using namespace android;
     sp<IGraphicBufferProducer> producer = nullptr;
+    sp<IGraphicBufferConsumer> consumer = nullptr;
+    sp<GLConsumer> texture = nullptr;
+    sp<ANativeWindow> surface = nullptr;
     static std::atomic_uint32_t surfaceGeneration{0};
     uint32_t generation =
             (getpid() << 10) |
             ((surfaceGeneration.fetch_add(1, std::memory_order_relaxed) + 1) & ((1 << 10) - 1));
     int32_t maxDequeueBuffers = kSmoothnessFactor + kRenderingDepth;
+    C2BlockPool::local_id_t poolId = C2BlockPool::BASIC_GRAPHIC;
+    std::shared_ptr<Codec2Client::Configurable> configurable;
+    bool aidl = ::android::IsCodec2AidlHalSelected();
+    if (aidl) {
+        // AIDL does not support blockpool-less mode.
+        c2_status_t poolRet = component->createBlockPool(
+                C2PlatformAllocatorStore::IGBA, &poolId, &configurable);
+        ASSERT_EQ(poolRet, C2_OK) << "createBlockPool failed";
+    }
+
     if (surfMode == SURFACE) {
-        sp<IGraphicBufferConsumer> consumer = nullptr;
         BufferQueue::createBufferQueue(&producer, &consumer);
         ASSERT_NE(producer, nullptr) << "createBufferQueue returned invalid producer";
         ASSERT_NE(consumer, nullptr) << "createBufferQueue returned invalid consumer";
 
-        sp<GLConsumer> texture =
+        texture =
                 new GLConsumer(consumer, 0 /* tex */, GLConsumer::TEXTURE_EXTERNAL,
                                true /* useFenceSync */, false /* isControlledByApp */);
 
-        sp<ANativeWindow> gSurface = new Surface(producer);
-        ASSERT_NE(gSurface, nullptr) << "getSurface failed";
+        surface = new Surface(producer);
+        ASSERT_NE(surface, nullptr) << "failed to create Surface object";
 
         producer->setGenerationNumber(generation);
     }
 
-    c2_status_t err = component->setOutputSurface(C2BlockPool::BASIC_GRAPHIC, producer, generation,
+    c2_status_t err = component->setOutputSurface(poolId, producer, generation,
                                                   maxDequeueBuffers);
-    ASSERT_EQ(err, C2_OK) << "setOutputSurface failed";
+    std::string surfStr = surfMode == NO_SURFACE ? "NO_SURFACE" :
+            (surfMode == NULL_SURFACE ? "NULL_SURFACE" : "WITH_SURFACE");
+
+    ASSERT_EQ(err, C2_OK) << "setOutputSurface failed, surfMode: " << surfStr;
 }
 
 void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -461,7 +479,9 @@
         }
         int64_t timestamp = (*Info)[frameID].timestamp;
 
-        flags = ((*Info)[frameID].flags == FLAG_CONFIG_DATA) ? C2FrameData::FLAG_CODEC_CONFIG : 0;
+        flags = ((*Info)[frameID].vtsFlags & (1 << VTS_BIT_FLAG_CSD_FRAME))
+                        ? C2FrameData::FLAG_CODEC_CONFIG
+                        : 0;
         if (signalEOS && ((frameID == (int)Info->size() - 1) || (frameID == (offset + range - 1))))
             flags |= C2FrameData::FLAG_END_OF_STREAM;
 
@@ -709,17 +729,19 @@
         ASSERT_EQ(eleInfo.is_open(), true) << mInputFile << " - file not found";
         int bytesCount = 0;
         uint32_t flags = 0;
+        uint32_t vtsFlags = 0;
         uint32_t timestamp = 0;
         uint32_t timestampMax = 0;
         while (1) {
             if (!(eleInfo >> bytesCount)) break;
             eleInfo >> flags;
+            vtsFlags = mapInfoFlagstoVtsFlags(flags);
+            ASSERT_NE(vtsFlags, 0xFF) << "unrecognized flag entry in info file: " << mInfoFile;
             eleInfo >> timestamp;
             timestamp += timestampOffset;
-            Info.push_back({bytesCount, flags, timestamp});
-            bool codecConfig =
-                    flags ? ((1 << (flags - 1)) & C2FrameData::FLAG_CODEC_CONFIG) != 0 : 0;
-            bool nonDisplayFrame = ((flags & FLAG_NON_DISPLAY_FRAME) != 0);
+            Info.push_back({bytesCount, vtsFlags, timestamp});
+            bool codecConfig = (vtsFlags & (1 << VTS_BIT_FLAG_CSD_FRAME)) != 0;
+            bool nonDisplayFrame = (vtsFlags & (1 << VTS_BIT_FLAG_NO_SHOW_FRAME)) != 0;
 
             {
                 ULock l(mQueueLock);
@@ -793,20 +815,15 @@
     int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
 
-    uint32_t flags = 0;
     for (size_t i = 0; i < MAX_ITERATIONS; i++) {
         ASSERT_EQ(mComponent->start(), C2_OK);
 
         // request EOS for thumbnail
         // signal EOS flag with last frame
         size_t j = -1;
-        do {
-            j++;
-            flags = 0;
-            if (Info[j].flags) flags = 1u << (Info[j].flags - 1);
-
-        } while (!(flags & SYNC_FRAME));
-
+        for (j = 0; j < Info.size(); j++) {
+            if (Info[j].vtsFlags & (1 << VTS_BIT_FLAG_SYNC_FRAME)) break;
+        }
         std::ifstream eleStream;
         eleStream.open(mInputFile, std::ifstream::binary);
         ASSERT_EQ(eleStream.is_open(), true);
@@ -906,14 +923,11 @@
     // Seek to next key frame and start decoding till the end
     int index = numFramesFlushed;
     bool keyFrame = false;
-    uint32_t flags = 0;
     while (index < (int)Info.size()) {
-        if (Info[index].flags) flags = 1u << (Info[index].flags - 1);
-        if ((flags & SYNC_FRAME) == SYNC_FRAME) {
+        if (Info[index].vtsFlags & (1 << VTS_BIT_FLAG_SYNC_FRAME)) {
             keyFrame = true;
             break;
         }
-        flags = 0;
         eleStream.ignore(Info[index].bytesCount);
         index++;
     }
@@ -947,24 +961,24 @@
     int bytesCount = 0;
     uint32_t frameId = 0;
     uint32_t flags = 0;
+    uint32_t vtsFlags = 0;
     uint32_t timestamp = 0;
     bool codecConfig = false;
     // This test introduces empty CSD after every 20th frame
     // and empty input frames at an interval of 5 frames.
     while (1) {
         if (!(frameId % 5)) {
-            if (!(frameId % 20))
-                flags = 32;
-            else
-                flags = 0;
+            vtsFlags = !(frameId % 20) ? (1 << VTS_BIT_FLAG_CSD_FRAME) : 0;
             bytesCount = 0;
         } else {
             if (!(eleInfo >> bytesCount)) break;
             eleInfo >> flags;
+            vtsFlags = mapInfoFlagstoVtsFlags(flags);
+            ASSERT_NE(vtsFlags, 0xFF) << "unrecognized flag entry in info file: " << mInfoFile;
             eleInfo >> timestamp;
-            codecConfig = flags ? ((1 << (flags - 1)) & C2FrameData::FLAG_CODEC_CONFIG) != 0 : 0;
+            codecConfig = (vtsFlags & (1 << VTS_BIT_FLAG_CSD_FRAME)) != 0;
         }
-        Info.push_back({bytesCount, flags, timestamp});
+        Info.push_back({bytesCount, vtsFlags, timestamp});
         frameId++;
     }
     eleInfo.close();
@@ -1044,12 +1058,9 @@
     }
 
     int offset = framesToDecode;
-    uint32_t flags = 0;
     while (1) {
         while (offset < (int)Info.size()) {
-            flags = 0;
-            if (Info[offset].flags) flags = 1u << (Info[offset].flags - 1);
-            if (flags & SYNC_FRAME) {
+            if (Info[offset].vtsFlags & (1 << VTS_BIT_FLAG_SYNC_FRAME)) {
                 keyFrame = true;
                 break;
             }
@@ -1132,5 +1143,6 @@
     }
 
     ::testing::InitGoogleTest(&argc, argv);
+    ABinderProcess_startThreadPool();
     return RUN_ALL_TESTS();
 }
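The decoder test above now builds the surface generation number itself and passes it, together with the AIDL-aware block pool id, into setOutputSurface(). The generation value is simply the pid combined with a small wrapping counter; here is a standalone sketch of that arithmetic, assuming the same 10-bit counter split used in the test.

    #include <atomic>
    #include <cstdint>
    #include <cstdio>
    #include <unistd.h>

    // Builds a surface generation number the way the VTS test does: the pid in the
    // upper bits and a 10-bit wrapping counter in the lower bits, so surfaces
    // created concurrently in one process still get distinct generations.
    static uint32_t nextSurfaceGeneration() {
        static std::atomic_uint32_t counter{0};
        uint32_t serial =
                (counter.fetch_add(1, std::memory_order_relaxed) + 1) & ((1u << 10) - 1);
        return (static_cast<uint32_t>(getpid()) << 10) | serial;
    }

    int main() {
        for (int i = 0; i < 3; ++i) {
            std::printf("generation: %u\n", nextSurfaceGeneration());
        }
        return 0;
    }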
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index db68b96..8ecb9c0 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "codec2_hidl_hal_video_enc_test"
 
 #include <android-base/logging.h>
+#include <android/binder_process.h>
+#include <codec2/common/HalSelection.h>
 #include <gtest/gtest.h>
 #include <hidl/GtestPrinter.h>
 #include <stdio.h>
@@ -70,7 +72,9 @@
         std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
         CHECK_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_GRAPHIC, &mGraphicAllocator),
                  C2_OK);
-        mGraphicPool = std::make_shared<C2PooledBlockPool>(mGraphicAllocator, mBlockPoolId++);
+        C2PooledBlockPool::BufferPoolVer ver = ::android::IsCodec2AidlHalSelected() ?
+                C2PooledBlockPool::VER_AIDL2 : C2PooledBlockPool::VER_HIDL;
+        mGraphicPool = std::make_shared<C2PooledBlockPool>(mGraphicAllocator, mBlockPoolId++, ver);
         ASSERT_NE(mGraphicPool, nullptr);
 
         std::vector<std::unique_ptr<C2Param>> queried;
@@ -930,5 +934,6 @@
     }
 
     ::testing::InitGoogleTest(&argc, argv);
+    ABinderProcess_startThreadPool();
     return RUN_ALL_TESTS();
 }
diff --git a/media/codec2/hal/plugin/FilterWrapper.cpp b/media/codec2/hal/plugin/FilterWrapper.cpp
index d5124fd..197d6e7 100644
--- a/media/codec2/hal/plugin/FilterWrapper.cpp
+++ b/media/codec2/hal/plugin/FilterWrapper.cpp
@@ -969,6 +969,15 @@
         C2PlatformAllocatorStore::id_t allocatorId,
         std::shared_ptr<const C2Component> component,
         std::shared_ptr<C2BlockPool> *pool) {
+    C2PlatformAllocatorDesc allocatorParam;
+    allocatorParam.allocatorId = allocatorId;
+    return createBlockPool(allocatorParam, component, pool);
+}
+
+c2_status_t FilterWrapper::createBlockPool(
+        C2PlatformAllocatorDesc &allocatorParam,
+        std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool) {
     std::unique_lock lock(mWrappedComponentsMutex);
     for (auto it = mWrappedComponents.begin(); it != mWrappedComponents.end(); ) {
         std::shared_ptr<const C2Component> comp = it->front().lock();
@@ -983,13 +992,13 @@
                     [](const std::weak_ptr<const C2Component> &el) {
                         return el.lock();
                     });
-            if (C2_OK == CreateCodec2BlockPool(allocatorId, components, pool)) {
+            if (C2_OK == CreateCodec2BlockPool(allocatorParam, components, pool)) {
                 return C2_OK;
             }
         }
         ++it;
     }
-    return CreateCodec2BlockPool(allocatorId, component, pool);
+    return CreateCodec2BlockPool(allocatorParam, component, pool);
 }
 
 c2_status_t FilterWrapper::queryParamsForPreviousComponent(
diff --git a/media/codec2/hal/plugin/FilterWrapperStub.cpp b/media/codec2/hal/plugin/FilterWrapperStub.cpp
index 01ca596..3fd5409 100644
--- a/media/codec2/hal/plugin/FilterWrapperStub.cpp
+++ b/media/codec2/hal/plugin/FilterWrapperStub.cpp
@@ -45,7 +45,16 @@
         C2PlatformAllocatorStore::id_t allocatorId,
         std::shared_ptr<const C2Component> component,
         std::shared_ptr<C2BlockPool> *pool) {
-    return CreateCodec2BlockPool(allocatorId, component, pool);
+    C2PlatformAllocatorDesc allocatorParam;
+    allocatorParam.allocatorId = allocatorId;
+    return createBlockPool(allocatorParam, component, pool);
+}
+
+c2_status_t FilterWrapper::createBlockPool(
+        C2PlatformAllocatorDesc &allocatorParam,
+        std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool) {
+    return CreateCodec2BlockPool(allocatorParam, component, pool);
 }
 
 }  // namespace android
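Both FilterWrapper and the stub keep the old id-based createBlockPool() as a thin shim that packs the id into a C2PlatformAllocatorDesc and forwards to the new descriptor-based overload, so existing callers keep working while new fields can ride along in the descriptor. A minimal sketch of that forwarding pattern follows; AllocatorDesc and BlockPool below are stand-ins, not the real Codec2 types.

    #include <cstdint>
    #include <cstdio>
    #include <memory>

    // Stand-in for C2PlatformAllocatorDesc: today it only carries the allocator id,
    // but new fields can be added without touching existing callers.
    struct AllocatorDesc {
        uint32_t allocatorId = 0;
    };

    struct BlockPool {
        explicit BlockPool(uint32_t id) : allocatorId(id) {}
        uint32_t allocatorId;
    };

    // New, descriptor-based entry point: the single place that actually creates pools.
    int createBlockPool(const AllocatorDesc& desc, std::shared_ptr<BlockPool>* pool) {
        *pool = std::make_shared<BlockPool>(desc.allocatorId);
        return 0;  // C2_OK in the real code
    }

    // Legacy id-based entry point, kept for source compatibility: wrap and forward.
    int createBlockPool(uint32_t allocatorId, std::shared_ptr<BlockPool>* pool) {
        AllocatorDesc desc;
        desc.allocatorId = allocatorId;
        return createBlockPool(desc, pool);
    }

    int main() {
        std::shared_ptr<BlockPool> pool;
        if (createBlockPool(7u, &pool) == 0) {
            std::printf("pool created for allocator %u\n", pool->allocatorId);
        }
        return 0;
    }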
diff --git a/media/codec2/hal/plugin/internal/FilterWrapper.h b/media/codec2/hal/plugin/internal/FilterWrapper.h
index cf2cc30..dcffb5c 100644
--- a/media/codec2/hal/plugin/internal/FilterWrapper.h
+++ b/media/codec2/hal/plugin/internal/FilterWrapper.h
@@ -90,6 +90,14 @@
             std::shared_ptr<C2BlockPool> *pool);
 
     /**
+     * Create a C2BlockPool object with |allocatorParam| for |component|.
+     */
+    c2_status_t createBlockPool(
+            C2PlatformAllocatorDesc &allocatorParam,
+            std::shared_ptr<const C2Component> component,
+            std::shared_ptr<C2BlockPool> *pool);
+
+    /**
      * Query parameters that |intf| wants from the previous component.
      */
     c2_status_t queryParamsForPreviousComponent(
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index 92cfe31..bba022b 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -202,8 +202,10 @@
     android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
     mQueueThread->run("C2OMXNode", PRIORITY_AUDIO);
 
-    Mutexed<android_dataspace>::Locked ds(mDataspace);
-    *ds = HAL_DATASPACE_UNKNOWN;
+    android_dataspace ds = HAL_DATASPACE_UNKNOWN;
+    mDataspace.lock().set(ds);
+    uint32_t pf = PIXEL_FORMAT_UNKNOWN;
+    mPixelFormat.lock().set(pf);
 }
 
 status_t C2OMXNode::freeNode() {
@@ -521,8 +523,8 @@
     ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
     mQueueThread->setDataspace(dataSpace);
 
-    Mutexed<android_dataspace>::Locked ds(mDataspace);
-    *ds = dataSpace;
+    mDataspace.lock().set(dataSpace);
+    mPixelFormat.lock().set(pixelFormat);
     return OK;
 }
 
@@ -559,6 +561,10 @@
     return *mDataspace.lock();
 }
 
+uint32_t C2OMXNode::getPixelFormat() {
+    return *mPixelFormat.lock();
+}
+
 void C2OMXNode::setPriority(int priority) {
     mQueueThread->setPriority(priority);
 }
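C2OMXNode now writes mDataspace and mPixelFormat through the short-lived accessor returned by lock() instead of holding a named Locked object. A compilable sketch of that accessor pattern follows, using a hand-rolled Guarded<T> as a stand-in for android::Mutexed<T> (the real template differs in detail).

    #include <cstdint>
    #include <cstdio>
    #include <mutex>

    // Minimal stand-in for android::Mutexed<T>: lock() returns a scoped accessor so
    // a value can be set or read in a single statement, which is the pattern
    // C2OMXNode now uses for mDataspace and mPixelFormat.
    template <typename T>
    class Guarded {
    public:
        class Access {
        public:
            explicit Access(Guarded& owner) : mLock(owner.mMutex), mValue(owner.mValue) {}
            void set(const T& v) { mValue = v; }
            const T& operator*() const { return mValue; }
        private:
            std::lock_guard<std::mutex> mLock;
            T& mValue;
        };
        Access lock() { return Access(*this); }
    private:
        std::mutex mMutex;
        T mValue{};
    };

    int main() {
        Guarded<uint32_t> pixelFormat;
        pixelFormat.lock().set(0x23);  // like mPixelFormat.lock().set(pf)
        std::printf("pixel format: 0x%x\n", *pixelFormat.lock());  // like *mPixelFormat.lock()
        return 0;
    }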
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index 6669318..c8ce336 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -99,6 +99,11 @@
     android_dataspace getDataspace();
 
     /**
+     * Returns pixel format information from GraphicBufferSource.
+     */
+    uint32_t getPixelFormat();
+
+    /**
      * Sets priority of the queue thread.
      */
     void setPriority(int priority);
@@ -112,6 +117,7 @@
     uint32_t mHeight;
     uint64_t mUsage;
     Mutexed<android_dataspace> mDataspace;
+    Mutexed<uint32_t> mPixelFormat;
 
     // WORKAROUND: timestamp adjustment
 
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 86fd8ab..9c264af 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -45,7 +45,9 @@
 #include <media/stagefright/CCodec.h>
 #include <media/stagefright/BufferProducerWrapper.h>
 #include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecMetricsConstants.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/RenderedFrameInfo.h>
 #include <utils/NativeHandle.h>
 
 #include "C2OMXNode.h"
@@ -429,6 +431,10 @@
         return mNode->getDataspace();
     }
 
+    uint32_t getPixelFormat() override {
+        return mNode->getPixelFormat();
+    }
+
 private:
     sp<HGraphicBufferSource> mSource;
     sp<C2OMXNode> mNode;
@@ -672,8 +678,7 @@
     }
 
     void onOutputFramesRendered(int64_t mediaTimeUs, nsecs_t renderTimeNs) override {
-        mCodec->mCallback->onOutputFramesRendered(
-                {RenderedFrameInfo(mediaTimeUs, renderTimeNs)});
+        mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs, renderTimeNs)});
     }
 
     void onOutputBuffersChanged() override {
@@ -862,6 +867,8 @@
         sp<Surface> surface;
         if (msg->findObject("native-window", &obj)) {
             surface = static_cast<Surface *>(obj.get());
+            int32_t generation = 0;
+            (void)msg->findInt32("native-window-generation", &generation);
             // setup tunneled playback
             if (surface != nullptr) {
                 Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
@@ -896,7 +903,7 @@
                     }
                 }
             }
-            setSurface(surface);
+            setSurface(surface, (uint32_t)generation);
         }
 
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
@@ -1536,6 +1543,9 @@
 
     config->queryConfiguration(comp);
 
+    mMetrics = new AMessage;
+    mChannel->resetBuffersPixelFormat((config->mDomain & Config::IS_ENCODER) ? true : false);
+
     mCallback->onComponentConfigured(config->mInputFormat, config->mOutputFormat);
 }
 
@@ -1915,8 +1925,16 @@
         }
         comp = state->comp;
     }
-    status_t err = comp->stop();
+
+    // Note: Logically, mChannel->stopUseOutputSurface() should come after comp->stop().
+    // However, some HAL implementations hang forever in comp->stop()
+    // (the HAL keeps waiting on a C2Fence until fetchGraphicBlock unblocks and
+    // never completes stop()).
+    // So the order is reversed: stopUseOutputSurface() notifies C2Fence waiters
+    // before comp->stop() is called.
+    // See also b/300350761.
     mChannel->stopUseOutputSurface(pushBlankBuffer);
+    status_t err = comp->stop();
     if (err != C2_OK) {
         // TODO: convert err into status_t
         mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -2004,8 +2022,15 @@
         }
         comp = state->comp;
     }
-    comp->release();
+    // Note: Logically, mChannel->stopUseOutputSurface() should come after comp->release().
+    // However, some HAL implementations hang forever in comp->release()
+    // (the HAL keeps waiting on a C2Fence until fetchGraphicBlock unblocks and
+    // never completes release()).
+    // So the order is reversed: stopUseOutputSurface() notifies C2Fence waiters
+    // before comp->release() is called.
+    // See also b/300350761.
     mChannel->stopUseOutputSurface(pushBlankBuffer);
+    comp->release();
 
     {
         Mutexed<State>::Locked state(mState);
@@ -2018,7 +2043,7 @@
     }
 }
 
-status_t CCodec::setSurface(const sp<Surface> &surface) {
+status_t CCodec::setSurface(const sp<Surface> &surface, uint32_t generation) {
     bool pushBlankBuffer = false;
     {
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
@@ -2047,7 +2072,7 @@
         }
         pushBlankBuffer = config->mPushBlankBuffersOnStop;
     }
-    return mChannel->setSurface(surface, pushBlankBuffer);
+    return mChannel->setSurface(surface, generation, pushBlankBuffer);
 }
 
 void CCodec::signalFlush() {
@@ -2137,7 +2162,7 @@
     }
 
     std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
-    status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
+    status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers, true);
     if (err != OK) {
         if (err == NO_MEMORY) {
             // NO_MEMORY happens here when all the buffers are still
@@ -2160,7 +2185,6 @@
         const std::unique_ptr<Config> &config = *configLocked;
         return config->mBuffersBoundToCodec;
     }());
-
     {
         Mutexed<State>::Locked state(mState);
         if (state->get() != RESUMING) {
@@ -2311,9 +2335,12 @@
 void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
     if (!workItems.empty()) {
         Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
+        bool shouldPost = queue->empty();
         queue->splice(queue->end(), workItems);
+        if (shouldPost) {
+            (new AMessage(kWhatWorkDone, this))->post();
+        }
     }
-    (new AMessage(kWhatWorkDone, this))->post();
 }
 
 void CCodec::onInputBufferDone(uint64_t frameIndex, size_t arrayIndex) {
@@ -2500,6 +2527,21 @@
             }
             mChannel->onWorkDone(
                     std::move(work), outputFormat, initData ? initData.get() : nullptr);
+            // log metrics to MediaCodec
+            if (mMetrics->countEntries() == 0) {
+                Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+                const std::unique_ptr<Config> &config = *configLocked;
+                uint32_t pf = PIXEL_FORMAT_UNKNOWN;
+                if (!config->mInputSurface) {
+                    pf = mChannel->getBuffersPixelFormat(config->mDomain & Config::IS_ENCODER);
+                } else {
+                    pf = config->mInputSurface->getPixelFormat();
+                }
+                if (pf != PIXEL_FORMAT_UNKNOWN) {
+                    mMetrics->setInt64(kCodecPixelFormat, pf);
+                    mCallback->onMetricsUpdated(mMetrics);
+                }
+            }
             break;
         }
         case kWhatWatch: {
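The onWorkDone() change above posts the wake-up only when the work queue goes from empty to non-empty, leaving the handler to drain the queue (and, in the real code, to re-post while items remain). A standalone sketch of that coalescing follows, with post() standing in for the AMessage post.

    #include <cstdio>
    #include <list>
    #include <mutex>

    // Sketch of the onWorkDone change: the wake-up is posted only when the queue
    // transitions from empty to non-empty, so a burst of completed work batches
    // results in a single notification instead of one per batch.
    class WorkQueue {
    public:
        void onWorkDone(std::list<int>&& workItems) {
            bool shouldPost = false;
            {
                std::lock_guard<std::mutex> lock(mMutex);
                if (workItems.empty()) return;
                shouldPost = mQueue.empty();
                mQueue.splice(mQueue.end(), workItems);
            }
            if (shouldPost) {
                post();  // stands in for (new AMessage(kWhatWorkDone, this))->post()
            }
        }

    private:
        void post() { std::printf("posting work-done notification\n"); }

        std::mutex mMutex;
        std::list<int> mQueue;
    };

    int main() {
        WorkQueue q;
        q.onWorkDone({1, 2, 3});  // queue was empty: posts once
        q.onWorkDone({4, 5});     // queue still non-empty: no extra post
        return 0;
    }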
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index c93b7d0..6b45e0e 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -25,6 +25,8 @@
 #include <atomic>
 #include <list>
 #include <numeric>
+#include <thread>
+#include <chrono>
 
 #include <C2AllocatorGralloc.h>
 #include <C2PlatformSupport.h>
@@ -598,6 +600,8 @@
     size_t bufferSize = 0;
     c2_status_t blockRes = C2_OK;
     bool copied = false;
+    ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+            "CCodecBufferChannel::decrypt(%s)", mName).c_str());
     if (mSendEncryptedInfoBuffer) {
         static const C2MemoryUsage kDefaultReadWriteUsage{
             C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
@@ -1042,6 +1046,15 @@
     if (desiredRenderTimeNs < nowNs) {
         desiredRenderTimeNs = nowNs;
     }
+
+    // If the render time is more than a second from now, then pretend the frame is supposed to be
+    // rendered immediately, because that's what SurfaceFlinger heuristics will do. This is a tight
+    // coupling, but is really the only way to optimize away unnecessary present fence checks in
+    // processRenderedFrames.
+    if (desiredRenderTimeNs > nowNs + 1*1000*1000*1000LL) {
+        desiredRenderTimeNs = nowNs;
+    }
+
     // We've just queued a frame to the surface, so keep track of it and later check to see if it is
     // actually rendered.
     TrackedFrame frame;
@@ -1119,6 +1132,17 @@
     processRenderedFrames(delta);
 }
 
+void CCodecBufferChannel::onBufferReleasedFromOutputSurface(uint32_t generation) {
+    // Note: This is called asynchronously from IProducerListener, which does not
+    // know the internal state of CCodec/CCodecBufferChannel, so hold a shared
+    // reference to mComponent to prevent it from being destroyed while this
+    // method executes.
+    std::shared_ptr<Codec2Client::Component> comp = mComponent;
+    if (comp) {
+        comp->onBufferReleasedFromOutputSurface(generation);
+    }
+}
+
 status_t CCodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
     ALOGV("[%s] discardBuffer: %p", mName, buffer.get());
     bool released = false;
@@ -1615,22 +1639,31 @@
 }
 
 status_t CCodecBufferChannel::prepareInitialInputBuffers(
-        std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers) {
+        std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers, bool retry) {
     if (mInputSurface) {
         return OK;
     }
 
     size_t numInputSlots = mInput.lock()->numSlots;
-
-    {
-        Mutexed<Input>::Locked input(mInput);
-        while (clientInputBuffers->size() < numInputSlots) {
-            size_t index;
-            sp<MediaCodecBuffer> buffer;
-            if (!input->buffers->requestNewBuffer(&index, &buffer)) {
-                break;
+    int retryCount = 1;
+    for (; clientInputBuffers->empty() && retryCount >= 0; retryCount--) {
+        {
+            Mutexed<Input>::Locked input(mInput);
+            while (clientInputBuffers->size() < numInputSlots) {
+                size_t index;
+                sp<MediaCodecBuffer> buffer;
+                if (!input->buffers->requestNewBuffer(&index, &buffer)) {
+                    break;
+                }
+                clientInputBuffers->emplace(index, buffer);
             }
-            clientInputBuffers->emplace(index, buffer);
+        }
+        if (!retry || (retryCount <= 0)) {
+            break;
+        }
+        if (clientInputBuffers->empty()) {
+            // wait: buffer may be in transit from component.
+            std::this_thread::sleep_for(std::chrono::milliseconds(4));
         }
     }
     if (clientInputBuffers->empty()) {
@@ -2263,12 +2296,8 @@
     }
 }
 
-status_t CCodecBufferChannel::setSurface(const sp<Surface> &newSurface, bool pushBlankBuffer) {
-    static std::atomic_uint32_t surfaceGeneration{0};
-    uint32_t generation = (getpid() << 10) |
-            ((surfaceGeneration.fetch_add(1, std::memory_order_relaxed) + 1)
-                & ((1 << 10) - 1));
-
+status_t CCodecBufferChannel::setSurface(const sp<Surface> &newSurface,
+                                         uint32_t generation, bool pushBlankBuffer) {
     sp<IGraphicBufferProducer> producer;
     int maxDequeueCount;
     sp<Surface> oldSurface;
@@ -2282,7 +2311,6 @@
         newSurface->setDequeueTimeout(kDequeueTimeoutNs);
         newSurface->setMaxDequeuedBufferCount(maxDequeueCount);
         producer = newSurface->getIGraphicBufferProducer();
-        producer->setGenerationNumber(generation);
     } else {
         ALOGE("[%s] setting output surface to null", mName);
         return INVALID_OPERATION;
@@ -2362,6 +2390,46 @@
     mDescrambler = descrambler;
 }
 
+uint32_t CCodecBufferChannel::getBuffersPixelFormat(bool isEncoder) {
+    if (isEncoder) {
+        return getInputBuffersPixelFormat();
+    } else {
+        return getOutputBuffersPixelFormat();
+    }
+}
+
+uint32_t CCodecBufferChannel::getInputBuffersPixelFormat() {
+    Mutexed<Input>::Locked input(mInput);
+    if (input->buffers == nullptr) {
+        return PIXEL_FORMAT_UNKNOWN;
+    }
+    return input->buffers->getPixelFormatIfApplicable();
+}
+
+uint32_t CCodecBufferChannel::getOutputBuffersPixelFormat() {
+    Mutexed<Output>::Locked output(mOutput);
+    if (output->buffers == nullptr) {
+        return PIXEL_FORMAT_UNKNOWN;
+    }
+    return output->buffers->getPixelFormatIfApplicable();
+}
+
+void CCodecBufferChannel::resetBuffersPixelFormat(bool isEncoder) {
+    if (isEncoder) {
+        Mutexed<Input>::Locked input(mInput);
+        if (input->buffers == nullptr) {
+            return;
+        }
+        input->buffers->resetPixelFormatIfApplicable();
+    } else {
+        Mutexed<Output>::Locked output(mOutput);
+        if (output->buffers == nullptr) {
+            return;
+        }
+        output->buffers->resetPixelFormatIfApplicable();
+    }
+}
+
 status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) {
     // C2_OK is always translated to OK.
     if (c2s == C2_OK) {
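prepareInitialInputBuffers() can now retry once when no input buffer is immediately available, sleeping briefly so buffers in transit from the component can come back. A standalone sketch of the retry shape follows; tryFillBuffers() stands in for the locked requestNewBuffer() loop and is rigged to succeed only on the second attempt.

    #include <chrono>
    #include <cstdio>
    #include <map>
    #include <thread>

    // Stand-in for the Mutexed<Input> requestNewBuffer loop: pretend buffers only
    // become available on the retry.
    static void tryFillBuffers(std::map<size_t, int>* buffers) {
        static int attempt = 0;
        if (++attempt > 1) {
            buffers->emplace(0, 42);
        }
    }

    // Same loop shape as the patch: one optional retry, then give up.
    static bool prepareInitialInputBuffers(std::map<size_t, int>* buffers, bool retry) {
        int retryCount = 1;
        for (; buffers->empty() && retryCount >= 0; retryCount--) {
            tryFillBuffers(buffers);
            if (!retry || retryCount <= 0) {
                break;
            }
            if (buffers->empty()) {
                // wait: buffers may be in transit from the component.
                std::this_thread::sleep_for(std::chrono::milliseconds(4));
            }
        }
        return !buffers->empty();
    }

    int main() {
        std::map<size_t, int> buffers;
        std::printf("prepared: %s\n",
                    prepareInitialInputBuffers(&buffers, true) ? "yes" : "no");
        return 0;
    }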
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 1b5c031..8dc9fb6 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -62,8 +62,8 @@
     void setCrypto(const sp<ICrypto> &crypto) override;
     void setDescrambler(const sp<IDescrambler> &descrambler) override;
 
-    virtual status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
-    virtual status_t queueSecureInputBuffer(
+    status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    status_t queueSecureInputBuffer(
             const sp<MediaCodecBuffer> &buffer,
             bool secure,
             const uint8_t *key,
@@ -73,10 +73,10 @@
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
             AString *errorDetailMsg) override;
-    virtual status_t attachBuffer(
+    status_t attachBuffer(
             const std::shared_ptr<C2Buffer> &c2Buffer,
             const sp<MediaCodecBuffer> &buffer) override;
-    virtual status_t attachEncryptedBuffer(
+    status_t attachEncryptedBuffer(
             const sp<hardware::HidlMemory> &memory,
             bool secure,
             const uint8_t *key,
@@ -88,12 +88,13 @@
             size_t numSubSamples,
             const sp<MediaCodecBuffer> &buffer,
             AString* errorDetailMsg) override;
-    virtual status_t renderOutputBuffer(
+    status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
-    virtual void pollForRenderedBuffers() override;
-    virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
-    virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
-    virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+    void pollForRenderedBuffers() override;
+    void onBufferReleasedFromOutputSurface(uint32_t generation) override;
+    status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+    void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
 
     // Methods below are interface for CCodec to use.
 
@@ -105,7 +106,7 @@
     /**
      * Set output graphic surface for rendering.
      */
-    status_t setSurface(const sp<Surface> &surface, bool pushBlankBuffer);
+    status_t setSurface(const sp<Surface> &surface, uint32_t generation, bool pushBlankBuffer);
 
     /**
      * Set GraphicBufferSource object from which the component extracts input
@@ -140,7 +141,8 @@
      *                                  initial input buffers.
      */
     status_t prepareInitialInputBuffers(
-            std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers);
+            std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers,
+            bool retry = false);
 
     /**
      * Request initial input buffers as prepared in clientInputBuffers.
@@ -206,7 +208,20 @@
 
     void setMetaMode(MetaMode mode);
 
+    /**
+     * Gets the pixel format from the input buffers (encoder) or the output buffers (decoder).
+     *
+     * @return PIXEL_FORMAT_UNKNOWN (0) if no valid pixel format is found.
+     */
+    uint32_t getBuffersPixelFormat(bool isEncoder);
+
+    void resetBuffersPixelFormat(bool isEncoder);
+
 private:
+    uint32_t getInputBuffersPixelFormat();
+
+    uint32_t getOutputBuffersPixelFormat();
+
     class QueueGuard;
 
     /**
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 0f4a8d8..670923b 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "CCodecBuffers"
 #include <utils/Log.h>
 
+#include <C2AllocatorGralloc.h>
 #include <C2PlatformSupport.h>
 
 #include <media/stagefright/foundation/ADebug.h>
@@ -121,6 +122,10 @@
     buffer->setFormat(mFormatWithImageData);
 }
 
+uint32_t CCodecBuffers::getPixelFormatIfApplicable() { return PIXEL_FORMAT_UNKNOWN; }
+
+bool CCodecBuffers::resetPixelFormatIfApplicable() { return false; }
+
 // InputBuffers
 
 sp<Codec2Buffer> InputBuffers::cloneAndReleaseBuffer(const sp<MediaCodecBuffer> &buffer) {
@@ -1043,7 +1048,8 @@
         const char *componentName, const char *name)
     : InputBuffers(componentName, name),
       mImpl(mName),
-      mLocalBufferPool(LocalBufferPool::Create()) { }
+      mLocalBufferPool(LocalBufferPool::Create()),
+      mPixelFormat(PIXEL_FORMAT_UNKNOWN) { }
 
 bool GraphicInputBuffers::requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) {
     sp<Codec2Buffer> newBuffer = createNewBuffer();
@@ -1109,8 +1115,16 @@
 
 sp<Codec2Buffer> GraphicInputBuffers::createNewBuffer() {
     C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+    mPixelFormat = extractPixelFormat(mFormat);
     return AllocateInputGraphicBuffer(
-            mPool, mFormat, extractPixelFormat(mFormat), usage, mLocalBufferPool);
+            mPool, mFormat, mPixelFormat, usage, mLocalBufferPool);
+}
+
+uint32_t GraphicInputBuffers::getPixelFormatIfApplicable() { return mPixelFormat; }
+
+bool GraphicInputBuffers::resetPixelFormatIfApplicable() {
+    mPixelFormat = PIXEL_FORMAT_UNKNOWN;
+    return true;
 }
 
 // OutputBuffersArray
@@ -1269,6 +1283,8 @@
     *index = mImpl.assignSlot(newBuffer);
     handleImageData(newBuffer);
     *clientBuffer = newBuffer;
+
+    extractPixelFormatFromC2Buffer(buffer);
     ALOGV("[%s] registered buffer %zu", mName, *index);
     return OK;
 }
@@ -1309,6 +1325,32 @@
     return mImpl.numActiveSlots();
 }
 
+bool FlexOutputBuffers::extractPixelFormatFromC2Buffer(const std::shared_ptr<C2Buffer> &buffer) {
+    if (buffer == nullptr) {
+        return false;
+    }
+    const C2BufferData &data = buffer->data();
+    // only extract the first pixel format in a metric session.
+    if (mPixelFormat != PIXEL_FORMAT_UNKNOWN || data.type() != C2BufferData::GRAPHIC
+            || data.graphicBlocks().empty()) {
+        return false;
+    }
+    const C2Handle *const handle = data.graphicBlocks().front().handle();
+    uint32_t pf = ExtractFormatFromCodec2GrallocHandle(handle);
+    if (pf == PIXEL_FORMAT_UNKNOWN) {
+        return false;
+    }
+    mPixelFormat = pf;
+    return true;
+}
+
+bool FlexOutputBuffers::resetPixelFormatIfApplicable() {
+    mPixelFormat = PIXEL_FORMAT_UNKNOWN;
+    return true;
+}
+
+uint32_t FlexOutputBuffers::getPixelFormatIfApplicable() { return mPixelFormat; }
+
 // LinearOutputBuffers
 
 void LinearOutputBuffers::flush(
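The buffer classes latch only the first valid pixel format seen after a reset, so each metric session reports the format it started with. A standalone sketch of that latch follows; PixelFormatLatch and the format values are illustrative, not the real CCodecBuffers classes.

    #include <cstdint>
    #include <cstdio>

    constexpr uint32_t kPixelFormatUnknown = 0;  // stands in for PIXEL_FORMAT_UNKNOWN

    // Per-session pixel-format latch: only the first valid format seen after a
    // reset is recorded, mirroring extractPixelFormatFromC2Buffer() and the
    // *PixelFormatIfApplicable() accessors.
    class PixelFormatLatch {
    public:
        bool record(uint32_t format) {
            if (mPixelFormat != kPixelFormatUnknown || format == kPixelFormatUnknown) {
                return false;
            }
            mPixelFormat = format;
            return true;
        }
        uint32_t get() const { return mPixelFormat; }         // getPixelFormatIfApplicable()
        void reset() { mPixelFormat = kPixelFormatUnknown; }  // resetPixelFormatIfApplicable()

    private:
        uint32_t mPixelFormat = kPixelFormatUnknown;
    };

    int main() {
        PixelFormatLatch latch;
        latch.record(0x23);  // first format of the session; value is illustrative only
        latch.record(0x2B);  // ignored: a format was already latched this session
        std::printf("latched format: 0x%x\n", latch.get());
        latch.reset();       // new metric session
        return 0;
    }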
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index 6335f13..cbef644 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -81,6 +81,16 @@
      */
     void handleImageData(const sp<Codec2Buffer> &buffer);
 
+    /**
+     * Get the first pixel format of a metric session.
+     */
+    virtual uint32_t getPixelFormatIfApplicable();
+
+    /**
+     * Reset the pixel format when a new metric session starts.
+     */
+    virtual bool resetPixelFormatIfApplicable();
+
 protected:
     std::string mComponentName; ///< name of component for debugging
     std::string mChannelName; ///< name of channel for debugging
@@ -938,12 +948,17 @@
 
     size_t numActiveSlots() const final;
 
+    uint32_t getPixelFormatIfApplicable() override;
+
+    bool resetPixelFormatIfApplicable() override;
+
 protected:
     sp<Codec2Buffer> createNewBuffer() override;
 
 private:
     FlexBuffersImpl mImpl;
     std::shared_ptr<LocalBufferPool> mLocalBufferPool;
+    uint32_t mPixelFormat;
 };
 
 class DummyInputBuffers : public InputBuffers {
@@ -1064,7 +1079,8 @@
 public:
     FlexOutputBuffers(const char *componentName, const char *name = "Output[]")
         : OutputBuffers(componentName, name),
-          mImpl(mName) { }
+          mImpl(mName),
+          mPixelFormat(0) { }
 
     status_t registerBuffer(
             const std::shared_ptr<C2Buffer> &buffer,
@@ -1107,8 +1123,20 @@
      */
     virtual std::function<sp<Codec2Buffer>()> getAlloc() = 0;
 
+    uint32_t getPixelFormatIfApplicable() override;
+
+    bool resetPixelFormatIfApplicable() override;
 private:
     FlexBuffersImpl mImpl;
+
+    uint32_t mPixelFormat;
+
+    /**
+     * Extract the pixel format from a C2Buffer when it is registered.
+     *
+     * \param buffer   The C2Buffer used to extract pixel format.
+     */
+    bool extractPixelFormatFromC2Buffer(const std::shared_ptr<C2Buffer> &buffer);
 };
 
 class LinearOutputBuffers : public FlexOutputBuffers {
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 3bb6593..5c1755e 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -1049,7 +1049,11 @@
         // Unwrap raw buffer handle from the C2Handle
         native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
         if (!nh) {
-            return;
+            nh = UnwrapNativeCodec2AhwbHandle(handle);
+            if (!nh) {
+                ALOGE("handle is not compatible to neither C2HandleGralloc nor C2HandleAhwb");
+                return;
+            }
         }
         // Import the raw handle so IMapper can use the buffer. The imported
         // handle must be freed when the client is done with the buffer.
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 44ba78a..4bf6cd0 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -28,7 +28,8 @@
 class InputSurfaceWrapper {
 public:
     InputSurfaceWrapper()
-        : mDataSpace(HAL_DATASPACE_UNKNOWN) {
+        : mDataSpace(HAL_DATASPACE_UNKNOWN),
+          mPixelFormat(PIXEL_FORMAT_UNKNOWN) {
     }
 
     virtual ~InputSurfaceWrapper() = default;
@@ -112,8 +113,14 @@
      */
     virtual android_dataspace getDataspace() { return mDataSpace; }
 
+    /**
+     * Returns pixel format information from GraphicBufferSource.
+     */
+    virtual uint32_t getPixelFormat() { return mPixelFormat; }
+
 protected:
     android_dataspace mDataSpace;
+    uint32_t mPixelFormat;
 };
 
 }  // namespace android
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index 13713bc..2b1cf60 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -56,7 +56,7 @@
     virtual void initiateStart() override;
     virtual void initiateShutdown(bool keepComponentAllocated = false) override;
 
-    virtual status_t setSurface(const sp<Surface> &surface) override;
+    virtual status_t setSurface(const sp<Surface> &surface, uint32_t generation) override;
 
     virtual void signalFlush() override;
     virtual void signalResume() override;
@@ -205,6 +205,8 @@
     Mutexed<std::unique_ptr<CCodecConfig>> mConfig;
     Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
 
+    sp<AMessage> mMetrics;
+
     friend class CCodecCallbackImpl;
 
     DISALLOW_EVIL_CONSTRUCTORS(CCodec);
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index 7492cab..ff72b1f 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -47,35 +47,12 @@
 }
 
 static bool isP010Allowed() {
-    // The first SDK the device shipped with.
-    static const int32_t kProductFirstApiLevel =
-        base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
+    // The vendor API level, i.e. min(ro.product.first_api_level, ro.board.[first_]api_level).
+    // This is the API level whose VSR requirements the device conforms to.
+    static const int32_t kVendorApiLevel =
+        base::GetIntProperty<int32_t>("ro.vendor.api_level", 0);
 
-    // GRF devices (introduced in Android 11) list the first and possibly the current api levels
-    // to signal which VSR requirements they conform to even if the first device SDK was higher.
-    static const int32_t kBoardFirstApiLevel =
-        base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
-
-    // Some devices that launched prior to Android S may not support P010 correctly, even
-    // though they may advertise it as supported.
-    if (kProductFirstApiLevel != 0 && kProductFirstApiLevel < __ANDROID_API_S__) {
-        return false;
-    }
-
-    if (kBoardFirstApiLevel != 0 && kBoardFirstApiLevel < __ANDROID_API_S__) {
-        return false;
-    }
-
-    static const int32_t kBoardApiLevel =
-        base::GetIntProperty<int32_t>("ro.board.api_level", 0);
-
-    // For non-GRF devices, use the first SDK version by the product.
-    static const int32_t kFirstApiLevel =
-        kBoardApiLevel != 0 ? kBoardApiLevel :
-        kBoardFirstApiLevel != 0 ? kBoardFirstApiLevel :
-        kProductFirstApiLevel;
-
-    return kFirstApiLevel >= __ANDROID_API_T__;
+    return kVendorApiLevel >= __ANDROID_API_T__;
 }
 
 bool isHalPixelFormatSupported(AHardwareBuffer_Format format) {
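The reworked gate above collapses the product/board first-API-level heuristics into a single ro.vendor.api_level comparison against Android T (API 33). A standalone sketch of that check follows, with getIntProperty() reading an environment variable as a stand-in for android::base::GetIntProperty().

    #include <cstdio>
    #include <cstdlib>

    // Stand-in for android::base::GetIntProperty(): the "property" is read from an
    // environment variable purely so the sketch runs anywhere.
    static int getIntProperty(const char* name, int defaultValue) {
        const char* value = std::getenv(name);
        return value ? std::atoi(value) : defaultValue;
    }

    // Simplified P010 gate: a single vendor API level check replaces the old
    // combination of product/board first_api_level heuristics.
    static bool isP010Allowed() {
        constexpr int kAndroidApiT = 33;  // __ANDROID_API_T__
        int vendorApiLevel = getIntProperty("ro.vendor.api_level", 0);
        return vendorApiLevel >= kAndroidApiT;
    }

    int main() {
        std::printf("P010 allowed: %s\n", isP010Allowed() ? "yes" : "no");
        return 0;
    }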
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index 2217235..02c356c 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -27,6 +27,7 @@
     static_libs: [
     ],
 
+    cpp_std: "gnu++17",
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/codec2/tests/aidl/Android.bp b/media/codec2/tests/aidl/Android.bp
new file mode 100644
index 0000000..2ad245c
--- /dev/null
+++ b/media/codec2/tests/aidl/Android.bp
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "c2aidl_gtracker_test",
+    test_suites: ["device-tests"],
+    defaults: [
+        "libcodec2-aidl-client-defaults",
+    ],
+
+    header_libs: [
+        "libcodec2_client_headers",
+        "libcodec2_internal",
+        "libcodec2_vndk_headers",
+    ],
+
+    srcs: [
+        "GraphicsTracker_test.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "libcodec2_client",
+        "libgui",
+        "libnativewindow",
+        "libui",
+    ],
+}
diff --git a/media/codec2/tests/aidl/GraphicsTracker_test.cpp b/media/codec2/tests/aidl/GraphicsTracker_test.cpp
new file mode 100644
index 0000000..9008086
--- /dev/null
+++ b/media/codec2/tests/aidl/GraphicsTracker_test.cpp
@@ -0,0 +1,820 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GraphicsTracker_test"
+#include <unistd.h>
+
+#include <android/hardware_buffer.h>
+#include <codec2/aidl/GraphicsTracker.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/ProcessState.h>
+#include <gtest/gtest.h>
+#include <gui/BufferQueue.h>
+#include <gui/IProducerListener.h>
+#include <gui/IConsumerListener.h>
+#include <gui/Surface.h>
+#include <private/android/AHardwareBufferHelpers.h>
+
+#include <C2BlockInternal.h>
+#include <C2FenceFactory.h>
+
+#include <atomic>
+#include <memory>
+#include <iostream>
+#include <thread>
+
+using ::aidl::android::hardware::media::c2::implementation::GraphicsTracker;
+using ::android::BufferItem;
+using ::android::BufferQueue;
+using ::android::Fence;
+using ::android::GraphicBuffer;
+using ::android::IGraphicBufferProducer;
+using ::android::IGraphicBufferConsumer;
+using ::android::IProducerListener;
+using ::android::IConsumerListener;
+using ::android::OK;
+using ::android::sp;
+using ::android::wp;
+
+namespace {
+struct BqStatistics {
+    std::atomic<int> mDequeued;
+    std::atomic<int> mQueued;
+    std::atomic<int> mBlocked;
+    std::atomic<int> mDropped;
+    std::atomic<int> mDiscarded;
+    std::atomic<int> mReleased;
+
+    void log() {
+        ALOGD("Dequeued: %d, Queued: %d, Blocked: %d, "
+              "Dropped: %d, Discarded %d, Released %d",
+              (int)mDequeued, (int)mQueued, (int)mBlocked,
+              (int)mDropped, (int)mDiscarded, (int)mReleased);
+    }
+
+    void clear() {
+        mDequeued = 0;
+        mQueued = 0;
+        mBlocked = 0;
+        mDropped = 0;
+        mDiscarded = 0;
+        mReleased = 0;
+    }
+};
+
+struct DummyConsumerListener : public android::BnConsumerListener {
+    void onFrameAvailable(const BufferItem& /* item */) override {}
+    void onBuffersReleased() override {}
+    void onSidebandStreamChanged() override {}
+};
+
+struct TestConsumerListener : public android::BnConsumerListener {
+    TestConsumerListener(const sp<IGraphicBufferConsumer> &consumer)
+            : BnConsumerListener(), mConsumer(consumer) {}
+    void onFrameAvailable(const BufferItem&) override {
+        constexpr static int kRenderDelayUs = 1000000/30; // 30fps
+        BufferItem buffer;
+        // consume buffer
+        sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
+        if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == android::NO_ERROR) {
+            ::usleep(kRenderDelayUs);
+            consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
+                                    EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
+        }
+    }
+    void onBuffersReleased() override {}
+    void onSidebandStreamChanged() override {}
+
+    wp<IGraphicBufferConsumer> mConsumer;
+};
+
+struct TestProducerListener : public android::BnProducerListener {
+    TestProducerListener(std::shared_ptr<GraphicsTracker> tracker,
+                         std::shared_ptr<BqStatistics> &stat,
+                         uint32_t generation) : BnProducerListener(),
+        mTracker(tracker), mStat(stat), mGeneration(generation) {}
+    virtual void onBufferReleased() override {
+        auto tracker = mTracker.lock();
+        if (tracker) {
+            mStat->mReleased++;
+            tracker->onReleased(mGeneration);
+        }
+    }
+    virtual bool needsReleaseNotify() override { return true; }
+    virtual void onBuffersDiscarded(const std::vector<int32_t>&) override {}
+
+    std::weak_ptr<GraphicsTracker> mTracker;
+    std::shared_ptr<BqStatistics> mStat;
+    uint32_t mGeneration;
+};
+
+struct Frame {
+    AHardwareBuffer *buffer_;
+    sp<Fence> fence_;
+
+    Frame() : buffer_{nullptr}, fence_{nullptr} {}
+    Frame(AHardwareBuffer *buffer, sp<Fence> fence)
+            : buffer_(buffer), fence_(fence) {}
+    ~Frame() {
+        if (buffer_) {
+            AHardwareBuffer_release(buffer_);
+        }
+    }
+};
+
+struct FrameQueue {
+    bool mStopped;
+    bool mDrain;
+    std::queue<std::shared_ptr<Frame>> mQueue;
+    std::mutex mMutex;
+    std::condition_variable mCond;
+
+    FrameQueue() : mStopped{false}, mDrain{false} {}
+
+    bool queueItem(AHardwareBuffer *buffer, sp<Fence> fence) {
+        std::shared_ptr<Frame> frame = std::make_shared<Frame>(buffer, fence);
+        if (mStopped) {
+            return false;
+        }
+        if (!frame) {
+            return false;
+        }
+        std::unique_lock<std::mutex> l(mMutex);
+        mQueue.emplace(frame);
+        l.unlock();
+        mCond.notify_all();
+        return true;
+    }
+
+    void stop(bool drain = false) {
+        bool stopped = false;
+        {
+            std::unique_lock<std::mutex> l(mMutex);
+            if (!mStopped) {
+                mStopped = true;
+                mDrain = drain;
+                stopped = true;
+            }
+            l.unlock();
+            if (stopped) {
+                mCond.notify_all();
+            }
+        }
+    }
+
+    bool waitItem(std::shared_ptr<Frame> *frame) {
+        while (true) {
+            std::unique_lock<std::mutex> l(mMutex);
+            if (!mDrain && mStopped) {
+                // stop without consuming the queue.
+                return false;
+            }
+            if (!mQueue.empty()) {
+                *frame = mQueue.front();
+                mQueue.pop();
+                return true;
+            } else if (mStopped) {
+                // stop after consuming the queue.
+                return false;
+            }
+            mCond.wait(l);
+        }
+    }
+};
+
+} // namespace anonymous
+
+class GraphicsTrackerTest : public ::testing::Test {
+public:
+    const uint64_t kTestUsageFlag = GRALLOC_USAGE_SW_WRITE_OFTEN;
+
+    void queueBuffer(FrameQueue *queue) {
+        while (true) {
+            std::shared_ptr<Frame> frame;
+            if (!queue->waitItem(&frame)) {
+                break;
+            }
+            uint64_t bid;
+            if (__builtin_available(android __ANDROID_API_T__, *)) {
+                if (AHardwareBuffer_getId(frame->buffer_, &bid) !=
+                        android::NO_ERROR) {
+                    break;
+                }
+            } else {
+                break;
+            }
+            android::status_t ret = frame->fence_->wait(-1);
+            if (ret != android::NO_ERROR) {
+                mTracker->deallocate(bid, frame->fence_);
+                mBqStat->mDiscarded++;
+                continue;
+            }
+
+            std::shared_ptr<C2GraphicBlock> blk =
+                    _C2BlockFactory::CreateGraphicBlock(frame->buffer_);
+            if (!blk) {
+                mTracker->deallocate(bid, Fence::NO_FENCE);
+                mBqStat->mDiscarded++;
+                continue;
+            }
+            IGraphicBufferProducer::QueueBufferInput input(
+                    0, false,
+                    HAL_DATASPACE_UNKNOWN, android::Rect(0, 0, 1, 1),
+                    NATIVE_WINDOW_SCALING_MODE_FREEZE, 0, Fence::NO_FENCE);
+            IGraphicBufferProducer::QueueBufferOutput output{};
+            c2_status_t res = mTracker->render(
+                    blk->share(C2Rect(1, 1), C2Fence()),
+                    input, &output);
+            if (res != C2_OK) {
+                mTracker->deallocate(bid, Fence::NO_FENCE);
+                mBqStat->mDiscarded++;
+                continue;
+            }
+            if (output.bufferReplaced) {
+                mBqStat->mDropped++;
+            }
+            mBqStat->mQueued++;
+        }
+    }
+
+    void stopTrackerAfterUs(int us) {
+        ::usleep(us);
+        mTracker->stop();
+    }
+
+protected:
+    bool init(int maxDequeueCount) {
+        mTracker = GraphicsTracker::CreateGraphicsTracker(maxDequeueCount);
+        if (!mTracker) {
+            return false;
+        }
+        BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+        if (!mProducer || !mConsumer) {
+            return false;
+        }
+        return true;
+    }
+    bool configure(sp<IProducerListener> producerListener,
+                   sp<IConsumerListener> consumerListener,
+                   int maxAcquiredCount = 1, bool controlledByApp = true) {
+        if (mConsumer->consumerConnect(
+                consumerListener, controlledByApp) != ::android::NO_ERROR) {
+            return false;
+        }
+        if (mConsumer->setMaxAcquiredBufferCount(maxAcquiredCount) != ::android::NO_ERROR) {
+            return false;
+        }
+        IGraphicBufferProducer::QueueBufferOutput qbo{};
+        if (mProducer->connect(producerListener,
+                          NATIVE_WINDOW_API_MEDIA, true, &qbo) != ::android::NO_ERROR) {
+            return false;
+        }
+        if (mProducer->setDequeueTimeout(0) != ::android::NO_ERROR) {
+            return false;
+        }
+        return true;
+    }
+
+    virtual void TearDown() override {
+        mBqStat->log();
+        mBqStat->clear();
+
+        if (mTracker) {
+            mTracker->stop();
+            mTracker.reset();
+        }
+        if (mProducer) {
+            mProducer->disconnect(NATIVE_WINDOW_API_MEDIA);
+        }
+        mProducer.clear();
+        mConsumer.clear();
+    }
+
+protected:
+    std::shared_ptr<BqStatistics> mBqStat = std::make_shared<BqStatistics>();
+    sp<IGraphicBufferProducer> mProducer;
+    sp<IGraphicBufferConsumer> mConsumer;
+    std::shared_ptr<GraphicsTracker> mTracker;
+};
+
+
+TEST_F(GraphicsTrackerTest, AllocateAndBlockedTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 10;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new DummyConsumerListener()));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    c2_status_t ret = mTracker->configureGraphics(mProducer, generation);
+    ASSERT_EQ(C2_OK, ret);
+    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
+
+    AHardwareBuffer *buf;
+    sp<Fence> fence;
+    uint64_t bid;
+
+    // Allocate and check dequeueable
+    if (__builtin_available(android __ANDROID_API_T__, *)) {
+        for (int i = 0; i < maxDequeueCount; ++i) {
+            ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
+            ASSERT_EQ(C2_OK, ret);
+            mBqStat->mDequeued++;
+            ASSERT_EQ(maxDequeueCount - (i + 1), mTracker->getCurDequeueable());
+            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bid));
+            ALOGD("alloced : bufferId: %llu", (unsigned long long)bid);
+            AHardwareBuffer_release(buf);
+        }
+    } else {
+        GTEST_SKIP();
+    }
+
+    // Allocate should be blocked
+    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
+    ALOGD("alloc : err(%d, %d)", ret, C2_BLOCKING);
+    ASSERT_EQ(C2_BLOCKING, ret);
+    mBqStat->mBlocked++;
+    ASSERT_EQ(0, mTracker->getCurDequeueable());
+}
+
+TEST_F(GraphicsTrackerTest, AllocateAndDeallocateTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 10;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new DummyConsumerListener()));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    c2_status_t ret = mTracker->configureGraphics(mProducer, generation);
+    ASSERT_EQ(C2_OK, ret);
+
+    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
+    AHardwareBuffer *buf;
+    sp<Fence> fence;
+    uint64_t bid;
+    std::vector<uint64_t> bids;
+
+    // Allocate and store buffer id
+    if (__builtin_available(android __ANDROID_API_T__, *)) {
+        for (int i = 0; i < maxDequeueCount; ++i) {
+            ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
+            ASSERT_EQ(C2_OK, ret);
+            mBqStat->mDequeued++;
+            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bid));
+            bids.push_back(bid);
+            ALOGD("alloced : bufferId: %llu", (unsigned long long)bid);
+            AHardwareBuffer_release(buf);
+        }
+    } else {
+        GTEST_SKIP();
+    }
+
+    // Deallocate and check dequeueable
+    for (int i = 0; i < maxDequeueCount; ++i) {
+        ALOGD("dealloc : bufferId: %llu", (unsigned long long)bids[i]);
+        ret = mTracker->deallocate(bids[i], Fence::NO_FENCE);
+        ASSERT_EQ(C2_OK, ret);
+        ASSERT_EQ(i + 1, mTracker->getCurDequeueable());
+        mBqStat->mDiscarded++;
+    }
+}
+
+TEST_F(GraphicsTrackerTest, DropAndReleaseTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 10;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new DummyConsumerListener()));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    c2_status_t ret = mTracker->configureGraphics(mProducer, generation);
+    ASSERT_EQ(C2_OK, ret);
+
+    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
+
+    FrameQueue frameQueue;
+    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);
+    AHardwareBuffer *buf1, *buf2;
+    sp<Fence> fence1, fence2;
+
+    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf1, &fence1);
+    ASSERT_EQ(C2_OK, ret);
+    mBqStat->mDequeued++;
+    ASSERT_EQ(maxDequeueCount - 1, mTracker->getCurDequeueable());
+
+    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf2, &fence2);
+    ASSERT_EQ(C2_OK, ret);
+    mBqStat->mDequeued++;
+    ASSERT_EQ(maxDequeueCount - 2, mTracker->getCurDequeueable());
+
+    // Queue two buffers without consuming, one should be dropped
+    ASSERT_TRUE(frameQueue.queueItem(buf1, fence1));
+    ASSERT_TRUE(frameQueue.queueItem(buf2, fence2));
+
+    frameQueue.stop(true);
+    if (queueThread.joinable()) {
+        queueThread.join();
+    }
+
+    ASSERT_EQ(maxDequeueCount - 1, mTracker->getCurDequeueable());
+
+    // Consume one buffer and release
+    BufferItem item;
+    ASSERT_EQ(OK, mConsumer->acquireBuffer(&item, 0));
+    ASSERT_EQ(OK, mConsumer->releaseBuffer(item.mSlot, item.mFrameNumber,
+            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence));
+    // Nothing to consume
+    ASSERT_NE(OK, mConsumer->acquireBuffer(&item, 0));
+
+    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
+    ASSERT_EQ(1, mBqStat->mReleased);
+    ASSERT_EQ(1, mBqStat->mDropped);
+}
+
+TEST_F(GraphicsTrackerTest, RenderTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 10;
+    const int maxNumAlloc = 20;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+
+    int waitFd = -1;
+    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
+    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
+
+
+    FrameQueue frameQueue;
+    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);
+
+    int numAlloc = 0;
+
+    while (numAlloc < maxNumAlloc) {
+        AHardwareBuffer *buf;
+        sp<Fence> fence;
+        c2_status_t ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
+        if (ret == C2_BLOCKING) {
+            mBqStat->mBlocked++;
+            c2_status_t waitRes = waitFence.wait(3000000000);
+            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
+                continue;
+            }
+            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
+            break;
+        }
+        if (ret != C2_OK) {
+            ALOGE("alloc error: c2_err(%d)", ret);
+            break;
+        }
+        mBqStat->mDequeued++;
+        if (!frameQueue.queueItem(buf, fence)) {
+            ALOGE("queue to render failed");
+            break;
+        }
+        ++numAlloc;
+    }
+
+    frameQueue.stop(true);
+    // Wait more than enough time (1 sec) to make sure all queued frames are rendered.
+    ::usleep(1000000);
+
+    if (queueThread.joinable()) {
+        queueThread.join();
+    }
+    ASSERT_EQ(numAlloc, maxNumAlloc);
+    ASSERT_EQ(numAlloc, mBqStat->mDequeued);
+    ASSERT_EQ(mBqStat->mDequeued, mBqStat->mQueued);
+    ASSERT_EQ(mBqStat->mDequeued, mBqStat->mReleased + mBqStat->mDropped);
+}
+
+TEST_F(GraphicsTrackerTest, StopAndWaitTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 2;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+
+    int waitFd = -1;
+    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
+    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
+
+    AHardwareBuffer *buf1, *buf2;
+    sp<Fence> fence;
+
+    ASSERT_EQ(C2_OK, mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf1, &fence));
+    mBqStat->mDequeued++;
+    AHardwareBuffer_release(buf1);
+
+    ASSERT_EQ(C2_OK, mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf2, &fence));
+    mBqStat->mDequeued++;
+    AHardwareBuffer_release(buf2);
+
+    ASSERT_EQ(0, mTracker->getCurDequeueable());
+    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(3000000000));
+
+    std::thread stopThread(&GraphicsTrackerTest::stopTrackerAfterUs, this, 500000);
+    ASSERT_EQ(C2_BAD_STATE, waitFence.wait(3000000000));
+
+    if (stopThread.joinable()) {
+        stopThread.join();
+    }
+}
+
+TEST_F(GraphicsTrackerTest, SurfaceChangeTest) {
+    uint32_t generation = 1;
+    const int maxDequeueCount = 10;
+
+    const int maxNumAlloc = 20;
+
+    const int firstPassAlloc = 12;
+    const int firstPassRender = 8;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+
+    int waitFd = -1;
+    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
+    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
+
+    AHardwareBuffer *bufs[maxNumAlloc];
+    sp<Fence> fences[maxNumAlloc];
+
+    FrameQueue frameQueue;
+    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);
+    int numAlloc = 0;
+
+    for (int i = 0; i < firstPassRender; ++i) {
+        ASSERT_EQ(C2_OK, mTracker->allocate(
+                0, 0, 0, kTestUsageFlag, &bufs[i], &fences[i]));
+        mBqStat->mDequeued++;
+        numAlloc++;
+        ASSERT_EQ(true, frameQueue.queueItem(bufs[i], fences[i]));
+    }
+
+    while (numAlloc < firstPassAlloc) {
+        c2_status_t ret = mTracker->allocate(
+                0, 0, 0, kTestUsageFlag, &bufs[numAlloc], &fences[numAlloc]);
+        if (ret == C2_BLOCKING) {
+            mBqStat->mBlocked++;
+            c2_status_t waitRes = waitFence.wait(3000000000);
+            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
+                continue;
+            }
+            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
+            break;
+        }
+        if (ret != C2_OK) {
+            ALOGE("alloc error: c2_err(%d)", ret);
+            break;
+        }
+        mBqStat->mDequeued++;
+        numAlloc++;
+    }
+    ASSERT_EQ(numAlloc, firstPassAlloc);
+
+    // switching surface
+    sp<IGraphicBufferProducer> oldProducer = mProducer;
+    sp<IGraphicBufferConsumer> oldConsumer = mConsumer;
+    mProducer.clear();
+    mConsumer.clear();
+    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+    ASSERT_TRUE((bool)mProducer && (bool)mConsumer);
+
+    generation += 1;
+
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+
+    ASSERT_EQ(OK, oldProducer->disconnect(NATIVE_WINDOW_API_MEDIA));
+    oldProducer.clear();
+    oldConsumer.clear();
+
+    for (int i = firstPassRender ; i < firstPassAlloc; ++i) {
+        ASSERT_EQ(true, frameQueue.queueItem(bufs[i], fences[i]));
+    }
+
+    while (numAlloc < maxNumAlloc) {
+        AHardwareBuffer *buf;
+        sp<Fence> fence;
+        c2_status_t ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
+        if (ret == C2_BLOCKING) {
+            mBqStat->mBlocked++;
+            c2_status_t waitRes = waitFence.wait(3000000000);
+            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
+                continue;
+            }
+            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
+            break;
+        }
+        if (ret != C2_OK) {
+            ALOGE("alloc error: c2_err(%d)", ret);
+            break;
+        }
+        mBqStat->mDequeued++;
+        if (!frameQueue.queueItem(buf, fence)) {
+            ALOGE("queue to render failed");
+            break;
+        }
+        ++numAlloc;
+    }
+
+    ASSERT_EQ(numAlloc, maxNumAlloc);
+
+    frameQueue.stop(true);
+    // Wait more than enough time (1 sec) to make sure all queued frames are rendered.
+    ::usleep(1000000);
+
+    if (queueThread.joinable()) {
+        queueThread.join();
+    }
+    // mReleased should not be checked here: IProducerListener::onBufferReleased()
+    // callbacks from the previous Surface may be missing after a new Surface is
+    // configured. Check the number of dequeueable buffers and queueBuffer() calls instead.
+    ASSERT_EQ(numAlloc, mBqStat->mQueued);
+    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
+
+    for (int i = 0; i < maxDequeueCount; ++i) {
+        AHardwareBuffer *buf;
+        sp<Fence> fence;
+
+        ASSERT_EQ(C2_OK, mTracker->allocate(
+                0, 0, 0, kTestUsageFlag, &buf, &fence));
+        AHardwareBuffer_release(buf);
+        mBqStat->mDequeued++;
+        numAlloc++;
+    }
+    ASSERT_EQ(C2_BLOCKING, mTracker->allocate(
+            0, 0, 0, kTestUsageFlag, &bufs[0], &fences[0]));
+}
+
+TEST_F(GraphicsTrackerTest, maxDequeueIncreaseTest) {
+    uint32_t generation = 1;
+    int maxDequeueCount = 10;
+    int dequeueIncrease = 4;
+
+    int numAlloc = 0;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+
+    int waitFd = -1;
+    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
+    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
+
+    AHardwareBuffer *buf;
+    sp<Fence> fence;
+    uint64_t bids[maxDequeueCount];
+    if (__builtin_available(android __ANDROID_API_T__, *)) {
+        for (int i = 0; i < maxDequeueCount; ++i) {
+            ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+            ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bids[i]));
+            AHardwareBuffer_release(buf);
+            mBqStat->mDequeued++;
+            numAlloc++;
+        }
+    } else {
+        GTEST_SKIP();
+    }
+    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+
+    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[0], Fence::NO_FENCE));
+    mBqStat->mDiscarded++;
+
+    maxDequeueCount += dequeueIncrease;
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+    for (int i = 0; i < dequeueIncrease + 1; ++i) {
+        ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+        ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+        AHardwareBuffer_release(buf);
+        mBqStat->mDequeued++;
+        numAlloc++;
+    }
+    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+
+    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[1], Fence::NO_FENCE));
+    mBqStat->mDiscarded++;
+
+    maxDequeueCount += dequeueIncrease;
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+    for (int i = 0; i < dequeueIncrease + 1; ++i) {
+        ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+        ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+        AHardwareBuffer_release(buf);
+        mBqStat->mDequeued++;
+        numAlloc++;
+    }
+    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+}
+
+TEST_F(GraphicsTrackerTest, maxDequeueDecreaseTest) {
+    uint32_t generation = 1;
+    int maxDequeueCount = 12;
+    int dequeueDecrease = 4;
+
+    int numAlloc = 0;
+
+    ASSERT_TRUE(init(maxDequeueCount));
+    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
+                          new TestConsumerListener(mConsumer), 1, false));
+
+    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
+    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
+
+    int waitFd = -1;
+    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
+    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);
+
+    AHardwareBuffer *buf;
+    sp<Fence> fence;
+    uint64_t bids[maxDequeueCount];
+    if (__builtin_available(android __ANDROID_API_T__, *)) {
+        for (int i = 0; i < maxDequeueCount; ++i) {
+            ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+            ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bids[i]));
+            AHardwareBuffer_release(buf);
+            mBqStat->mDequeued++;
+            numAlloc++;
+        }
+    } else {
+        GTEST_SKIP();
+    }
+    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+
+    int discardIdx = 0;
+    maxDequeueCount -= dequeueDecrease;
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+    for (int i = 0; i < dequeueDecrease + 1; ++i) {
+        ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+        ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+        ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
+        mBqStat->mDiscarded++;
+    }
+    ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+    mBqStat->mDequeued++;
+
+    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
+    mBqStat->mDiscarded++;
+    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
+    mBqStat->mDiscarded++;
+    maxDequeueCount -= dequeueDecrease;
+
+    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
+    for (int i = 0; i < dequeueDecrease - 1; ++i) {
+        ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
+        ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+        ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
+        mBqStat->mDiscarded++;
+    }
+    ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
+    ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
+    mBqStat->mDequeued++;
+}
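The tests above repeat the same allocate-and-wait pattern: call GraphicsTracker::allocate(), and if it returns C2_BLOCKING, wait on the pipe-backed C2Fence until a buffer is released or deallocated, then retry. A minimal sketch of that pattern, assuming only the GraphicsTracker and C2Fence APIs exercised by this patch (the helper name and the 3-second timeout are illustrative, not part of the patch):

    // Hypothetical helper mirroring the retry loops in RenderTest/SurfaceChangeTest.
    c2_status_t allocateWithWait(GraphicsTracker &tracker, C2Fence &waitFence,
                                 uint64_t usage, AHardwareBuffer **buf,
                                 android::sp<android::Fence> *fence) {
        while (true) {
            c2_status_t ret = tracker.allocate(0, 0, 0, usage, buf, fence);
            if (ret != C2_BLOCKING) {
                return ret;  // C2_OK, or a hard failure to report to the caller
            }
            // All max-dequeue slots are outstanding; wait (up to 3 sec) for
            // onReleased()/deallocate() to signal the waitable fd, then retry.
            c2_status_t waitRes = waitFence.wait(3000000000);
            if (waitRes != C2_OK && waitRes != C2_TIMED_OUT) {
                return waitRes;  // e.g. C2_BAD_STATE after GraphicsTracker::stop()
            }
        }
    }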
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index bbe228c..9f57bfd 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -52,6 +52,9 @@
         "com.android.media.swcodec",
     ],
 
+    defaults: [
+        "libcodec2_hal_selection",
+    ],
 
     srcs: [
         "C2AllocatorBlob.cpp",
@@ -81,7 +84,7 @@
         "libbase",
         "libdmabufheap",
         "android.hardware.media.bufferpool@2.0",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
     ],
 
     local_include_dirs: [
@@ -99,7 +102,7 @@
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
         "android.hardware.media.bufferpool@2.0",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
         "android.hardware.media.c2-V1-ndk",
         "libbase",
         "libbinder_ndk",
@@ -129,6 +132,10 @@
 cc_defaults {
     name: "libcodec2-static-defaults",
 
+    defaults: [
+        "libcodec2_hal_selection",
+    ],
+
     static_libs: [
         "liblog",
         "libion",
@@ -155,11 +162,12 @@
         "android.hardware.graphics.bufferqueue@2.0",
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
-        "android.hardware.media.bufferpool2-V1-ndk",
+        "android.hardware.media.bufferpool2-V2-ndk",
         "android.hardware.media.c2-V1-ndk",
     ],
 
     shared_libs: [
+        "libbinder",
         "libbinder_ndk",
         "libui",
         "libdl",
@@ -171,6 +179,11 @@
 // public dependency for implementing Codec 2 components
 cc_defaults {
     name: "libcodec2-impl-defaults",
+    cpp_std: "gnu++17",
+
+    defaults: [
+        "libcodec2_hal_selection",
+    ],
 
     shared_libs: [
         "libbase", // for C2_LOG
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index e04c637..60b5b29 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -188,6 +188,14 @@
         return res;
     }
 
+    static uint32_t getPixelFormat(const C2Handle *const handle) {
+        if (handle == nullptr) {
+            return 0;
+        }
+        const ExtraData *xd = GetExtraData(handle);
+        return xd->format;
+    }
+
     static bool MigrateNativeHandle(
             native_handle_t *handle,
             uint32_t generation, uint64_t igbp_id, uint32_t igbp_slot) {
@@ -375,7 +383,7 @@
     }
 
     uint8_t *pointer = nullptr;
-    err = mapper.lock(handle, usage, bounds, (void **)&pointer, nullptr, nullptr);
+    err = mapper.lock(handle, usage, bounds, (void **)&pointer);
     if (err != NO_ERROR || pointer == nullptr) {
         return C2_CORRUPTED;
     }
@@ -902,6 +910,10 @@
                                              generation, igbp_id, igbp_slot);
 }
 
+uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle) {
+    return C2HandleGralloc::getPixelFormat(handle);
+}
+
 bool MigrateNativeCodec2GrallocHandle(
         native_handle_t *handle,
         uint32_t generation, uint64_t igbp_id, uint32_t igbp_slot) {
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 018e269..7b9b80d 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -16,7 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2Buffer"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <list>
 #include <map>
@@ -26,6 +28,7 @@
 #include <C2AllocatorGralloc.h>
 #include <C2AllocatorIon.h>
 #include <C2BufferPriv.h>
+#include <C2Debug.h>
 #include <C2BlockInternal.h>
 #include <C2PlatformSupport.h>
 #include <bufferpool/ClientManager.h>
@@ -33,6 +36,7 @@
 
 namespace {
 
+using android::ScopedTrace;
 using android::C2AllocatorBlob;
 using android::C2AllocatorGralloc;
 using android::C2AllocatorIon;
@@ -113,6 +117,32 @@
 
 }  // namespace
 
+
+c2_status_t C2BlockPool::fetchLinearBlock(
+        uint32_t capacity, C2MemoryUsage usage,
+        std::shared_ptr<C2LinearBlock> *block /* nonnull */,
+        C2Fence *fence /* nonnull */) {
+    // fall back to non-waitable implementation, as long as it does not return C2_BLOCKING
+    c2_status_t result = fetchLinearBlock(capacity, usage, block);
+    C2_CHECK_NE(result, C2_BLOCKING);
+    *fence = C2Fence();
+    return result;
+}
+
+c2_status_t C2BlockPool::fetchGraphicBlock(
+        uint32_t width, uint32_t height, uint32_t format,
+        C2MemoryUsage usage,
+        std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+        C2Fence *fence /* nonnull */) {
+    // fall back to non-waitable implementation, as long as it does not return C2_BLOCKING
+    c2_status_t result = fetchGraphicBlock(width, height, format, usage, block);
+    C2_CHECK_NE(result, C2_BLOCKING);
+    *fence = C2Fence();
+    return result;
+}
+
 /* ========================================== 1D BLOCK ========================================= */
 
 /**
@@ -1159,6 +1189,7 @@
         uint32_t capacity,
         C2MemoryUsage usage,
         std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
+    ScopedTrace trace(ATRACE_TAG,"C2PooledBlockPool::fetchLinearBlock");
     if (mBufferPoolVer == VER_HIDL && mImpl) {
         return mImpl->fetchLinearBlock(capacity, usage, block);
     }
@@ -1174,6 +1205,7 @@
         uint32_t format,
         C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block) {
+    ScopedTrace trace(ATRACE_TAG,"C2PooledBlockPool::fetchGraphicBlock");
     if (mBufferPoolVer == VER_HIDL && mImpl) {
         return mImpl->fetchGraphicBlock(width, height, format, usage, block);
     }
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index b91ac6d..5d50fc3 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -26,6 +26,8 @@
 #include <C2FenceFactory.h>
 #include <C2SurfaceSyncObj.h>
 
+#include <utility>
+
 #define MAX_FENCE_FDS 1
 
 class C2Fence::Impl {
@@ -333,7 +335,8 @@
             p.reset();
         }
     } else {
-        ALOGE("Create sync fence from invalid fd");
+        ALOGV("Create sync fence from invalid fd");
+        return C2Fence();
     }
     return C2Fence(p);
 }
@@ -378,7 +381,7 @@
         struct timespec ts;
         if (timeoutNs >= 0) {
             ts.tv_sec = int(timeoutNs / 1000000000);
-            ts.tv_nsec = timeoutNs;
+            ts.tv_nsec = timeoutNs % 1000000000;
         } else {
             ALOGD("polling for indefinite duration requested, but changed to wait for %d sec",
                   kPipeFenceWaitLimitSecs);
@@ -485,17 +488,26 @@
         mValid = (mPipeFd.get() >= 0);
     }
 
+    PipeFenceImpl(::android::base::unique_fd &&ufd) : mPipeFd{std::move(ufd)} {
+        mValid = (mPipeFd.get() >= 0);
+    }
+
 private:
     friend struct _C2FenceFactory;
     static constexpr int kPipeFenceWaitLimitSecs = 5;
 
     mutable std::atomic<bool> mValid;
-    ::android::base::unique_fd mPipeFd;
+    const ::android::base::unique_fd mPipeFd;
 };
 
 C2Fence _C2FenceFactory::CreatePipeFence(int fd) {
+    ::android::base::unique_fd ufd{fd};
+    return CreatePipeFence(std::move(ufd));
+}
+
+C2Fence _C2FenceFactory::CreatePipeFence(::android::base::unique_fd &&ufd) {
     std::shared_ptr<_C2FenceFactory::PipeFenceImpl> impl =
-        std::make_shared<_C2FenceFactory::PipeFenceImpl>(fd);
+        std::make_shared<_C2FenceFactory::PipeFenceImpl>(std::move(ufd));
     std::shared_ptr<C2Fence::Impl> p = std::static_pointer_cast<C2Fence::Impl>(impl);
     if (!p) {
         ALOGE("PipeFence creation failure");
@@ -520,7 +532,9 @@
             p = SyncFenceImpl::CreateFromNativeHandle(handle);
             break;
         default:
-            ALOGD("Unsupported fence type %d", type);
+            ALOGV("Unsupported fence type %d", type);
+            // If this is a malformed handle, close it here.
+            (void) native_handle_close(handle);
             // return a null-fence in this case
             break;
     }
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index f6f97da..e7fd14f 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -26,11 +26,15 @@
 #include <C2BqBufferPriv.h>
 #include <C2Component.h>
 #include <C2Config.h>
+#include <C2IgbaBufferPriv.h>
 #include <C2PlatformStorePluginLoader.h>
 #include <C2PlatformSupport.h>
+#include <codec2/common/HalSelection.h>
 #include <cutils/properties.h>
 #include <util/C2InterfaceHelper.h>
 
+#include <aidl/android/hardware/media/c2/IGraphicBufferAllocator.h>
+
 #include <dlfcn.h>
 #include <unistd.h> // getpagesize
 
@@ -91,6 +95,9 @@
     /// returns a shared-singleton bufferqueue supporting gralloc allocator
     std::shared_ptr<C2Allocator> fetchBufferQueueAllocator();
 
+    /// returns a shared-singleton IGBA supporting AHardwareBuffer/gralloc allocator
+    std::shared_ptr<C2Allocator> fetchIgbaAllocator();
+
     /// component store to use
     std::mutex _mComponentStoreSetLock; // protects the entire updating _mComponentStore and its
                                         // dependencies
@@ -157,6 +164,10 @@
         *allocator = fetchBlobAllocator();
         break;
 
+    case C2PlatformAllocatorStore::IGBA:
+        *allocator = fetchIgbaAllocator();
+        break;
+
     default:
         // Try to create allocator from platform store plugins.
         c2_status_t res =
@@ -388,6 +399,18 @@
     return allocator;
 }
 
+std::shared_ptr<C2Allocator> C2PlatformAllocatorStoreImpl::fetchIgbaAllocator() {
+    static std::mutex mutex;
+    static std::weak_ptr<C2Allocator> ahwbAllocator;
+    std::lock_guard<std::mutex> lock(mutex);
+    std::shared_ptr<C2Allocator> allocator = ahwbAllocator.lock();
+    if (allocator == nullptr) {
+        allocator = std::make_shared<C2AllocatorAhwb>(C2PlatformAllocatorStore::IGBA);
+        ahwbAllocator = allocator;
+    }
+    return allocator;
+}
+
 namespace {
     std::mutex gPreferredComponentStoreMutex;
     std::shared_ptr<C2ComponentStore> gPreferredComponentStore;
@@ -447,18 +470,25 @@
 
 namespace {
 
+static C2PooledBlockPool::BufferPoolVer GetBufferPoolVer() {
+    static C2PooledBlockPool::BufferPoolVer sVer =
+        IsCodec2AidlHalSelected() ? C2PooledBlockPool::VER_AIDL2 : C2PooledBlockPool::VER_HIDL;
+    return sVer;
+}
+
 class _C2BlockPoolCache {
 public:
     _C2BlockPoolCache() : mBlockPoolSeqId(C2BlockPool::PLATFORM_START + 1) {}
 
 private:
     c2_status_t _createBlockPool(
-            C2PlatformAllocatorStore::id_t allocatorId,
+            C2PlatformAllocatorDesc &allocatorParam,
             std::vector<std::shared_ptr<const C2Component>> components,
             C2BlockPool::local_id_t poolId,
             std::shared_ptr<C2BlockPool> *pool) {
         std::shared_ptr<C2AllocatorStore> allocatorStore =
                 GetCodec2PlatformAllocatorStore();
+        C2PlatformAllocatorStore::id_t allocatorId = allocatorParam.allocatorId;
         std::shared_ptr<C2Allocator> allocator;
         c2_status_t res = C2_NOT_FOUND;
 
@@ -477,7 +507,7 @@
                         C2PlatformAllocatorStore::ION, &allocator);
                 if (res == C2_OK) {
                     std::shared_ptr<C2BlockPool> ptr(
-                            new C2PooledBlockPool(allocator, poolId), deleter);
+                            new C2PooledBlockPool(allocator, poolId, GetBufferPoolVer()), deleter);
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
                     mComponents[poolId].insert(
@@ -490,7 +520,7 @@
                         C2PlatformAllocatorStore::BLOB, &allocator);
                 if (res == C2_OK) {
                     std::shared_ptr<C2BlockPool> ptr(
-                            new C2PooledBlockPool(allocator, poolId), deleter);
+                            new C2PooledBlockPool(allocator, poolId, GetBufferPoolVer()), deleter);
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
                     mComponents[poolId].insert(
@@ -504,7 +534,7 @@
                         C2AllocatorStore::DEFAULT_GRAPHIC, &allocator);
                 if (res == C2_OK) {
                     std::shared_ptr<C2BlockPool> ptr(
-                        new C2PooledBlockPool(allocator, poolId), deleter);
+                            new C2PooledBlockPool(allocator, poolId, GetBufferPoolVer()), deleter);
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
                     mComponents[poolId].insert(
@@ -525,6 +555,22 @@
                            components.begin(), components.end());
                 }
                 break;
+            case C2PlatformAllocatorStore::IGBA:
+                res = allocatorStore->fetchAllocator(
+                        C2PlatformAllocatorStore::IGBA, &allocator);
+                if (res == C2_OK) {
+                    std::shared_ptr<C2BlockPool> ptr(
+                            new C2IgbaBlockPool(allocator,
+                                                allocatorParam.igba,
+                                                std::move(allocatorParam.waitableFd),
+                                                poolId), deleter);
+                    *pool = ptr;
+                    mBlockPools[poolId] = ptr;
+                    mComponents[poolId].insert(
+                           mComponents[poolId].end(),
+                           components.begin(), components.end());
+                }
+                break;
             default:
                 // Try to create block pool from platform store plugins.
                 std::shared_ptr<C2BlockPool> ptr;
@@ -547,10 +593,20 @@
             C2PlatformAllocatorStore::id_t allocatorId,
             std::vector<std::shared_ptr<const C2Component>> components,
             std::shared_ptr<C2BlockPool> *pool) {
-        std::unique_lock lock(mMutex);
-        return _createBlockPool(allocatorId, components, mBlockPoolSeqId++, pool);
+        C2PlatformAllocatorDesc allocator;
+        allocator.allocatorId = allocatorId;
+        return createBlockPool(allocator, components, pool);
     }
 
+    c2_status_t createBlockPool(
+            C2PlatformAllocatorDesc &allocator,
+            std::vector<std::shared_ptr<const C2Component>> components,
+            std::shared_ptr<C2BlockPool> *pool) {
+        std::unique_lock lock(mMutex);
+        return _createBlockPool(allocator, components, mBlockPoolSeqId++, pool);
+    }
+
+
     c2_status_t getBlockPool(
             C2BlockPool::local_id_t blockPoolId,
             std::shared_ptr<const C2Component> component,
@@ -579,8 +635,10 @@
         }
         // TODO: remove this. this is temporary
         if (blockPoolId == C2BlockPool::PLATFORM_START) {
+            C2PlatformAllocatorDesc allocator;
+            allocator.allocatorId = C2PlatformAllocatorStore::BUFFERQUEUE;
             return _createBlockPool(
-                    C2PlatformAllocatorStore::BUFFERQUEUE, {component}, blockPoolId, pool);
+                    allocator, {component}, blockPoolId, pool);
         }
         return C2_NOT_FOUND;
     }
@@ -637,7 +695,9 @@
         std::shared_ptr<C2BlockPool> *pool) {
     pool->reset();
 
-    return sBlockPoolCache->createBlockPool(allocatorId, components, pool);
+    C2PlatformAllocatorDesc allocator;
+    allocator.allocatorId = allocatorId;
+    return sBlockPoolCache->createBlockPool(allocator, components, pool);
 }
 
 c2_status_t CreateCodec2BlockPool(
@@ -646,7 +706,27 @@
         std::shared_ptr<C2BlockPool> *pool) {
     pool->reset();
 
-    return sBlockPoolCache->createBlockPool(allocatorId, {component}, pool);
+    C2PlatformAllocatorDesc allocator;
+    allocator.allocatorId = allocatorId;
+    return sBlockPoolCache->createBlockPool(allocator, {component}, pool);
+}
+
+c2_status_t CreateCodec2BlockPool(
+        C2PlatformAllocatorDesc &allocator,
+        const std::vector<std::shared_ptr<const C2Component>> &components,
+        std::shared_ptr<C2BlockPool> *pool) {
+    pool->reset();
+
+    return sBlockPoolCache->createBlockPool(allocator, components, pool);
+}
+
+c2_status_t CreateCodec2BlockPool(
+        C2PlatformAllocatorDesc &allocator,
+        std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool) {
+    pool->reset();
+
+    return sBlockPoolCache->createBlockPool(allocator, {component}, pool);
 }
 
 class C2PlatformComponentStore : public C2ComponentStore {
@@ -1081,6 +1161,7 @@
     emplace("libcodec2_soft_amrwbenc.so");
     //emplace("libcodec2_soft_av1dec_aom.so"); // deprecated for the gav1 implementation
     emplace("libcodec2_soft_av1dec_gav1.so");
+    emplace("libcodec2_soft_av1dec_dav1d.so");
     emplace("libcodec2_soft_av1enc.so");
     emplace("libcodec2_soft_avcdec.so");
     emplace("libcodec2_soft_avcenc.so");
diff --git a/media/codec2/vndk/include/C2AllocatorGralloc.h b/media/codec2/vndk/include/C2AllocatorGralloc.h
index dfcdb8b..1a34c30 100644
--- a/media/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/codec2/vndk/include/C2AllocatorGralloc.h
@@ -46,6 +46,13 @@
         uint32_t generation = 0, uint64_t igbp_id = 0, uint32_t igbp_slot = 0);
 
 /**
+ * Extract the pixel format from the extra data of a gralloc handle.
+ *
+ * @return 0 when no valid pixel format exists.
+ */
+uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle);
+
+/**
  * When the gralloc handle is migrated to another bufferqueue, update
  * bufferqueue information.
  *
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index 9b09980..4f974ca 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -20,6 +20,8 @@
 
 #include <C2Buffer.h>
 
+#include <android-base/unique_fd.h>
+
 /*
  * Create a list of fds from fence
  *
@@ -69,6 +71,7 @@
 
     /*
      * Create C2Fence from an fd created by pipe()/pipe2() syscall.
+     * The ownership of \p fd is transferred to the returned C2Fence.
      *
      * \param fd                An fd representing the write end from a pair of
      *                          file descriptors which are created by
@@ -76,6 +79,15 @@
      */
     static C2Fence CreatePipeFence(int fd);
 
+    /*
+     * Create C2Fence from a unique_fd created by pipe()/pipe2() syscall.
+     *
+     * \param ufd               A unique_fd representing the write end from a pair
+     *                          of file descriptors which are created by
+     *                          pipe()/pipe2() syscall.
+     */
+    static C2Fence CreatePipeFence(::android::base::unique_fd &&ufd);
+
     /**
      * Create a native handle from fence for marshalling
      *
diff --git a/media/codec2/vndk/include/C2IgbaBufferPriv.h b/media/codec2/vndk/include/C2IgbaBufferPriv.h
index a5676b7..5879263 100644
--- a/media/codec2/vndk/include/C2IgbaBufferPriv.h
+++ b/media/codec2/vndk/include/C2IgbaBufferPriv.h
@@ -17,6 +17,8 @@
 
 #include <C2Buffer.h>
 
+#include <android-base/unique_fd.h>
+
 #include <memory>
 
 namespace aidl::android::hardware::media::c2 {
@@ -32,8 +34,9 @@
 public:
     explicit C2IgbaBlockPool(
             const std::shared_ptr<C2Allocator> &allocator,
-            const std::shared_ptr<
-                    ::aidl::android::hardware::media::c2::IGraphicBufferAllocator> &igba,
+            const std::shared_ptr<::aidl::android::hardware::media::c2::IGraphicBufferAllocator>
+                    &igba,
+            ::android::base::unique_fd &&ufd,
             const local_id_t localId);
 
     virtual ~C2IgbaBlockPool() = default;
@@ -89,8 +92,7 @@
 
     C2IgbaBlockPoolData(
             const AHardwareBuffer *buffer,
-            const std::shared_ptr<::aidl::android::hardware::media::c2::IGraphicBufferAllocator>
-                &igba);
+            std::shared_ptr<::aidl::android::hardware::media::c2::IGraphicBufferAllocator> &igba);
 
     virtual ~C2IgbaBlockPoolData() override;
 
@@ -103,7 +105,10 @@
 
     void disown();
 
+    void registerIgba(std::shared_ptr<
+            ::aidl::android::hardware::media::c2::IGraphicBufferAllocator> &igba);
+
     bool mOwned;
     const AHardwareBuffer *mBuffer;
-    const std::weak_ptr<::aidl::android::hardware::media::c2::IGraphicBufferAllocator> mIgba;
+    std::weak_ptr<::aidl::android::hardware::media::c2::IGraphicBufferAllocator> mIgba;
 };
diff --git a/media/codec2/vndk/include/C2PlatformSupport.h b/media/codec2/vndk/include/C2PlatformSupport.h
index 221a799..6fa155a 100644
--- a/media/codec2/vndk/include/C2PlatformSupport.h
+++ b/media/codec2/vndk/include/C2PlatformSupport.h
@@ -22,6 +22,12 @@
 
 #include <memory>
 
+#include <android-base/unique_fd.h>
+
+namespace aidl::android::hardware::media::c2 {
+class IGraphicBufferAllocator;
+}
+
 namespace android {
 
 /**
@@ -164,6 +170,53 @@
         std::shared_ptr<C2BlockPool> *pool);
 
 /**
+ * BlockPool creation parameters regarding allocator.
+ *
+ * The igba and waitableFd fields are required only when allocatorId is
+ * C2PlatformAllocatorStore::IGBA.
+ */
+struct C2PlatformAllocatorDesc {
+    C2PlatformAllocatorStore::id_t allocatorId;
+    std::shared_ptr<::aidl::android::hardware::media::c2::IGraphicBufferAllocator> igba;
+    ::android::base::unique_fd waitableFd; // This will be passed and moved to C2Fence
+                                           // implementation.
+};
+
+/**
+ * Creates a block pool.
+ * \param allocator     allocator ID and parameters which are used to allocate blocks
+ * \param component     the component using the block pool (must be non-null)
+ * \param pool          pointer to where the created block pool shall be stored on success.
+ *                      nullptr will be stored here on failure
+ *
+ * \retval C2_OK        the operation was successful
+ * \retval C2_BAD_VALUE the component is null
+ * \retval C2_NOT_FOUND if the allocator does not exist
+ * \retval C2_NO_MEMORY not enough memory to create a block pool
+ */
+c2_status_t CreateCodec2BlockPool(
+        C2PlatformAllocatorDesc &allocator,
+        std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool);
+
+/**
+ * Creates a block pool.
+ * \param allocator     allocator ID and parameters which are used to allocate blocks
+ * \param components    the components using the block pool
+ * \param pool          pointer to where the created block pool shall be stored on success.
+ *                      nullptr will be stored here on failure
+ *
+ * \retval C2_OK        the operation was successful
+ * \retval C2_BAD_VALUE the component is null
+ * \retval C2_NOT_FOUND if the allocator does not exist
+ * \retval C2_NO_MEMORY not enough memory to create a block pool
+ */
+c2_status_t CreateCodec2BlockPool(
+        C2PlatformAllocatorDesc &allocator,
+        const std::vector<std::shared_ptr<const C2Component>> &components,
+        std::shared_ptr<C2BlockPool> *pool);
+
+/**
  * Returns the platform component store.
  * \retval nullptr if the platform component store could not be obtained
  */
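Taken together with the C2PlatformAllocatorStore::IGBA path added in C2Store.cpp above, the descriptor lets a caller hand both the AIDL allocator and its waitable fd to the block pool in a single call. A minimal sketch of calling the new overload, assuming `igba`, `fd`, and `comp` were obtained elsewhere (their names are illustrative, not part of this patch):

    // Hypothetical caller of the descriptor-based overload.
    C2PlatformAllocatorDesc desc;
    desc.allocatorId = C2PlatformAllocatorStore::IGBA;
    desc.igba = igba;                                   // AIDL IGraphicBufferAllocator
    desc.waitableFd = ::android::base::unique_fd(fd);   // moved into the pipe-backed C2Fence
    std::shared_ptr<C2BlockPool> pool;
    c2_status_t res = CreateCodec2BlockPool(desc, comp, &pool);
    // On success, `pool` holds a C2IgbaBlockPool; on failure it remains nullptr.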
diff --git a/media/codec2/vndk/include/C2SurfaceSyncObj.h b/media/codec2/vndk/include/C2SurfaceSyncObj.h
index b193b4a..7c1a405 100644
--- a/media/codec2/vndk/include/C2SurfaceSyncObj.h
+++ b/media/codec2/vndk/include/C2SurfaceSyncObj.h
@@ -117,6 +117,16 @@
      */
     void notifyAll();
 
+    /**
+     * Invalidate the current sync variables on the death of the other process.
+     */
+    void invalidate();
+
+    /**
+     * If a dead process holds the lock, clear the lock.
+     */
+    void clearLockIfNecessary();
+
     C2SyncVariables() {}
 
 private:
@@ -135,6 +145,11 @@
      */
     int wait();
 
+    /**
+     * Try to acquire the lock, waiting up to the specified duration.
+     */
+    bool tryLockFor(size_t ms);
+
     std::atomic<uint32_t> mLock;
     std::atomic<uint32_t> mCond;
     int32_t mMaxDequeueCount;
diff --git a/media/codec2/vndk/internal/C2BlockInternal.h b/media/codec2/vndk/internal/C2BlockInternal.h
index 8198ee1..4baf2db 100644
--- a/media/codec2/vndk/internal/C2BlockInternal.h
+++ b/media/codec2/vndk/internal/C2BlockInternal.h
@@ -39,6 +39,12 @@
 
 }
 
+namespace aidl::android::hardware::media::c2 {
+
+// IGraphicBufferAllocator for media.c2 aidl
+class IGraphicBufferAllocator;
+}
+
 typedef struct AHardwareBuffer AHardwareBuffer;
 
 using bufferpool_BufferPoolData = android::hardware::media::bufferpool::BufferPoolData;
@@ -472,6 +478,16 @@
      */
     static void DisownIgbaBlock(
             const std::shared_ptr<_C2BlockPoolData>& poolData);
+
+    /**
+     * When the client receives a block from the HAL, the client needs to store
+     * the IGraphicBufferAllocator from which the block was originally allocated.
+     * The stored \p igba will be used in the dtor to deallocate the buffer
+     * (by calling IGraphicBufferAllocator::deallocate to reclaim it).
+     */
+    static void RegisterIgba(
+            const std::shared_ptr<_C2BlockPoolData>& poolData,
+            std::shared_ptr<::aidl::android::hardware::media::c2::IGraphicBufferAllocator> &igba);
 };
 
 #endif // ANDROID_STAGEFRIGHT_C2BLOCK_INTERNAL_H_
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 5fb0c8f..48157c8 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -16,8 +16,10 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2BqBuffer"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <android/hardware_buffer.h>
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <ui/BufferQueueDefs.h>
 #include <ui/GraphicBuffer.h>
@@ -33,10 +35,12 @@
 #include <C2FenceFactory.h>
 #include <C2SurfaceSyncObj.h>
 
+#include <atomic>
 #include <list>
 #include <map>
 #include <mutex>
 
+using ::android::ScopedTrace;
 using ::android::BufferQueueDefs::NUM_BUFFER_SLOTS;
 using ::android::C2AllocatorGralloc;
 using ::android::C2AndroidMemoryUsage;
@@ -392,6 +396,12 @@
                     if (c2Fence) {
                         *c2Fence = _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
                     }
+                    if (mInvalidated) {
+                        if (c2Fence) {
+                            *c2Fence = C2Fence();
+                        }
+                        return C2_BAD_STATE;
+                    }
                     return C2_BLOCKING;
                 }
                 if (syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_ACTIVE) {
@@ -400,6 +410,12 @@
                     if (c2Fence) {
                         *c2Fence = _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
                     }
+                    if (mInvalidated) {
+                        if (c2Fence) {
+                            *c2Fence = C2Fence();
+                        }
+                        return C2_BAD_STATE;
+                    }
                     return C2_BLOCKING;
                 }
                 syncVar->notifyDequeuedLocked();
@@ -686,7 +702,6 @@
             }
         }
         int migrated = 0;
-        std::shared_ptr<C2SurfaceSyncMemory> oldMem;
         // poolDatas dtor should not be called during lock is held.
         std::shared_ptr<C2BufferQueueBlockPoolData>
                 poolDatas[NUM_BUFFER_SLOTS];
@@ -704,8 +719,22 @@
                 mGeneration = 0;
                 ALOGD("configuring null producer: igbp_information(%d)", bqInformation);
             }
-            oldMem = mSyncMem; // preven destruction while locked.
-            mSyncMem = c2SyncMem;
+            if (mInvalidated) {
+                return;
+            }
+            {
+                std::unique_lock<std::mutex> memLock(mSyncMemMutex);
+                mOldMem = mSyncMem; // prevent destruction while locked.
+                                    // Normally the client wakes up any waiters
+                                    // on the old memory once this configuration
+                                    // from the HAL has finished. We keep mOldMem
+                                    // in case the client dies in between; the
+                                    // death listener then wakes up the waiters
+                                    // on the old memory via mOldMem.
+                mSyncMem = c2SyncMem;
+            }
             C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
             if (syncVar) {
                 syncVar->lock();
@@ -732,6 +761,9 @@
                 // is no longer valid.
                 mIgbpValidityToken = std::make_shared<int>(0);
             }
+            if (mInvalidated) {
+                mIgbpValidityToken = std::make_shared<int>(0);
+            }
             for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
                 mBuffers[i] = buffers[i];
                 mPoolDatas[i] = poolDatas[i];
@@ -750,8 +782,33 @@
     }
 
     void invalidate() {
-        std::scoped_lock<std::mutex> lock(mMutex);
-        mInvalidated = true;
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+        std::shared_ptr<C2SurfaceSyncMemory> oldMem;
+        {
+            std::unique_lock<std::mutex> l(mSyncMemMutex);
+            bool old = mInvalidated.exchange(true);
+            if (old) {
+                return;
+            }
+            syncMem = mSyncMem;
+            oldMem = mOldMem;
+        }
+        mIgbpValidityToken.reset();
+        C2SyncVariables *syncVar = syncMem ? syncMem->mem(): nullptr;
+        if (syncVar) {
+            syncVar->invalidate();
+        }
+        C2SyncVariables *oldVar = oldMem ? oldMem->mem(): nullptr;
+        if (oldVar) {
+            oldVar->invalidate();
+        }
+        // invalidate pending lock from a dead process if any
+        if (syncVar) {
+            syncVar->clearLockIfNecessary();
+        }
+        if (oldVar) {
+            oldVar->clearLockIfNecessary();
+        }
     }
 
 private:
@@ -776,7 +833,9 @@
     sp<GraphicBuffer> mBuffers[NUM_BUFFER_SLOTS];
     std::weak_ptr<C2BufferQueueBlockPoolData> mPoolDatas[NUM_BUFFER_SLOTS];
 
+    std::mutex mSyncMemMutex;
     std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
+    std::shared_ptr<C2SurfaceSyncMemory> mOldMem;
 
     // IGBP invalidation notification token.
     // The buffers(C2BufferQueueBlockPoolData) has the reference to the IGBP where
@@ -791,7 +850,7 @@
     // if the token has been expired, the buffers will not call IGBP::cancelBuffer()
     // when they are no longer used.
     std::shared_ptr<int> mIgbpValidityToken;
-    bool mInvalidated{false};
+    std::atomic<bool> mInvalidated{false};
 };
 
 C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
@@ -1063,6 +1122,7 @@
         uint32_t format,
         C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+    ScopedTrace trace(ATRACE_TAG,"C2BufferQueueBlockPool::fetchGraphicBlock");
     if (mImpl) {
         return mImpl->fetchGraphicBlock(width, height, format, usage, block, nullptr);
     }
diff --git a/media/codec2/vndk/platform/C2IgbaBuffer.cpp b/media/codec2/vndk/platform/C2IgbaBuffer.cpp
index 853d5a3..eafdb22 100644
--- a/media/codec2/vndk/platform/C2IgbaBuffer.cpp
+++ b/media/codec2/vndk/platform/C2IgbaBuffer.cpp
@@ -67,7 +67,8 @@
             return err;
         }
         std::shared_ptr<C2IgbaBlockPoolData> poolData =
-                std::make_shared<C2IgbaBlockPoolData>(ahwb, igba);
+                std::make_shared<C2IgbaBlockPoolData>(
+                        ahwb, const_cast<std::shared_ptr<C2IGBA>&>(igba));
         *block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
         return C2_OK;
     } else {
@@ -79,7 +80,7 @@
 
 C2IgbaBlockPoolData::C2IgbaBlockPoolData(
         const AHardwareBuffer *buffer,
-        const std::shared_ptr<C2IGBA> &igba) : mOwned(true), mBuffer(buffer), mIgba(igba) {
+        std::shared_ptr<C2IGBA> &igba) : mOwned(true), mBuffer(buffer), mIgba(igba) {
     CHECK(mBuffer);
     AHardwareBuffer_acquire(const_cast<AHardwareBuffer *>(mBuffer));
 }
@@ -115,6 +116,10 @@
     mOwned = false;
 }
 
+void C2IgbaBlockPoolData::registerIgba(std::shared_ptr<C2IGBA> &igba) {
+    mIgba = igba;
+}
+
 std::shared_ptr<C2GraphicBlock> _C2BlockFactory::CreateGraphicBlock(AHardwareBuffer *ahwb) {
     // TODO: get proper allocator? and synchronization? or allocator-less?
     static std::shared_ptr<C2AllocatorAhwb> sAllocator = std::make_shared<C2AllocatorAhwb>(0);
@@ -148,22 +153,30 @@
     }
 }
 
+void _C2BlockFactory::RegisterIgba(
+        const std::shared_ptr<_C2BlockPoolData>& data,
+        std::shared_ptr<C2IGBA> &igba) {
+    if (data && data->getType() == _C2BlockPoolData::TYPE_AHWBUFFER) {
+        const std::shared_ptr<C2IgbaBlockPoolData> poolData =
+                std::static_pointer_cast<C2IgbaBlockPoolData>(data);
+        poolData->registerIgba(igba);
+    }
+}
+
 C2IgbaBlockPool::C2IgbaBlockPool(
         const std::shared_ptr<C2Allocator> &allocator,
         const std::shared_ptr<C2IGBA> &igba,
+        ::android::base::unique_fd &&ufd,
         const local_id_t localId) : mAllocator(allocator), mIgba(igba), mLocalId(localId) {
     if (!mIgba) {
         mValid = false;
         return;
     }
-    // TODO: Remove IPC (This is a nested IPC call during c2aidl creatBlockPool().
-    ::ndk::ScopedFileDescriptor fd;
-    ::ndk::ScopedAStatus status = mIgba->getWaitableFd(&fd);
-    if (!status.isOk()) {
+    if (ufd.get() < 0) {
         mValid = false;
         return;
     }
-    mWaitFence = _C2FenceFactory::CreatePipeFence(fd.release());
+    mWaitFence = _C2FenceFactory::CreatePipeFence(std::move(ufd));
     if (!mWaitFence.valid()) {
         mValid = false;
         return;
@@ -180,7 +193,7 @@
             width, height, format, usage, kBlockingFetchTimeoutNs, &origId, block, &fence);
 
     if (res == C2_BLOCKING) {
-        return C2_TIMED_OUT;
+        return C2_BLOCKING;
     }
     if (res != C2_OK) {
         return res;
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index d8c2292..41d16b5 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -26,6 +26,33 @@
 #include <chrono>
 #include <C2SurfaceSyncObj.h>
 
+namespace {
+static inline void timespec_add_ms(timespec& ts, size_t ms) {
+    constexpr int kNanoSecondsPerSec = 1000000000;
+    ts.tv_sec  += ms / 1000;
+    ts.tv_nsec += (ms % 1000) * 1000000;
+    if (ts.tv_nsec >= kNanoSecondsPerSec) {
+        ts.tv_sec++;
+        ts.tv_nsec -= kNanoSecondsPerSec;
+    }
+}
+
+/*
+ * lhs < rhs:  return <0
+ * lhs == rhs: return 0
+ * lhs > rhs:  return >0
+ */
+static inline int timespec_compare(const timespec& lhs, const timespec& rhs) {
+    if (lhs.tv_sec < rhs.tv_sec) {
+        return -1;
+    }
+    if (lhs.tv_sec > rhs.tv_sec) {
+        return 1;
+    }
+    return lhs.tv_nsec - rhs.tv_nsec;
+}
+}
+
 const native_handle_t C2SurfaceSyncMemory::HandleSyncMem::cHeader = {
     C2SurfaceSyncMemory::HandleSyncMem::version,
     C2SurfaceSyncMemory::HandleSyncMem::numFds,
@@ -284,6 +311,26 @@
     this->unlock();
 }
 
+void C2SyncVariables::invalidate() {
+    mCond++;
+    (void) syscall(__NR_futex, &mCond, FUTEX_REQUEUE, INT_MAX, (void *)INT_MAX, &mLock, 0);
+}
+
+void C2SyncVariables::clearLockIfNecessary() {
+    // Note: If the lock cannot be acquired within 30ms, we consider it to be
+    // dangling (e.g. held by a process that died). Since the lock is only held
+    // briefly to manage the counter, waiting 30ms should be more than enough.
+    constexpr size_t kTestLockDurationMs = 30;
+
+    bool locked = tryLockFor(kTestLockDurationMs);
+    unlock();
+
+    if (!locked) {
+        ALOGW("A dead process might be holding the lock");
+    }
+}
+
 int C2SyncVariables::signal() {
     mCond++;
 
@@ -308,3 +355,35 @@
     }
     return 0;
 }
+
+bool C2SyncVariables::tryLockFor(size_t ms) {
+    uint32_t old = FUTEX_UNLOCKED;
+
+    if (mLock.compare_exchange_strong(old, FUTEX_LOCKED_UNCONTENDED)) {
+        return true;
+    }
+
+    if (old == FUTEX_LOCKED_UNCONTENDED) {
+        old = mLock.exchange(FUTEX_LOCKED_CONTENDED);
+    }
+
+    struct timespec wait{
+            static_cast<time_t>(ms / 1000),
+            static_cast<long>((ms % 1000) * 1000000)};
+    struct timespec end;
+    clock_gettime(CLOCK_REALTIME, &end);
+    timespec_add_ms(end, ms);
+
+    while (old != FUTEX_UNLOCKED) { // re-check after EINTR or spurious wake-ups
+        (void)syscall(__NR_futex, &mLock, FUTEX_WAIT, FUTEX_LOCKED_CONTENDED, &wait, NULL, 0);
+        old = mLock.exchange(FUTEX_LOCKED_CONTENDED);
+
+        struct timespec now;
+        clock_gettime(CLOCK_REALTIME, &now);
+        if (timespec_compare(now, end) >= 0) {
+            break;
+        }
+    }
+
+    return old == FUTEX_UNLOCKED;
+}
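
For context on tryLockFor() above: mLock follows the classic three-state futex protocol (UNLOCKED / LOCKED_UNCONTENDED / LOCKED_CONTENDED) described in Drepper's "Futexes Are Tricky". Below is a minimal standalone sketch of that protocol with names chosen here for illustration; it is not the C2SyncVariables implementation, and tryLockFor() is essentially the slow path of lock() with a deadline, which clearLockIfNecessary() uses to detect a lock left held by a dead process.

    #include <atomic>
    #include <cstdint>
    #include <linux/futex.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    namespace demo {

    enum : uint32_t { UNLOCKED = 0, LOCKED_UNCONTENDED = 1, LOCKED_CONTENDED = 2 };

    struct FutexLock {
        std::atomic<uint32_t> state{UNLOCKED};

        void lock() {
            uint32_t old = UNLOCKED;
            // Fast path: nobody holds the lock.
            if (state.compare_exchange_strong(old, LOCKED_UNCONTENDED)) return;
            // Slow path: mark the lock contended and sleep until it is released.
            if (old != LOCKED_CONTENDED) old = state.exchange(LOCKED_CONTENDED);
            while (old != UNLOCKED) {
                // FUTEX_WAIT may return early (EINTR, value changed); re-check.
                syscall(__NR_futex, &state, FUTEX_WAIT, LOCKED_CONTENDED, nullptr, nullptr, 0);
                old = state.exchange(LOCKED_CONTENDED);
            }
        }

        void unlock() {
            // Wake one waiter only if contention was recorded while we held the lock.
            if (state.exchange(UNLOCKED) == LOCKED_CONTENDED) {
                syscall(__NR_futex, &state, FUTEX_WAKE, 1, nullptr, nullptr, 0);
            }
        }
    };

    }  // namespace demo
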
diff --git a/media/common_time/OWNERS b/media/common_time/OWNERS
deleted file mode 100644
index f9cb567..0000000
--- a/media/common_time/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-gkasten@google.com
diff --git a/media/janitors/audio_OWNERS b/media/janitors/audio_OWNERS
new file mode 100644
index 0000000..0d0c487
--- /dev/null
+++ b/media/janitors/audio_OWNERS
@@ -0,0 +1,8 @@
+# Bug component: 48436
+# gerrit owner/approvers in the audio team
+# For catch-all/last resort from other projects
+
+elaurent@google.com # lead
+hunga@google.com
+jmtrivi@google.com
+philburk@google.com
diff --git a/media/janitors/media_reliability_OWNERS b/media/janitors/media_reliability_OWNERS
new file mode 100644
index 0000000..cced19c
--- /dev/null
+++ b/media/janitors/media_reliability_OWNERS
@@ -0,0 +1,5 @@
+# Bug component: 1051309
+# go/android-media-reliability
+
+essick@google.com
+nchalko@google.com
diff --git a/media/libaaudio/OWNERS b/media/libaaudio/OWNERS
index f4d51f9..3285bf3 100644
--- a/media/libaaudio/OWNERS
+++ b/media/libaaudio/OWNERS
@@ -1 +1,4 @@
+# Bug component: 48436
+jiabin@google.com
 philburk@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
index 0233ee1..1b06ea7 100644
--- a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
+++ b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
@@ -19,8 +19,10 @@
 #include "aaudio/AAudioTesting.h"
 #include <fuzzer/FuzzedDataProvider.h>
 
-constexpr int32_t kRandomStringLength = 256;
+#include <functional>
 
+constexpr int32_t kRandomStringLength = 256;
+constexpr int32_t kMaxRuns = 100;
 constexpr int64_t kNanosPerMillisecond = 1000 * 1000;
 
 constexpr aaudio_direction_t kDirections[] = {
@@ -97,6 +99,7 @@
 public:
   ~LibAaudioFuzzer() { deInit(); }
   bool init();
+  void invokeAAudioSetAPIs(FuzzedDataProvider &fdp);
   void process(const uint8_t *data, size_t size);
   void deInit();
 
@@ -113,160 +116,208 @@
   return true;
 }
 
-void LibAaudioFuzzer::process(const uint8_t *data, size_t size) {
-  FuzzedDataProvider fdp(data, size);
-  aaudio_performance_mode_t mode =
-      fdp.PickValueInArray({fdp.PickValueInArray(kPerformanceModes),
-                            fdp.ConsumeIntegral<int32_t>()});
+void LibAaudioFuzzer::invokeAAudioSetAPIs(FuzzedDataProvider &fdp){
+  aaudio_performance_mode_t mode = fdp.PickValueInArray(
+          {fdp.PickValueInArray(kPerformanceModes), fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setPerformanceMode(mAaudioBuilder, mode);
 
-  int32_t deviceId = fdp.PickValueInArray(
-      {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+  int32_t deviceId = fdp.PickValueInArray({AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setDeviceId(mAaudioBuilder, deviceId);
 
-  std::string packageName = fdp.PickValueInArray<std::string>(
-      {"android.nativemedia.aaudio", "android.app.appops.cts",
-       fdp.ConsumeRandomLengthString(kRandomStringLength)});
+  std::string packageName =
+          fdp.PickValueInArray<std::string>({"android.nativemedia.aaudio", "android.app.appops.cts",
+                                             fdp.ConsumeRandomLengthString(kRandomStringLength)});
   AAudioStreamBuilder_setPackageName(mAaudioBuilder, packageName.c_str());
 
-  std::string attributionTag =
-      fdp.ConsumeRandomLengthString(kRandomStringLength);
+  std::string attributionTag = fdp.ConsumeRandomLengthString(kRandomStringLength);
   AAudioStreamBuilder_setAttributionTag(mAaudioBuilder, attributionTag.c_str());
 
   int32_t sampleRate = fdp.PickValueInArray(kSampleRates);
   AAudioStreamBuilder_setSampleRate(mAaudioBuilder, sampleRate);
 
-  int32_t channelCount = fdp.PickValueInArray(
-      {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+  int32_t channelCount = fdp.PickValueInArray({AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setChannelCount(mAaudioBuilder, channelCount);
 
-  aaudio_direction_t direction = fdp.PickValueInArray(
-      {fdp.PickValueInArray(kDirections), fdp.ConsumeIntegral<int32_t>()});
+  aaudio_direction_t direction =
+          fdp.PickValueInArray({fdp.PickValueInArray(kDirections), fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setDirection(mAaudioBuilder, direction);
 
-  aaudio_format_t format = fdp.PickValueInArray(
-      {fdp.PickValueInArray(kFormats), fdp.ConsumeIntegral<int32_t>()});
+  aaudio_format_t format =
+          fdp.PickValueInArray({fdp.PickValueInArray(kFormats), fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setFormat(mAaudioBuilder, format);
 
   aaudio_sharing_mode_t sharingMode = fdp.PickValueInArray(
-      {fdp.PickValueInArray(kSharingModes), fdp.ConsumeIntegral<int32_t>()});
+          {fdp.PickValueInArray(kSharingModes), fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setSharingMode(mAaudioBuilder, sharingMode);
 
-  aaudio_usage_t usage = fdp.PickValueInArray(
-      {fdp.PickValueInArray(kUsages), fdp.ConsumeIntegral<int32_t>()});
+  aaudio_usage_t usage =
+          fdp.PickValueInArray({fdp.PickValueInArray(kUsages), fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setUsage(mAaudioBuilder, usage);
 
   aaudio_content_type_t contentType = fdp.PickValueInArray(
-      {fdp.PickValueInArray(kContentTypes), fdp.ConsumeIntegral<int32_t>()});
+          {fdp.PickValueInArray(kContentTypes), fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setContentType(mAaudioBuilder, contentType);
 
   aaudio_input_preset_t inputPreset = fdp.PickValueInArray(
-      {fdp.PickValueInArray(kInputPresets), fdp.ConsumeIntegral<int32_t>()});
+          {fdp.PickValueInArray(kInputPresets), fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setInputPreset(mAaudioBuilder, inputPreset);
 
   bool privacySensitive = fdp.ConsumeBool();
   AAudioStreamBuilder_setPrivacySensitive(mAaudioBuilder, privacySensitive);
 
-  int32_t frames = fdp.PickValueInArray(
-      {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+  int32_t frames = fdp.PickValueInArray({AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setBufferCapacityInFrames(mAaudioBuilder, frames);
 
-  aaudio_allowed_capture_policy_t allowedCapturePolicy =
-      fdp.PickValueInArray({fdp.PickValueInArray(kAllowedCapturePolicies),
-                            fdp.ConsumeIntegral<int32_t>()});
-  AAudioStreamBuilder_setAllowedCapturePolicy(mAaudioBuilder,
-                                              allowedCapturePolicy);
+  aaudio_allowed_capture_policy_t allowedCapturePolicy = fdp.PickValueInArray(
+          {fdp.PickValueInArray(kAllowedCapturePolicies), fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setAllowedCapturePolicy(mAaudioBuilder, allowedCapturePolicy);
 
-  aaudio_session_id_t sessionId = fdp.PickValueInArray(
-      {fdp.PickValueInArray(kSessionIds), fdp.ConsumeIntegral<int32_t>()});
+  aaudio_session_id_t sessionId =
+          fdp.PickValueInArray({fdp.PickValueInArray(kSessionIds), fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setSessionId(mAaudioBuilder, sessionId);
 
   AAudioStreamBuilder_setDataCallback(mAaudioBuilder, nullptr, nullptr);
   AAudioStreamBuilder_setErrorCallback(mAaudioBuilder, nullptr, nullptr);
 
-  int32_t framesPerDataCallback = fdp.PickValueInArray(
-      {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
-  AAudioStreamBuilder_setFramesPerDataCallback(mAaudioBuilder,
-                                               framesPerDataCallback);
+  int32_t framesPerDataCallback =
+          fdp.PickValueInArray({AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setFramesPerDataCallback(mAaudioBuilder, framesPerDataCallback);
 
-  aaudio_policy_t policy = fdp.PickValueInArray(
-      {fdp.PickValueInArray(kPolicies), fdp.ConsumeIntegral<int32_t>()});
+  aaudio_policy_t policy =
+          fdp.PickValueInArray({fdp.PickValueInArray(kPolicies), fdp.ConsumeIntegral<int32_t>()});
   AAudio_setMMapPolicy(policy);
-  (void)AAudio_getMMapPolicy();
+}
 
-  aaudio_result_t result =
-      AAudioStreamBuilder_openStream(mAaudioBuilder, &mAaudioStream);
+void LibAaudioFuzzer::process(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp(data, size);
+  int32_t maxFrames = 0;
+  int32_t count = 0;
+  aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+
+  invokeAAudioSetAPIs(fdp);
+
+  aaudio_result_t result = AAudioStreamBuilder_openStream(mAaudioBuilder, &mAaudioStream);
   if ((result != AAUDIO_OK) || (!mAaudioStream)) {
     return;
   }
+  /* The 'runs' variable serves to set an upper limit on the loop iterations, preventing excessive
+   * execution.
+   */
+  int32_t runs = kMaxRuns;
+  while (fdp.remaining_bytes() > 0 && --runs) {
+    auto AAudioapi = fdp.PickValueInArray<const std::function<void()>>({
+            [&]() { (void)AAudio_getMMapPolicy(); },
 
-  int32_t framesPerBurst = AAudioStream_getFramesPerBurst(mAaudioStream);
-  uint8_t numberOfBursts = fdp.ConsumeIntegral<uint8_t>();
-  int32_t maxFrames = numberOfBursts * framesPerBurst;
-  int32_t requestedBufferSize =
-      fdp.ConsumeIntegral<uint16_t>() * framesPerBurst;
-  AAudioStream_setBufferSizeInFrames(mAaudioStream, requestedBufferSize);
+            [&]() {
+                int32_t framesPerBurst = AAudioStream_getFramesPerBurst(mAaudioStream);
+                uint8_t numberOfBursts = fdp.ConsumeIntegral<uint8_t>();
+                maxFrames = numberOfBursts * framesPerBurst;
+                int32_t requestedBufferSize = fdp.ConsumeIntegral<uint16_t>() * framesPerBurst;
+                AAudioStream_setBufferSizeInFrames(mAaudioStream, requestedBufferSize);
+            },
+            [&]() {
+                int64_t position = 0, nanoseconds = 0;
+                AAudioStream_getTimestamp(mAaudioStream, CLOCK_MONOTONIC, &position, &nanoseconds);
+            },
+            [&]() {
+                AAudioStream_requestStart(mAaudioStream);
+            },
+            [&]() {
+                AAudioStream_requestPause(mAaudioStream);
+            },
+            [&]() {
+                AAudioStream_requestFlush(mAaudioStream);
+            },
+            [&]() {
+                AAudioStream_requestStop(mAaudioStream);
+            },
+            [&]() {
+                aaudio_format_t actualFormat = AAudioStream_getFormat(mAaudioStream);
+                int32_t actualChannelCount = AAudioStream_getChannelCount(mAaudioStream);
 
-  int64_t position = 0, nanoseconds = 0;
-  AAudioStream_getTimestamp(mAaudioStream, CLOCK_MONOTONIC, &position,
-                            &nanoseconds);
+                count = fdp.ConsumeIntegral<int32_t>();
+                aaudio_direction_t direction = AAudioStream_getDirection(mAaudioStream);
 
-  AAudioStream_requestStart(mAaudioStream);
-
-  aaudio_format_t actualFormat = AAudioStream_getFormat(mAaudioStream);
-  int32_t actualChannelCount = AAudioStream_getChannelCount(mAaudioStream);
-
-  int32_t count = fdp.ConsumeIntegral<int32_t>();
-  direction = AAudioStream_getDirection(mAaudioStream);
-
-  if (actualFormat == AAUDIO_FORMAT_PCM_I16) {
-      std::vector<int16_t> inputShortData(maxFrames * actualChannelCount, 0x0);
-      if (direction == AAUDIO_DIRECTION_INPUT) {
-          AAudioStream_read(mAaudioStream, inputShortData.data(), maxFrames,
-                            count * kNanosPerMillisecond);
-    } else if (direction == AAUDIO_DIRECTION_OUTPUT) {
-        AAudioStream_write(mAaudioStream, inputShortData.data(), maxFrames,
-                           count * kNanosPerMillisecond);
-    }
-  } else if (actualFormat == AAUDIO_FORMAT_PCM_FLOAT) {
-      std::vector<float> inputFloatData(maxFrames * actualChannelCount, 0x0);
-      if (direction == AAUDIO_DIRECTION_INPUT) {
-          AAudioStream_read(mAaudioStream, inputFloatData.data(), maxFrames,
-                            count * kNanosPerMillisecond);
-    } else if (direction == AAUDIO_DIRECTION_OUTPUT) {
-        AAudioStream_write(mAaudioStream, inputFloatData.data(), maxFrames,
-                           count * kNanosPerMillisecond);
-    }
+                if (actualFormat == AAUDIO_FORMAT_PCM_I16) {
+                    std::vector<int16_t> inputShortData(maxFrames * actualChannelCount, 0x0);
+                    if (direction == AAUDIO_DIRECTION_INPUT) {
+                        AAudioStream_read(mAaudioStream, inputShortData.data(), maxFrames,
+                                          count * kNanosPerMillisecond);
+                    } else if (direction == AAUDIO_DIRECTION_OUTPUT) {
+                        AAudioStream_write(mAaudioStream, inputShortData.data(), maxFrames,
+                                           count * kNanosPerMillisecond);
+                    }
+                } else if (actualFormat == AAUDIO_FORMAT_PCM_FLOAT) {
+                    std::vector<float> inputFloatData(maxFrames * actualChannelCount, 0x0);
+                    if (direction == AAUDIO_DIRECTION_INPUT) {
+                        AAudioStream_read(mAaudioStream, inputFloatData.data(), maxFrames,
+                                          count * kNanosPerMillisecond);
+                    } else if (direction == AAUDIO_DIRECTION_OUTPUT) {
+                        AAudioStream_write(mAaudioStream, inputFloatData.data(), maxFrames,
+                                           count * kNanosPerMillisecond);
+                    }
+                }
+            },
+            [&]() {
+                AAudioStream_waitForStateChange(mAaudioStream, AAUDIO_STREAM_STATE_UNKNOWN, &state,
+                                                count * kNanosPerMillisecond);
+            },
+            [&]() { (void)AAudio_convertStreamStateToText(state); },
+            [&]() {
+                (void)AAudioStream_getState(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getUsage(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getSamplesPerFrame(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getContentType(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getInputPreset(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_isPrivacySensitive(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getAllowedCapturePolicy(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getPerformanceMode(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getDeviceId(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getSharingMode(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getSessionId(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getFramesRead(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getXRunCount(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getFramesWritten(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getBufferCapacityInFrames(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_getBufferSizeInFrames(mAaudioStream);
+            },
+            [&]() {
+                (void)AAudioStream_isMMapUsed(mAaudioStream);
+            },
+    });
+    AAudioapi();
   }
-
-  aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
-  AAudioStream_waitForStateChange(mAaudioStream, AAUDIO_STREAM_STATE_UNKNOWN,
-                                  &state, count * kNanosPerMillisecond);
-  (void)AAudio_convertStreamStateToText(state);
-
-  (void)AAudioStream_getUsage(mAaudioStream);
-  (void)AAudioStream_getSampleRate(mAaudioStream);
-  (void)AAudioStream_getState(mAaudioStream);
-  (void)AAudioStream_getSamplesPerFrame(mAaudioStream);
-  (void)AAudioStream_getContentType(mAaudioStream);
-  (void)AAudioStream_getInputPreset(mAaudioStream);
-  (void)AAudioStream_isPrivacySensitive(mAaudioStream);
-  (void)AAudioStream_getAllowedCapturePolicy(mAaudioStream);
-  (void)AAudioStream_getPerformanceMode(mAaudioStream);
-  (void)AAudioStream_getDeviceId(mAaudioStream);
-  (void)AAudioStream_getSharingMode(mAaudioStream);
-  (void)AAudioStream_getSessionId(mAaudioStream);
-  (void)AAudioStream_getFramesRead(mAaudioStream);
-  (void)AAudioStream_getFramesWritten(mAaudioStream);
-  (void)AAudioStream_getXRunCount(mAaudioStream);
-  (void)AAudioStream_getBufferCapacityInFrames(mAaudioStream);
-  (void)AAudioStream_getBufferSizeInFrames(mAaudioStream);
-  (void)AAudioStream_isMMapUsed(mAaudioStream);
-
-  AAudioStream_requestPause(mAaudioStream);
-  AAudioStream_requestFlush(mAaudioStream);
   AAudioStream_release(mAaudioStream);
-  AAudioStream_requestStop(mAaudioStream);
 }
 
 void LibAaudioFuzzer::deInit() {
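
The rewritten process() above uses a common fuzzer dispatch pattern: FuzzedDataProvider picks one std::function per iteration and kMaxRuns caps the loop. A minimal standalone sketch of the same pattern, illustrative only and not tied to the AAudio APIs:

    #include <cstdint>
    #include <functional>
    #include <fuzzer/FuzzedDataProvider.h>

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        constexpr int32_t kMaxRuns = 100;
        int32_t state = 0;

        // Bound the loop so a single large input cannot run indefinitely.
        int32_t runs = kMaxRuns;
        while (fdp.remaining_bytes() > 0 && --runs) {
            auto api = fdp.PickValueInArray<const std::function<void()>>({
                    [&]() { state = fdp.ConsumeIntegral<int32_t>(); },  // "setter"
                    [&]() { (void)state; },                             // "getter"
            });
            api();  // invoke exactly one randomly chosen API per iteration
        }
        return 0;
    }
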
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 30f451a..7882951 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -254,6 +254,9 @@
     name: "aaudio-aidl",
     unstable: true,
     local_include_dir: "binding/aidl",
+    defaults: [
+        "latest_android_media_audio_common_types_import_interface",
+    ],
     srcs: [
         "binding/aidl/aaudio/Endpoint.aidl",
         "binding/aidl/aaudio/RingBuffer.aidl",
@@ -264,7 +267,6 @@
         "binding/aidl/aaudio/IAAudioService.aidl",
     ],
     imports: [
-        "android.media.audio.common.types-V2",
         "audioclient-types-aidl",
         "shared-file-region-aidl",
         "framework-permission-aidl",
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 01e3d53..b44dc18 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -217,6 +217,7 @@
     ],
     defaults: [
         "audio_aidl_conversion_common_default",
+        "audio_aidl_conversion_common_default_cpp",
         "latest_android_media_audio_common_types_cpp_export_shared",
     ],
 }
@@ -255,8 +256,10 @@
         "aidl/android/media/IEffect.aidl",
         "aidl/android/media/IEffectClient.aidl",
     ],
+    defaults: [
+        "latest_android_media_audio_common_types_import_interface",
+    ],
     imports: [
-        "android.media.audio.common.types-V2",
         "shared-file-region-aidl",
     ],
     backend: {
@@ -309,8 +312,10 @@
         "aidl/android/media/TrackSecondaryOutputInfo.aidl",
         "aidl/android/media/SurroundSoundConfig.aidl",
     ],
+    defaults: [
+        "latest_android_media_audio_common_types_import_interface",
+    ],
     imports: [
-        "android.media.audio.common.types-V2",
         "framework-permission-aidl",
     ],
     backend: {
@@ -351,12 +356,11 @@
         "aidl/android/media/AudioVolumeGroup.aidl",
         "aidl/android/media/DeviceRole.aidl",
         "aidl/android/media/SoundTriggerSession.aidl",
-        "aidl/android/media/SpatializationLevel.aidl",
-        "aidl/android/media/SpatializationMode.aidl",
-        "aidl/android/media/SpatializerHeadTrackingMode.aidl",
+    ],
+    defaults: [
+        "latest_android_media_audio_common_types_import_interface",
     ],
     imports: [
-        "android.media.audio.common.types-V2",
         "audioclient-types-aidl",
     ],
     backend: {
@@ -402,8 +406,10 @@
         "aidl/android/media/ISoundDoseCallback.aidl",
         "aidl/android/media/SoundDoseRecord.aidl",
     ],
+    defaults: [
+        "latest_android_media_audio_common_types_import_interface",
+    ],
     imports: [
-        "android.media.audio.common.types-V2",
         "audioclient-types-aidl",
         "av-types-aidl",
         "effect-aidl",
@@ -439,8 +445,10 @@
         "aidl/android/media/IAudioPolicyService.aidl",
         "aidl/android/media/IAudioPolicyServiceClient.aidl",
     ],
+    defaults: [
+        "latest_android_media_audio_common_types_import_interface",
+    ],
     imports: [
-        "android.media.audio.common.types-V2",
         "audioclient-types-aidl",
         "audiopolicy-types-aidl",
         "capture_state_listener-aidl",
@@ -474,6 +482,9 @@
         "aidl/android/media/ISpatializer.aidl",
         "aidl/android/media/ISpatializerHeadTrackingCallback.aidl",
     ],
+    defaults: [
+        "latest_android_media_audio_common_types_import_interface",
+    ],
     imports: [
         "audiopolicy-types-aidl",
     ],
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 2870c4c..a7adfbd 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -366,14 +366,15 @@
         return mStatus;
     }
 
+    std::unique_lock ul(mLock, std::defer_lock);
     if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) {
+        ul.lock();
         if (mEnabled == (cmdCode == EFFECT_CMD_ENABLE)) {
             return NO_ERROR;
         }
         if (replySize == nullptr || *replySize != sizeof(status_t) || replyData == nullptr) {
             return BAD_VALUE;
         }
-        mLock.lock();
     }
 
     std::vector<uint8_t> data;
@@ -398,7 +399,6 @@
         if (status == NO_ERROR) {
             mEnabled = (cmdCode == EFFECT_CMD_ENABLE);
         }
-        mLock.unlock();
     }
 
     return status;
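
The AudioEffect change above replaces manual mLock.lock()/mLock.unlock() calls with a deferred std::unique_lock, so every return path releases the lock automatically. A small standalone sketch of the idiom, with hypothetical names rather than the AudioEffect code:

    #include <mutex>

    namespace demo {

    std::mutex gLock;
    bool gEnabled = false;

    int setEnabled(bool enable, bool touchesSharedState) {
        // Declare the lock up front but do not acquire it yet.
        std::unique_lock ul(gLock, std::defer_lock);
        if (touchesSharedState) {
            ul.lock();
            if (gEnabled == enable) {
                return 0;  // early return: ul's destructor releases the lock
            }
        }
        // ... issue the command ...
        if (touchesSharedState) {
            gEnabled = enable;
        }
        return 0;  // the lock, if taken, is released here as well
    }

    }  // namespace demo
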
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 073a030..91bc700 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -68,10 +68,9 @@
     }
 
     // We double the size of input buffer for ping pong use of record buffer.
-    // Assumes audio_is_linear_pcm(format)
-    const auto sampleSize = audio_channel_count_from_in_mask(channelMask) *
-                                      audio_bytes_per_sample(format);
-    if (sampleSize == 0 || ((*frameCount = (size * 2) / sampleSize) == 0)) {
+    const auto frameSize = audio_bytes_per_frame(
+            audio_channel_count_from_in_mask(channelMask), format);
+    if (frameSize == 0 || ((*frameCount = (size * 2) / frameSize) == 0)) {
         ALOGE("%s(): Unsupported configuration: sampleRate %u, format %#x, channelMask %#x",
                 __func__, sampleRate, format, channelMask);
         return BAD_VALUE;
@@ -353,12 +352,7 @@
     }
 
     mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
-
-    if (audio_is_linear_pcm(mFormat)) {
-        mFrameSize = mChannelCount * audio_bytes_per_sample(mFormat);
-    } else {
-        mFrameSize = sizeof(uint8_t);
-    }
+    mFrameSize = audio_bytes_per_frame(mChannelCount, mFormat);
 
     // mFrameCount is initialized in createRecord_l
     mReqFrameCount = frameCount;
@@ -1231,8 +1225,12 @@
         }
 
         size_t bytesRead = audioBuffer.frameCount * mFrameSize;
-        memcpy_by_audio_format(buffer, mFormat, audioBuffer.raw, mServerConfig.format,
-                               audioBuffer.mSize / mServerSampleSize);
+        if (audio_is_linear_pcm(mFormat)) {
+            memcpy_by_audio_format(buffer, mFormat, audioBuffer.raw, mServerConfig.format,
+                                audioBuffer.mSize / mServerSampleSize);
+        } else {
+            memcpy(buffer, audioBuffer.raw, audioBuffer.mSize);
+        }
         buffer = ((char *) buffer) + bytesRead;
         userSize -= bytesRead;
         read += bytesRead;
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 6616197..5b94845 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -108,7 +108,7 @@
 }
 
 // establish binder interface to AudioFlinger service
-const sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
+const sp<IAudioFlinger> AudioSystem::getAudioFlingerImpl(bool canStartThreadPool = true) {
     sp<IAudioFlinger> af;
     sp<AudioFlingerClient> afc;
     bool reportNoError = false;
@@ -147,7 +147,9 @@
         afc = gAudioFlingerClient;
         af = gAudioFlinger;
         // Make sure callbacks can be received by gAudioFlingerClient
-        ProcessState::self()->startThreadPool();
+        if (canStartThreadPool) {
+            ProcessState::self()->startThreadPool();
+        }
     }
     const int64_t token = IPCThreadState::self()->clearCallingIdentity();
     af->registerClient(afc);
@@ -156,6 +158,14 @@
     return af;
 }
 
+const sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
+    return getAudioFlingerImpl();
+}
+
+const sp<IAudioFlinger> AudioSystem::get_audio_flinger_for_fuzzer() {
+    return getAudioFlingerImpl(false);
+}
+
 const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
     // calling get_audio_flinger() will initialize gAudioFlingerClient if needed
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index f3539a1..ae37152 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -594,18 +594,13 @@
     channelCount = audio_channel_count_from_out_mask(channelMask);
     mChannelCount = channelCount;
 
-    if (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) {
-        if (audio_has_proportional_frames(format)) {
-            mFrameSize = channelCount * audio_bytes_per_sample(format);
-        } else {
-            mFrameSize = sizeof(uint8_t);
-        }
-    } else {
-        ALOG_ASSERT(audio_has_proportional_frames(format));
-        mFrameSize = channelCount * audio_bytes_per_sample(format);
+    if (!(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) {
         // createTrack will return an error if PCM format is not supported by server,
         // so no need to check for specific PCM formats here
+        ALOGW_IF(!audio_has_proportional_frames(format), "%s(): no direct flag for format 0x%x",
+            __func__, format);
     }
+    mFrameSize = audio_bytes_per_frame(channelCount, format);
 
     // sampling rate must be specified for direct outputs
     if (sampleRate == 0 && (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
@@ -1709,10 +1704,14 @@
             __func__, mPortId, deviceId, mSelectedDeviceId, mRoutedDeviceId);
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
-        if (mStatus == NO_ERROR && mSelectedDeviceId != mRoutedDeviceId) {
+        if (mStatus == NO_ERROR) {
+            // allow track invalidation when track is not playing to propagate
+            // the updated mSelectedDeviceId
             if (isPlaying_l()) {
-                android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
-                mProxy->interrupt();
+                if (mSelectedDeviceId != mRoutedDeviceId) {
+                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                    mProxy->interrupt();
+                }
             } else {
                 // if the track is idle, try to restore now and
                 // defer to next start if not possible
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 4bd12b8..01edf72 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -898,6 +898,22 @@
     return NO_ERROR;
 }
 
+status_t AudioFlingerClientAdapter::getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                                    struct audio_port_v7 *mixPort) const {
+    if (devicePort == nullptr || mixPort == nullptr) {
+        return BAD_VALUE;
+    }
+    media::AudioPortFw devicePortAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_v7_AudioPortFw(*devicePort));
+    media::AudioPortFw mixPortAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_v7_AudioPortFw(*mixPort));
+    media::AudioPortFw aidlRet;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->getAudioMixPort(devicePortAidl, mixPortAidl, &aidlRet)));
+    *mixPort = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortFw_audio_port_v7(aidlRet));
+    return OK;
+}
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
@@ -1444,4 +1460,16 @@
     return Status::fromStatusT(mDelegate->getAudioPolicyConfig(_aidl_return));
 }
 
+Status AudioFlingerServerAdapter::getAudioMixPort(const media::AudioPortFw &devicePort,
+                                                  const media::AudioPortFw &mixPort,
+                                                  media::AudioPortFw *_aidl_return) {
+    audio_port_v7 devicePortLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioPortFw_audio_port_v7(devicePort));
+    audio_port_v7 mixPortLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioPortFw_audio_port_v7(mixPort));
+    RETURN_BINDER_IF_ERROR(mDelegate->getAudioMixPort(&devicePortLegacy, &mixPortLegacy));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_port_v7_AudioPortFw(mixPortLegacy));
+    return Status::ok();
+}
+
 } // namespace android
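
A hypothetical client-side use of the new getAudioMixPort() adapter method; this is a sketch only, where `af` and `devicePort` are assumed to come from the usual AudioSystem / listAudioPorts() plumbing (not shown):

    #include <media/IAudioFlinger.h>
    #include <system/audio.h>

    // Ask AudioFlinger which mix port attributes it would use when connecting
    // to 'devicePort'.
    android::status_t queryMixPortFor(const android::sp<android::IAudioFlinger>& af,
                                      const struct audio_port_v7& devicePort) {
        struct audio_port_v7 mixPort = {};
        android::status_t status = af->getAudioMixPort(&devicePort, &mixPort);
        if (status == android::OK) {
            // mixPort now describes the mix port AudioFlinger paired with devicePort.
        }
        return status;
    }
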
diff --git a/media/libaudioclient/OWNERS b/media/libaudioclient/OWNERS
index 034d161..afc4d9b 100644
--- a/media/libaudioclient/OWNERS
+++ b/media/libaudioclient/OWNERS
@@ -1,4 +1,6 @@
-gkasten@google.com
+# Bug component: 48436
+atneya@google.com
 hunga@google.com
 jmtrivi@google.com
 mnaganov@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index 60bb4f0..234e858 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -43,8 +43,17 @@
     }
   ],
   "postsubmit": [
+  // TODO(b/302036943): Enable once we make it pass with AIDL HAL on CF.
+  //   {
+  //      "name": "audioeffect_analysis"
+  //   },
     {
-       "name": "audioeffect_analysis"
+      "name": "CtsVirtualDevicesTestCases",
+      "options" : [
+        {
+          "include-filter": "android.virtualdevice.cts.VirtualAudioTest"
+        }
+      ]
     }
   ]
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 6412810..31d3af5 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -288,6 +288,11 @@
      */
     AudioPolicyConfig getAudioPolicyConfig();
 
+    /**
+     * Get the attributes of the mix port when connecting to the given device port.
+     */
+    AudioPortFw getAudioMixPort(in AudioPortFw devicePort, in AudioPortFw mixPort);
+
     // When adding a new method, please review and update
     // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
     // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
diff --git a/media/libaudioclient/aidl/android/media/INativeSpatializerCallback.aidl b/media/libaudioclient/aidl/android/media/INativeSpatializerCallback.aidl
index 88b8108..8b30b29 100644
--- a/media/libaudioclient/aidl/android/media/INativeSpatializerCallback.aidl
+++ b/media/libaudioclient/aidl/android/media/INativeSpatializerCallback.aidl
@@ -16,8 +16,7 @@
 
 package android.media;
 
-import android.media.SpatializationLevel;
-import android.media.SpatializerHeadTrackingMode;
+import android.media.audio.common.Spatialization;
 
 /**
  * The INativeSpatializerCallback interface is a callback associated to the
@@ -30,7 +29,7 @@
     /** Called when the spatialization level applied by the spatializer changes
      * (e.g. when the spatializer is enabled or disabled)
      */
-    void onLevelChanged(SpatializationLevel level);
+    void onLevelChanged(Spatialization.Level level);
 
     /** Called when the output stream the Spatializer is attached to changes.
      * Indicates the IO Handle of the new output.
diff --git a/media/libaudioclient/aidl/android/media/ISoundDose.aidl b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
index 6cb22ef..d80b6bf 100644
--- a/media/libaudioclient/aidl/android/media/ISoundDose.aidl
+++ b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
@@ -55,6 +55,30 @@
      */
     oneway void setCsdEnabled(boolean enabled);
 
+    /**
+     * Structure containing a device identifier by address and type together with
+     * the categorization whether it is a headphone or not.
+     */
+    @JavaDerive(toString = true)
+    parcelable AudioDeviceCategory {
+        @utf8InCpp String address;
+        int internalAudioType;
+        boolean csdCompatible;
+    }
+
+    /**
+     * Resets the list of stored device categories for the native layer. Should
+     * only be called once at boot time after parsing the existing AudioDeviceCategories.
+     */
+    oneway void initCachedAudioDeviceCategories(in AudioDeviceCategory[] audioDevices);
+
+    /**
+     * Sets whether a device for a given address and type is a headphone or not.
+     * This is used to determine whether we compute the CSD on the given device
+     * since we can not rely completely on the device annotations.
+     */
+    oneway void setAudioDeviceCategory(in AudioDeviceCategory audioDevice);
+
     /* -------------------------- Test API methods --------------------------
     /** Get the currently used RS2 upper bound. */
     float getOutputRs2UpperBound();
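
A hypothetical caller sketch for the AudioDeviceCategory additions above, assuming the CPP-backend generated header path and that `soundDose` was obtained from AudioFlinger (e.g. via its sound dose interface); the address and device type are placeholder example values:

    #include <android/media/ISoundDose.h>
    #include <system/audio.h>
    #include <utils/StrongPointer.h>

    void markDeviceAsHeadphones(const android::sp<android::media::ISoundDose>& soundDose) {
        android::media::ISoundDose::AudioDeviceCategory category;
        category.address = "00:11:22:33:44:55";                        // example BT address
        category.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;  // audio_devices_t value
        category.csdCompatible = true;  // treat this device as headphone-like for CSD
        soundDose->setAudioDeviceCategory(category);
    }
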
diff --git a/media/libaudioclient/aidl/android/media/ISpatializer.aidl b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
index 250c450..37dd776 100644
--- a/media/libaudioclient/aidl/android/media/ISpatializer.aidl
+++ b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
@@ -16,11 +16,9 @@
 
 package android.media;
 
+import android.media.audio.common.HeadTracking;
+import android.media.audio.common.Spatialization;
 import android.media.ISpatializerHeadTrackingCallback;
-import android.media.SpatializationLevel;
-import android.media.SpatializationMode;
-import android.media.SpatializerHeadTrackingMode;
-
 
 /**
  * The ISpatializer interface is used to control the native audio service implementation
@@ -34,21 +32,25 @@
     /** Releases a ISpatializer interface previously acquired. */
     void release();
 
-    /** Reports the list of supported spatialization levels (see SpatializationLevel.aidl).
+    /**
+     * Reports the list of supported spatialization levels.
      * The list should never be empty if an ISpatializer interface was successfully
      * retrieved with IAudioPolicyService.getSpatializer().
      */
-    SpatializationLevel[] getSupportedLevels();
+    Spatialization.Level[] getSupportedLevels();
 
-    /** Selects the desired spatialization level (see SpatializationLevel.aidl). Selecting a level
-     * different from SpatializationLevel.NONE with create the specialized multichannel output
+    /**
+     * Selects the desired spatialization level. Selecting a level
+     * different from Spatialization.Level.NONE will create the specialized multichannel output
      * mixer, create and enable the spatializer effect and let the audio policy attach eligible
      * AudioTrack to this output stream.
      */
-    void setLevel(SpatializationLevel level);
+    void setLevel(Spatialization.Level level);
 
-    /** Gets the selected spatialization level (see SpatializationLevel.aidl) */
-    SpatializationLevel getLevel();
+    /**
+     * Gets the selected spatialization level.
+     */
+    Spatialization.Level getLevel();
 
     /** Reports if the spatializer engine supports head tracking or not.
      * This is a pre condition independent of the fact that a head tracking sensor is
@@ -56,26 +58,33 @@
      */
     boolean isHeadTrackingSupported();
 
-    /** Reports the list of supported head tracking modes (see SpatializerHeadTrackingMode.aidl).
+    /**
+     * Reports the list of supported head tracking modes.
      * The list always contains SpatializerHeadTrackingMode.DISABLED and can include other modes
      * if the spatializer effect implementation supports head tracking.
      * The result does not depend on currently connected sensors but reflects the capabilities
      * when sensors are available.
      */
-    SpatializerHeadTrackingMode[] getSupportedHeadTrackingModes();
+    HeadTracking.Mode[] getSupportedHeadTrackingModes();
 
-    /** Selects the desired head tracking mode (see SpatializerHeadTrackingMode.aidl) */
-    void setDesiredHeadTrackingMode(SpatializerHeadTrackingMode mode);
+    /**
+     * Selects the desired head tracking mode.
+     */
+    void setDesiredHeadTrackingMode(HeadTracking.Mode mode);
 
-    /** Gets the actual head tracking mode. Can be different from the desired mode if conditions to
+    /**
+     * Gets the actual head tracking mode. Can be different from the desired mode if conditions to
      * enable the desired mode are not met (e.g if the head tracking device was removed)
      */
-    SpatializerHeadTrackingMode getActualHeadTrackingMode();
+    HeadTracking.Mode getActualHeadTrackingMode();
 
-    /** Reset the head tracking algorithm to consider current head pose as neutral */
+    /**
+     * Reset the head tracking algorithm to consider current head pose as neutral
+     */
     void recenterHeadTracker();
 
-    /** Set the screen to stage transform to use by the head tracking algorithm
+    /**
+     * Set the screen to stage transform to be used by the head tracking algorithm
      * The screen to stage transform is conveyed as a vector of 6 elements,
      * where the first three are a translation vector and
      * the last three are a rotation vector.
@@ -123,11 +132,12 @@
      */
     void setFoldState(boolean folded);
 
-    /** Reports the list of supported spatialization modess (see SpatializationMode.aidl).
+    /**
+     * Reports the list of supported spatialization modes.
      * The list should never be empty if an ISpatializer interface was successfully
      * retrieved with IAudioPolicyService.getSpatializer().
      */
-    SpatializationMode[] getSupportedModes();
+    Spatialization.Mode[] getSupportedModes();
 
     /**
      * Registers a callback to monitor head tracking functions.
diff --git a/media/libaudioclient/aidl/android/media/ISpatializerHeadTrackingCallback.aidl b/media/libaudioclient/aidl/android/media/ISpatializerHeadTrackingCallback.aidl
index 23d5e13..615b971 100644
--- a/media/libaudioclient/aidl/android/media/ISpatializerHeadTrackingCallback.aidl
+++ b/media/libaudioclient/aidl/android/media/ISpatializerHeadTrackingCallback.aidl
@@ -16,8 +16,7 @@
 
 package android.media;
 
-import android.media.SpatializationLevel;
-import android.media.SpatializerHeadTrackingMode;
+import android.media.audio.common.HeadTracking;
 
 /**
  * The ISpatializerHeadTrackingCallback interface is a callback associated to the
@@ -28,7 +27,7 @@
 oneway interface ISpatializerHeadTrackingCallback {
     /** Called when the head tracking mode has changed
      */
-    void onHeadTrackingModeChanged(SpatializerHeadTrackingMode mode);
+    void onHeadTrackingModeChanged(HeadTracking.Mode mode);
 
     /** Called when the head to stage pose hase been updated
      * The head to stage pose is conveyed as a vector of 6 elements,
diff --git a/media/libaudioclient/aidl/android/media/SpatializationLevel.aidl b/media/libaudioclient/aidl/android/media/SpatializationLevel.aidl
deleted file mode 100644
index 961c5a1..0000000
--- a/media/libaudioclient/aidl/android/media/SpatializationLevel.aidl
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * The spatialization level supported by the spatializer stage effect implementation.
- * Used by methods of the ISpatializer interface.
- * {@hide}
- */
-@Backing(type="byte")
-enum SpatializationLevel {
-    /** Spatialization is disabled. */
-    NONE = 0,
-    /** The spatializer accepts audio with positional multichannel masks (e.g 5.1). */
-    SPATIALIZER_MULTICHANNEL = 1,
-    /** The spatializer accepts audio made of a channel bed of positional multichannels (e.g 5.1)
-     * and audio objects positioned independently via meta data.
-     */
-    SPATIALIZER_MCHAN_BED_PLUS_OBJECTS = 2,
-}
diff --git a/media/libaudioclient/aidl/android/media/SpatializationMode.aidl b/media/libaudioclient/aidl/android/media/SpatializationMode.aidl
deleted file mode 100644
index eaaff37..0000000
--- a/media/libaudioclient/aidl/android/media/SpatializationMode.aidl
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * The spatialization mode supported by the spatializer stage effect implementation.
- * Used by methods of the ISpatializer interface.
- * {@hide}
- */
-@Backing(type="byte")
-enum SpatializationMode {
-    /** The spatializer supports binaural mode (over headphones type devices). */
-    SPATIALIZER_BINAURAL = 0,
-    /** The spatializer supports transaural mode (over speaker type devices). */
-    SPATIALIZER_TRANSAURAL = 1,
-}
diff --git a/media/libaudioclient/aidl/android/media/SpatializerHeadTrackingMode.aidl b/media/libaudioclient/aidl/android/media/SpatializerHeadTrackingMode.aidl
deleted file mode 100644
index 58e0f61..0000000
--- a/media/libaudioclient/aidl/android/media/SpatializerHeadTrackingMode.aidl
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-
-/**
- * The head tracking mode supported by the spatializer effect implementation.
- * Used by methods of the ISpatializer interface.
- * {@hide}
- */
-@Backing(type="byte")
-enum SpatializerHeadTrackingMode {
-    /** Head tracking is active in a mode not listed below (forward compatibility) */
-    OTHER = 0,
-    /** Head tracking is disabled */
-    DISABLED = 1,
-    /** Head tracking is performed relative to the real work environment */
-    RELATIVE_WORLD = 2,
-    /** Head tracking is performed relative to the device's screen */
-    RELATIVE_SCREEN = 3,
-}
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
new file mode 100644
index 0000000..6093933
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_defaults {
+    name: "libaudioclient_aidl_fuzzer_defaults",
+    static_libs: [
+        "android.hardware.audio.common@7.0-enums",
+        "effect-aidl-cpp",
+        "libcgrouprc",
+        "libcgrouprc_format",
+        "libfakeservicemanager",
+        "libjsoncpp",
+        "liblog",
+        "libmediametricsservice",
+        "libmedia_helper",
+        "libprocessgroup",
+        "shared-file-region-aidl-cpp",
+    ],
+    shared_libs: [
+        "android.hardware.audio.common-util",
+        "audioclient-types-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "capture_state_listener-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudioclient",
+        "audioflinger-aidl-cpp",
+        "libaudioflinger",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
+        "libaudiomanager",
+        "libaudiopolicy",
+        "libaudioutils",
+        "libaudiopolicyservice",
+        "libaudiopolicymanagerdefault",
+        "libaudiohal",
+        "libaudioprocessing",
+        "libactivitymanager_aidl",
+        "libdl",
+        "libheadtracking",
+        "libmediautils",
+        "libmediametrics",
+        "libnblog",
+        "libnbaio",
+        "libpowermanager",
+        "libvibrator",
+        "libvndksupport",
+        "libxml2",
+        "mediametricsservice-aidl-cpp",
+        "packagemanager_aidl-cpp",
+    ],
+    header_libs: [
+        "libaudiopolicymanager_interface_headers",
+        "libaudiofoundation_headers",
+        "libaudiohal_headers",
+        "libaudioflinger_headers",
+        "libbinder_headers",
+        "libmedia_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libaudioflinger",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
+}
+
+cc_fuzz {
+    name: "audioflinger_aidl_fuzzer",
+    srcs: ["audioflinger_aidl_fuzzer.cpp"],
+    defaults: [
+        "libaudioclient_aidl_fuzzer_defaults",
+        "service_fuzzer_defaults"
+    ],
+}
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
new file mode 100644
index 0000000..f99cc3b
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <AudioFlinger.h>
+#include <android-base/logging.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_process.h>
+#include <android/media/IAudioPolicyService.h>
+#include <fakeservicemanager/FakeServiceManager.h>
+#include <fuzzbinder/libbinder_driver.h>
+#include <fuzzbinder/random_binder.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/IAudioFlinger.h>
+#include <service/AudioPolicyService.h>
+
+using namespace android;
+using namespace android::binder;
+using android::fuzzService;
+
+[[clang::no_destroy]] static std::once_flag gSmOnce;
+sp<FakeServiceManager> gFakeServiceManager;
+
+bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
+                FuzzedDataProvider& fdp) {
+    sp<IBinder> binder = getRandomBinder(&fdp);
+    if (binder == nullptr) {
+        return false;
+    }
+    CHECK_EQ(NO_ERROR, fakeServiceManager->addService(serviceName, binder));
+    return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+
+    std::call_once(gSmOnce, [&] {
+        /* Create a FakeServiceManager instance and add required services */
+        gFakeServiceManager = sp<FakeServiceManager>::make();
+        setDefaultServiceManager(gFakeServiceManager);
+    });
+    gFakeServiceManager->clear();
+
+    for (const char* service :
+         {"activity", "sensor_privacy", "permission", "scheduling_policy",
+          "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+        if (!addService(String16(service), gFakeServiceManager, fdp)) {
+            return 0;
+        }
+    }
+
+    const auto audioFlinger = sp<AudioFlinger>::make();
+    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+
+    CHECK_EQ(NO_ERROR,
+             gFakeServiceManager->addService(
+                     String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
+                     false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+
+    AudioSystem::get_audio_flinger_for_fuzzer();
+    const auto audioPolicyService = sp<AudioPolicyService>::make();
+
+    CHECK_EQ(NO_ERROR,
+             gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+                                             false /* allowIsolated */,
+                                             IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+
+    sp<IBinder> audioFlingerServiceBinder =
+            gFakeServiceManager->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+    sp<media::IAudioFlingerService> audioFlingerService =
+            interface_cast<media::IAudioFlingerService>(audioFlingerServiceBinder);
+
+    fuzzService(media::IAudioFlingerService::asBinder(audioFlingerService), std::move(fdp));
+
+    return 0;
+}
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 8f8c9dd..0215f3c 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -178,6 +178,7 @@
 
     // helper function to obtain AudioFlinger service handle
     static const sp<IAudioFlinger> get_audio_flinger();
+    static const sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
 
     static float linearToLog(int volume);
     static int logToLinear(float volume);
@@ -876,6 +877,7 @@
     static audio_io_handle_t getOutput(audio_stream_type_t stream);
     static const sp<AudioFlingerClient> getAudioFlingerClient();
     static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+    static const sp<IAudioFlinger> getAudioFlingerImpl(bool canStartThreadPool);
 
     // Invokes all registered error callbacks with the given error code.
     static void reportError(status_t err);
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 3c96862..eb27e25 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -384,6 +384,9 @@
     virtual status_t supportsBluetoothVariableLatency(bool* support) const = 0;
 
     virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) = 0;
+
+    virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                     struct audio_port_v7 *mixPort) const = 0;
 };
 
 /**
@@ -498,6 +501,8 @@
                                    sp<media::ISoundDose>* soundDose) const override;
     status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
     status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) override;
+    status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
+                             struct audio_port_v7 *mixPort) const override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -599,6 +604,7 @@
             INVALIDATE_TRACKS = media::BnAudioFlingerService::TRANSACTION_invalidateTracks,
             GET_AUDIO_POLICY_CONFIG =
                     media::BnAudioFlingerService::TRANSACTION_getAudioPolicyConfig,
+            GET_AUDIO_MIX_PORT = media::BnAudioFlingerService::TRANSACTION_getAudioMixPort,
         };
 
     protected:
@@ -732,6 +738,9 @@
                                  sp<media::ISoundDose>* _aidl_return) override;
     Status invalidateTracks(const std::vector<int32_t>& portIds) override;
     Status getAudioPolicyConfig(media::AudioPolicyConfig* _aidl_return) override;
+    Status getAudioMixPort(const media::AudioPortFw& devicePort,
+                           const media::AudioPortFw& mixPort,
+                           media::AudioPortFw* _aidl_return) override;
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
 };
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 9a46b20..7f55e48 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -43,7 +43,9 @@
 using media::audio::common::AudioGain;
 using media::audio::common::AudioGainConfig;
 using media::audio::common::AudioGainMode;
+using media::audio::common::AudioInputFlags;
 using media::audio::common::AudioIoFlags;
+using media::audio::common::AudioOutputFlags;
 using media::audio::common::AudioPortDeviceExt;
 using media::audio::common::AudioProfile;
 using media::audio::common::AudioStandard;
@@ -831,3 +833,27 @@
     EXPECT_EQ(MicrophoneDynamicInfo::ChannelMapping::UNUSED, aidl.dynamic.channelMapping[2]);
     EXPECT_EQ(MicrophoneDynamicInfo::ChannelMapping::PROCESSED, aidl.dynamic.channelMapping[3]);
 }
+
+TEST(AudioInputFlags, Aidl2Legacy2Aidl) {
+    for (auto flag : enum_range<AudioInputFlags>()) {
+        int32_t aidlMask = 1 << static_cast<int32_t>(flag);
+        auto convMask = aidl2legacy_int32_t_audio_input_flags_t_mask(aidlMask);
+        ASSERT_TRUE(convMask.ok());
+        ASSERT_EQ(1, __builtin_popcount(convMask.value()));
+        auto convFlag = legacy2aidl_audio_input_flags_t_AudioInputFlags(convMask.value());
+        ASSERT_TRUE(convFlag.ok());
+        EXPECT_EQ(flag, convFlag.value());
+    }
+}
+
+TEST(AudioOutputFlags, Aidl2Legacy2Aidl) {
+    for (auto flag : enum_range<AudioOutputFlags>()) {
+        int32_t aidlMask = 1 << static_cast<int32_t>(flag);
+        auto convMask = aidl2legacy_int32_t_audio_output_flags_t_mask(aidlMask);
+        ASSERT_TRUE(convMask.ok());
+        ASSERT_EQ(1, __builtin_popcount(convMask.value()));
+        auto convFlag = legacy2aidl_audio_output_flags_t_AudioOutputFlags(convMask.value());
+        ASSERT_TRUE(convFlag.ok());
+        EXPECT_EQ(flag, convFlag.value());
+    }
+}
diff --git a/media/libaudioclient/tests/audioeffect_tests.cpp b/media/libaudioclient/tests/audioeffect_tests.cpp
index e6149e4..e12ae23 100644
--- a/media/libaudioclient/tests/audioeffect_tests.cpp
+++ b/media/libaudioclient/tests/audioeffect_tests.cpp
@@ -80,7 +80,7 @@
     uint32_t numEffects = AudioEffect::kMaxPreProcessing;
     status_t ret = AudioEffect::queryDefaultPreProcessing(audioRecord->getSessionId(), descriptors,
                                                           &numEffects);
-    if (ret != OK) {
+    if (ret != OK || numEffects > AudioEffect::kMaxPreProcessing) {
         return false;
     }
     for (int i = 0; i < numEffects; i++) {
@@ -247,11 +247,14 @@
             ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
             EXPECT_EQ(NO_ERROR, capture->create());
             EXPECT_EQ(NO_ERROR, capture->start());
+            ASSERT_NE(capture->getAudioRecordHandle(), nullptr);
             if (!isEffectDefaultOnRecord(&descriptors[i].type, &descriptors[i].uuid,
                                          capture->getAudioRecordHandle())) {
                 selectedEffect = i;
+                EXPECT_EQ(OK, capture->stop());
                 break;
             }
+            EXPECT_EQ(OK, capture->stop());
         }
     }
     if (selectedEffect == -1) GTEST_SKIP() << " expected at least one preprocessing effect";
@@ -263,6 +266,7 @@
     ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
     EXPECT_EQ(NO_ERROR, capture->create());
     EXPECT_EQ(NO_ERROR, capture->start());
+    ASSERT_NE(capture->getAudioRecordHandle(), nullptr);
     EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                          capture->getAudioRecordHandle()))
             << "Effect should not have been default on record. " << type;
@@ -285,6 +289,7 @@
     ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
     EXPECT_EQ(NO_ERROR, capture->create());
     EXPECT_EQ(NO_ERROR, capture->start());
+    ASSERT_NE(capture->getAudioRecordHandle(), nullptr);
     EXPECT_TRUE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                         capture->getAudioRecordHandle()))
             << "Effect should have been default on record. " << type;
@@ -302,6 +307,7 @@
     ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
     EXPECT_EQ(NO_ERROR, capture->create());
     EXPECT_EQ(NO_ERROR, capture->start());
+    ASSERT_NE(capture->getAudioRecordHandle(), nullptr);
     EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                          capture->getAudioRecordHandle()))
             << "Effect should not have been default on record. " << type;
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index 8c63a6d..61edd4d 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -14,9 +14,13 @@
  * limitations under the License.
  */
 
+#include <sstream>
+
 //#define LOG_NDEBUG 0
 #define LOG_TAG "AudioRecordTest"
 
+#include <android-base/logging.h>
+#include <binder/ProcessState.h>
 #include <gtest/gtest.h>
 
 #include "audio_test_utils.h"
@@ -25,32 +29,40 @@
 
 class AudioRecordTest : public ::testing::Test {
   public:
-    virtual void SetUp() override {
+    void SetUp() override {
         mAC = new AudioCapture(AUDIO_SOURCE_DEFAULT, 44100, AUDIO_FORMAT_PCM_16_BIT,
                                AUDIO_CHANNEL_IN_FRONT);
         ASSERT_NE(nullptr, mAC);
         ASSERT_EQ(OK, mAC->create()) << "record creation failed";
     }
 
-    virtual void TearDown() override {
+    void TearDown() override {
         if (mAC) ASSERT_EQ(OK, mAC->stop());
     }
 
     sp<AudioCapture> mAC;
 };
 
-class AudioRecordCreateTest
-    : public ::testing::TestWithParam<
-              std::tuple<uint32_t, audio_format_t, audio_channel_mask_t, audio_input_flags_t,
-                         audio_session_t, audio_source_t>> {
+using RecordCreateTestParam = std::tuple<uint32_t, audio_format_t, audio_channel_mask_t,
+                                         audio_input_flags_t, audio_session_t, audio_source_t>;
+enum {
+    RECORD_PARAM_SAMPLE_RATE,
+    RECORD_PARAM_FORMAT,
+    RECORD_PARAM_CHANNEL_MASK,
+    RECORD_PARAM_FLAGS,
+    RECORD_PARAM_SESSION_ID,
+    RECORD_PARAM_INPUT_SOURCE
+};
+
+class AudioRecordCreateTest : public ::testing::TestWithParam<RecordCreateTestParam> {
   public:
     AudioRecordCreateTest()
-        : mSampleRate(std::get<0>(GetParam())),
-          mFormat(std::get<1>(GetParam())),
-          mChannelMask(std::get<2>(GetParam())),
-          mFlags(std::get<3>(GetParam())),
-          mSessionId(std::get<4>(GetParam())),
-          mInputSource(std::get<5>(GetParam())){};
+        : mSampleRate(std::get<RECORD_PARAM_SAMPLE_RATE>(GetParam())),
+          mFormat(std::get<RECORD_PARAM_FORMAT>(GetParam())),
+          mChannelMask(std::get<RECORD_PARAM_CHANNEL_MASK>(GetParam())),
+          mFlags(std::get<RECORD_PARAM_FLAGS>(GetParam())),
+          mSessionId(std::get<RECORD_PARAM_SESSION_ID>(GetParam())),
+          mInputSource(std::get<RECORD_PARAM_INPUT_SOURCE>(GetParam())){};
 
     const uint32_t mSampleRate;
     const audio_format_t mFormat;
@@ -62,14 +74,14 @@
 
     sp<AudioCapture> mAC;
 
-    virtual void SetUp() override {
+    void SetUp() override {
         mAC = new AudioCapture(mInputSource, mSampleRate, mFormat, mChannelMask, mFlags, mSessionId,
                                mTransferType);
         ASSERT_NE(nullptr, mAC);
         ASSERT_EQ(OK, mAC->create()) << "record creation failed";
     }
 
-    virtual void TearDown() override {
+    void TearDown() override {
         if (mAC) ASSERT_EQ(OK, mAC->stop());
     }
 };
@@ -197,6 +209,18 @@
     EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
 }
 
+static std::string GetRecordTestName(const testing::TestParamInfo<RecordCreateTestParam>& info) {
+    const auto& p = info.param;
+    std::ostringstream s;
+    s << std::get<RECORD_PARAM_SAMPLE_RATE>(p) << "_"
+      << audio_format_to_string(std::get<RECORD_PARAM_FORMAT>(p)) << "__"
+      << audio_channel_mask_to_string(std::get<RECORD_PARAM_CHANNEL_MASK>(p)) << "__"
+      << "Flags_0x" << std::hex << std::get<RECORD_PARAM_FLAGS>(p) << std::dec << "__"
+      << "Session_" << std::get<RECORD_PARAM_SESSION_ID>(p) << "__"
+      << audio_source_to_string(std::get<RECORD_PARAM_INPUT_SOURCE>(p));
+    return s.str();
+}
+
 // for port primary input
 INSTANTIATE_TEST_SUITE_P(AudioRecordPrimaryInput, AudioRecordCreateTest,
                          ::testing::Combine(::testing::Values(8000, 11025, 12000, 16000, 22050,
@@ -207,7 +231,8 @@
                                                               AUDIO_CHANNEL_IN_FRONT_BACK),
                                             ::testing::Values(AUDIO_INPUT_FLAG_NONE),
                                             ::testing::Values(AUDIO_SESSION_NONE),
-                                            ::testing::Values(AUDIO_SOURCE_DEFAULT)));
+                                            ::testing::Values(AUDIO_SOURCE_DEFAULT)),
+                         GetRecordTestName);
 
 // for port fast input
 INSTANTIATE_TEST_SUITE_P(AudioRecordFastInput, AudioRecordCreateTest,
@@ -219,7 +244,8 @@
                                                               AUDIO_CHANNEL_IN_FRONT_BACK),
                                             ::testing::Values(AUDIO_INPUT_FLAG_FAST),
                                             ::testing::Values(AUDIO_SESSION_NONE),
-                                            ::testing::Values(AUDIO_SOURCE_DEFAULT)));
+                                            ::testing::Values(AUDIO_SOURCE_DEFAULT)),
+                         GetRecordTestName);
 
 // misc
 INSTANTIATE_TEST_SUITE_P(AudioRecordMiscInput, AudioRecordCreateTest,
@@ -232,4 +258,35 @@
                                                               AUDIO_SOURCE_CAMCORDER,
                                                               AUDIO_SOURCE_VOICE_RECOGNITION,
                                                               AUDIO_SOURCE_VOICE_COMMUNICATION,
-                                                              AUDIO_SOURCE_UNPROCESSED)));
+                                                              AUDIO_SOURCE_UNPROCESSED)),
+                         GetRecordTestName);
+
+namespace {
+
+class TestExecutionTracer : public ::testing::EmptyTestEventListener {
+  public:
+    void OnTestStart(const ::testing::TestInfo& test_info) override {
+        TraceTestState("Started", test_info);
+    }
+    void OnTestEnd(const ::testing::TestInfo& test_info) override {
+        TraceTestState("Finished", test_info);
+    }
+    void OnTestPartResult(const ::testing::TestPartResult& result) override { LOG(INFO) << result; }
+
+  private:
+    static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info) {
+        LOG(INFO) << state << " " << test_info.test_suite_name() << "::" << test_info.name();
+    }
+};
+
+}  // namespace
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    android::base::SetMinimumLogSeverity(::android::base::DEBUG);
+    // This is for death handlers instantiated by the framework code.
+    android::ProcessState::self()->setThreadPoolMaxThreadCount(1);
+    android::ProcessState::self()->startThreadPool();
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index 202a400..e1265cf 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -119,4 +119,115 @@
     return ss.str();
 }
 
+std::string dumpMixerBehaviors(const MixerBehaviorSet& mixerBehaviors) {
+    std::stringstream ss;
+    for (auto it = mixerBehaviors.begin(); it != mixerBehaviors.end(); ++it) {
+        if (it != mixerBehaviors.begin()) {
+            ss << ", ";
+        }
+        ss << (*it);
+    }
+    return ss.str();
+}
+
+AudioProfileAttributesMultimap createAudioProfilesAttrMap(audio_profile profiles[],
+                                                          uint32_t first,
+                                                          uint32_t last) {
+    AudioProfileAttributesMultimap result;
+    for (uint32_t i = first; i < last; ++i) {
+        SampleRateSet sampleRates(profiles[i].sample_rates,
+                                  profiles[i].sample_rates + profiles[i].num_sample_rates);
+        ChannelMaskSet channelMasks(profiles[i].channel_masks,
+                                    profiles[i].channel_masks + profiles[i].num_channel_masks);
+        result.emplace(profiles[i].format, std::make_pair(sampleRates, channelMasks));
+    }
+    return result;
+}
+
+namespace {
+
+void populateAudioProfile(audio_format_t format,
+                          const ChannelMaskSet& channelMasks,
+                          const SampleRateSet& samplingRates,
+                          audio_profile* profile) {
+    profile->format = format;
+    profile->num_channel_masks = 0;
+    for (auto it = channelMasks.begin();
+         it != channelMasks.end() && profile->num_channel_masks < AUDIO_PORT_MAX_CHANNEL_MASKS;
+         ++it) {
+        profile->channel_masks[profile->num_channel_masks++] = *it;
+    }
+    profile->num_sample_rates = 0;
+    for (auto it = samplingRates.begin();
+         it != samplingRates.end() && profile->num_sample_rates < AUDIO_PORT_MAX_SAMPLING_RATES;
+         ++it) {
+        profile->sample_rates[profile->num_sample_rates++] = *it;
+    }
+}
+
+} // namespace
+
+void populateAudioProfiles(const AudioProfileAttributesMultimap& profileAttrs,
+                           audio_format_t format,
+                           ChannelMaskSet allChannelMasks,
+                           SampleRateSet allSampleRates,
+                           audio_profile audioProfiles[],
+                           uint32_t* numAudioProfiles,
+                           uint32_t maxAudioProfiles) {
+    if (*numAudioProfiles >= maxAudioProfiles) {
+        return;
+    }
+
+    const auto lower = profileAttrs.lower_bound(format);
+    const auto upper = profileAttrs.upper_bound(format);
+    SampleRateSet sampleRatesPresent;
+    ChannelMaskSet channelMasksPresent;
+    for (auto it = lower; it != upper && *numAudioProfiles < maxAudioProfiles; ++it) {
+        SampleRateSet srs;
+        std::set_intersection(it->second.first.begin(), it->second.first.end(),
+                              allSampleRates.begin(), allSampleRates.end(),
+                              std::inserter(srs, srs.begin()));
+        if (srs.empty()) {
+            continue;
+        }
+        ChannelMaskSet cms;
+        std::set_intersection(it->second.second.begin(), it->second.second.end(),
+                              allChannelMasks.begin(), allChannelMasks.end(),
+                              std::inserter(cms, cms.begin()));
+        if (cms.empty()) {
+            continue;
+        }
+        sampleRatesPresent.insert(srs.begin(), srs.end());
+        channelMasksPresent.insert(cms.begin(), cms.end());
+        populateAudioProfile(it->first, cms, srs,
+                             &audioProfiles[(*numAudioProfiles)++]);
+    }
+    if (*numAudioProfiles >= maxAudioProfiles) {
+        ALOGW("%s, too many audio profiles", __func__);
+        return;
+    }
+
+    SampleRateSet srs;
+    std::set_difference(allSampleRates.begin(), allSampleRates.end(),
+                        sampleRatesPresent.begin(), sampleRatesPresent.end(),
+                        std::inserter(srs, srs.begin()));
+    if (!srs.empty()) {
+        populateAudioProfile(format, allChannelMasks, srs,
+                             &audioProfiles[(*numAudioProfiles)++]);
+    }
+    if (*numAudioProfiles >= maxAudioProfiles) {
+        ALOGW("%s, too many audio profiles", __func__);
+        return;
+    }
+    ChannelMaskSet cms;
+    std::set_difference(allChannelMasks.begin(), allChannelMasks.end(),
+                        channelMasksPresent.begin(), channelMasksPresent.end(),
+                        std::inserter(cms, cms.begin()));
+    if (!cms.empty()) {
+        populateAudioProfile(format, cms, allSampleRates,
+                             &audioProfiles[(*numAudioProfiles)++]);
+    }
+}
+
 } // namespace android
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index 6e05abc..ae0457f 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -57,14 +57,21 @@
 
 void AudioPort::importAudioPort(const audio_port_v7 &port) {
     for (size_t i = 0; i < port.num_audio_profiles; ++i) {
+        if (port.audio_profiles[i].format == AUDIO_FORMAT_DEFAULT) {
+            // The dynamic format from AudioPort should not be AUDIO_FORMAT_DEFAULT.
+            continue;
+        }
         sp<AudioProfile> profile = new AudioProfile(port.audio_profiles[i].format,
                 ChannelMaskSet(port.audio_profiles[i].channel_masks,
                         port.audio_profiles[i].channel_masks +
-                        port.audio_profiles->num_channel_masks),
+                        port.audio_profiles[i].num_channel_masks),
                 SampleRateSet(port.audio_profiles[i].sample_rates,
                         port.audio_profiles[i].sample_rates +
                         port.audio_profiles[i].num_sample_rates),
                 port.audio_profiles[i].encapsulation_type);
+        profile->setDynamicFormat(true);
+        profile->setDynamicChannels(true);
+        profile->setDynamicRate(true);
         if (!mProfiles.contains(profile)) {
             addAudioProfile(profile);
         }
diff --git a/media/libaudiofoundation/AudioProfile.cpp b/media/libaudiofoundation/AudioProfile.cpp
index 999e263..4a5fb96 100644
--- a/media/libaudiofoundation/AudioProfile.cpp
+++ b/media/libaudiofoundation/AudioProfile.cpp
@@ -383,6 +383,16 @@
     }
 }
 
+ChannelMaskSet AudioProfileVector::getSupportedChannelMasks() const {
+    ChannelMaskSet channelMasks;
+    for (const auto& profile : *this) {
+        if (profile->isValid()) {
+            channelMasks.insert(profile->getChannels().begin(), profile->getChannels().end());
+        }
+    }
+    return channelMasks;
+}
+
 ConversionResult<AudioProfileVector>
 aidl2legacy_AudioProfileVector(const AudioProfileVector::Aidl& aidl, bool isInput) {
     return convertContainers<AudioProfileVector>(aidl.first, aidl.second,
diff --git a/media/libaudiofoundation/TEST_MAPPING b/media/libaudiofoundation/TEST_MAPPING
index a4e271e..f7e5b12 100644
--- a/media/libaudiofoundation/TEST_MAPPING
+++ b/media/libaudiofoundation/TEST_MAPPING
@@ -20,5 +20,10 @@
         }
       ]
     }
+  ],
+  "postsubmit": [
+    {
+      "name": "audiofoundation_containers_test"
+    }
   ]
 }
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 88dcee9..46fd620 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -19,6 +19,7 @@
 #include <algorithm>
 #include <functional>
 #include <iterator>
+#include <map>
 #include <set>
 #include <vector>
 
@@ -34,6 +35,8 @@
 using MixerBehaviorSet = std::set<audio_mixer_behavior_t>;
 
 using FormatVector = std::vector<audio_format_t>;
+using AudioProfileAttributesMultimap =
+        std::multimap<audio_format_t, std::pair<SampleRateSet, ChannelMaskSet>>;
 
 const DeviceTypeSet& getAudioDeviceOutAllSet();
 const DeviceTypeSet& getAudioDeviceOutAllA2dpSet();
@@ -126,6 +129,8 @@
 
 std::string dumpDeviceTypes(const DeviceTypeSet& deviceTypes);
 
+std::string dumpMixerBehaviors(const MixerBehaviorSet& mixerBehaviors);
+
 /**
  * Return human readable string for device types.
  */
@@ -133,5 +138,49 @@
     return deviceTypesToString(deviceTypes);
 }
 
+/**
+ * Create an audio profile attributes map from the given audio profile array, using the
+ * entries in the index range [first, last).
+ *
+ * @param profiles the array of audio profiles.
+ * @param first the index of the first profile to include.
+ * @param last the index one past the last profile to include.
+ * @return a multimap from audio format to pairs of the corresponding sample rate and
+ *         channel mask sets.
+ */
+AudioProfileAttributesMultimap createAudioProfilesAttrMap(audio_profile profiles[],
+                                                          uint32_t first,
+                                                          uint32_t last);
+
+/**
+ * Populate audio profiles according to given profile attributes, format, channel masks and
+ * sample rates.
+ *
+ * The function first goes over all pairs of channel masks and sample rates present in the
+ * profile attributes of the given map for the requested format. Channel masks and sample rates
+ * that are not present in the collections of all valid channel masks and all valid sample rates
+ * are excluded. After that, any channel masks and sample rates that are present in the "all
+ * values" collections but not in the profile attributes are placed in additional audio profiles
+ * appended to the profile array.
+ *
+ * Note that once the number of audio profiles reaches the maximum, no further audio profiles
+ * are placed in the array.
+ *
+ * @param profileAttrs a multimap that contains format and its corresponding channel masks and
+ *                     sample rates.
+ * @param format the targeted audio format.
+ * @param allChannelMasks all valid channel masks for the format.
+ * @param allSampleRates all valid sample rates for the format.
+ * @param audioProfiles the audio profile array.
+ * @param numAudioProfiles the index at which to start placing audio profiles in the array. The
+ *                         value is updated as new audio profiles are placed.
+ * @param maxAudioProfiles the maximum number of audio profiles the array can hold.
+ */
+void populateAudioProfiles(const AudioProfileAttributesMultimap& profileAttrs,
+                           audio_format_t format,
+                           ChannelMaskSet allChannelMasks,
+                           SampleRateSet allSampleRates,
+                           audio_profile audioProfiles[],
+                           uint32_t* numAudioProfiles,
+                           uint32_t maxAudioProfiles = AUDIO_PORT_MAX_AUDIO_PROFILES);
+
 
 } // namespace android
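The two helpers declared above are easiest to see end to end in a short usage sketch. The sketch below is illustrative only: the wrapper function rebuildPcm16Profiles and the concrete sample rates and channel masks are assumptions, not part of this change; only AudioProfileAttributesMultimap, createAudioProfilesAttrMap, populateAudioProfiles, and the audio_port_v7/audio_profile types from system/audio.h come from the sources above.

#include <media/AudioContainers.h>
#include <system/audio.h>

using namespace android;

// Hypothetical helper: re-derive the PCM 16-bit profiles of a port from its current
// profile array and a set of "all valid" rates and masks obtained elsewhere.
void rebuildPcm16Profiles(audio_port_v7& port) {
    // Index the existing flat profile array by format.
    const AudioProfileAttributesMultimap attrs =
            createAudioProfilesAttrMap(port.audio_profiles, 0, port.num_audio_profiles);

    // Illustrative "all valid values" sets for AUDIO_FORMAT_PCM_16_BIT.
    const SampleRateSet allRates = {44100, 48000};
    const ChannelMaskSet allMasks = {AUDIO_CHANNEL_OUT_STEREO};

    // Rewrite the array: existing attribute pairs are intersected with the "all values" sets,
    // and any leftover rates/masks end up in extra profiles (capped at the array size).
    uint32_t numProfiles = 0;
    populateAudioProfiles(attrs, AUDIO_FORMAT_PCM_16_BIT, allMasks, allRates,
                          port.audio_profiles, &numProfiles);
    port.num_audio_profiles = numProfiles;
}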
diff --git a/media/libaudiofoundation/include/media/AudioPort.h b/media/libaudiofoundation/include/media/AudioPort.h
index 77e58ed..5786f7f 100644
--- a/media/libaudiofoundation/include/media/AudioPort.h
+++ b/media/libaudiofoundation/include/media/AudioPort.h
@@ -48,7 +48,10 @@
     audio_port_role_t getRole() const { return mRole; }
 
     virtual void setFlags(uint32_t flags);
-    uint32_t getFlags() const { return useInputChannelMask() ? mFlags.input : mFlags.output; }
+    uint32_t getFlags() const {
+        return useInputChannelMask() ? static_cast<uint32_t>(mFlags.input)
+                                     : static_cast<uint32_t>(mFlags.output);
+    }
 
     void setGains(const AudioGains &gains) { mGains = gains; }
     const AudioGains &getGains() const { return mGains; }
diff --git a/media/libaudiofoundation/include/media/AudioProfile.h b/media/libaudiofoundation/include/media/AudioProfile.h
index a668afe..bcde1fe 100644
--- a/media/libaudiofoundation/include/media/AudioProfile.h
+++ b/media/libaudiofoundation/include/media/AudioProfile.h
@@ -149,6 +149,8 @@
     bool equals(const AudioProfileVector& other) const;
     void addAllValidProfiles(const AudioProfileVector& audioProfiles);
 
+    ChannelMaskSet getSupportedChannelMasks() const;
+
     using Aidl = std::pair<
             std::vector<media::audio::common::AudioProfile>,
             std::vector<media::AudioProfileSys>>;
diff --git a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
index 501831d..2ab8053 100644
--- a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
+++ b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
@@ -79,8 +79,8 @@
 
     bool equals(const sp<DeviceDescriptorBase>& other) const;
 
-    status_t writeToParcelable(media::AudioPortFw* parcelable) const;
-    status_t readFromParcelable(const media::AudioPortFw& parcelable);
+    virtual status_t writeToParcelable(media::AudioPortFw* parcelable) const;
+    virtual status_t readFromParcelable(const media::AudioPortFw& parcelable);
 
 protected:
     AudioDeviceTypeAddr mDeviceTypeAddr;
diff --git a/media/libaudiofoundation/tests/Android.bp b/media/libaudiofoundation/tests/Android.bp
index 2f4aee0..82c7db7 100644
--- a/media/libaudiofoundation/tests/Android.bp
+++ b/media/libaudiofoundation/tests/Android.bp
@@ -43,3 +43,30 @@
 
     test_suites: ["device-tests"],
 }
+
+cc_test {
+    name: "audiofoundation_containers_test",
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    static_libs: [
+        "libaudiofoundation",
+    ],
+
+    header_libs: [
+        "libaudio_system_headers",
+    ],
+
+    srcs: [
+        "audiofoundation_containers_test.cpp",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    test_suites: ["device-tests"],
+}
diff --git a/media/libaudiofoundation/tests/audiofoundation_containers_test.cpp b/media/libaudiofoundation/tests/audiofoundation_containers_test.cpp
new file mode 100644
index 0000000..967e2ee
--- /dev/null
+++ b/media/libaudiofoundation/tests/audiofoundation_containers_test.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include <media/AudioContainers.h>
+
+namespace android {
+
+const static AudioProfileAttributesMultimap AUDIO_PROFILE_ATTRIBUTES = {
+        {AUDIO_FORMAT_PCM_16_BIT, {{44100, 48000},
+                                   {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1}}},
+        {AUDIO_FORMAT_PCM_16_BIT, {{96000},
+                                   {AUDIO_CHANNEL_OUT_STEREO}}},
+        {AUDIO_FORMAT_PCM_8_24_BIT, {{48000},
+                                     {AUDIO_CHANNEL_OUT_STEREO}}}
+};
+
+TEST(PopulateAudioProfilesTest, AllAttributesMatches) {
+    const AudioProfileAttributesMultimap expected = {
+            {AUDIO_FORMAT_PCM_16_BIT, {{44100, 48000},
+                                       {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1}}},
+            {AUDIO_FORMAT_PCM_16_BIT, {{96000},
+                                       {AUDIO_CHANNEL_OUT_STEREO}}}
+    };
+    const audio_format_t format = AUDIO_FORMAT_PCM_16_BIT;
+    const SampleRateSet allSampleRates = {44100, 48000, 96000};
+    const ChannelMaskSet allChannelMasks = {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1};
+
+    audio_profile profiles[AUDIO_PORT_MAX_AUDIO_PROFILES];
+    uint32_t numProfiles = 0;
+    populateAudioProfiles(AUDIO_PROFILE_ATTRIBUTES, format, allChannelMasks, allSampleRates,
+                          profiles, &numProfiles);
+    ASSERT_EQ(expected, createAudioProfilesAttrMap(profiles, 0, numProfiles));
+}
+
+TEST(PopulateAudioProfilesTest, AttributesNotInAllValues) {
+    const AudioProfileAttributesMultimap expected = {
+            {AUDIO_FORMAT_PCM_16_BIT, {{48000},
+                                       {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1}}},
+            {AUDIO_FORMAT_PCM_16_BIT, {{96000},
+                                       {AUDIO_CHANNEL_OUT_STEREO}}}
+    };
+    const audio_format_t format = AUDIO_FORMAT_PCM_16_BIT;
+    const SampleRateSet allSampleRates = {48000, 96000};
+    const ChannelMaskSet allChannelMasks = {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1};
+
+    audio_profile profiles[AUDIO_PORT_MAX_AUDIO_PROFILES];
+    uint32_t numProfiles = 0;
+    populateAudioProfiles(AUDIO_PROFILE_ATTRIBUTES, format, allChannelMasks, allSampleRates,
+            profiles, &numProfiles);
+    ASSERT_EQ(expected, createAudioProfilesAttrMap(profiles, 0, numProfiles));
+}
+
+TEST(PopulateAudioProfilesTest, AllValuesNotInAttributes) {
+    const AudioProfileAttributesMultimap expected = {
+            {AUDIO_FORMAT_PCM_16_BIT, {{48000},
+                                       {AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1}}},
+            {AUDIO_FORMAT_PCM_16_BIT, {{96000},
+                                       {AUDIO_CHANNEL_OUT_STEREO}}},
+            {AUDIO_FORMAT_PCM_16_BIT, {{88200},
+                                       {AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO,
+                                        AUDIO_CHANNEL_OUT_7POINT1}}},
+            {AUDIO_FORMAT_PCM_16_BIT, {{48000, 88200, 96000},
+                                       {AUDIO_CHANNEL_OUT_MONO}}}
+    };
+    const audio_format_t format = AUDIO_FORMAT_PCM_16_BIT;
+    const SampleRateSet allSampleRates = {48000, 88200, 96000};
+    const ChannelMaskSet allChannelMasks =
+            {AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1};
+
+    audio_profile profiles[AUDIO_PORT_MAX_AUDIO_PROFILES];
+    uint32_t numProfiles = 0;
+    populateAudioProfiles(AUDIO_PROFILE_ATTRIBUTES, format, allChannelMasks, allSampleRates,
+            profiles, &numProfiles);
+    ASSERT_EQ(expected, createAudioProfilesAttrMap(profiles, 0, numProfiles));
+}
+
+TEST(PopulateAudioProfilesTest, NoOverflow) {
+    const audio_format_t format = AUDIO_FORMAT_PCM_16_BIT;
+    const SampleRateSet allSampleRates = {48000, 88200, 96000};
+    const ChannelMaskSet allChannelMasks =
+            {AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_7POINT1};
+
+    audio_profile profiles[AUDIO_PORT_MAX_AUDIO_PROFILES];
+    const uint32_t expectedNumProfiles = 4;
+    for (uint32_t i = 0; i <= AUDIO_PORT_MAX_AUDIO_PROFILES; ++i) {
+        uint32_t numProfiles = 0;
+        populateAudioProfiles(AUDIO_PROFILE_ATTRIBUTES, format, allChannelMasks, allSampleRates,
+                              profiles, &numProfiles, i);
+        ASSERT_EQ(std::min(i, expectedNumProfiles), numProfiles);
+    }
+}
+
+} // namespace android
diff --git a/media/libaudiohal/FactoryHal.cpp b/media/libaudiohal/FactoryHal.cpp
index f88915d..c414e19 100644
--- a/media/libaudiohal/FactoryHal.cpp
+++ b/media/libaudiohal/FactoryHal.cpp
@@ -50,9 +50,8 @@
  * This list needs to be kept in sync with AudioHalVersionInfo.VERSIONS in
  * media/java/android/media/AudioHalVersionInfo.java.
  */
-static const std::array<AudioHalVersionInfo, 5> sAudioHALVersions = {
-    // TODO: remove this comment to get AIDL
-    // AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
+static const std::array<AudioHalVersionInfo, 6> sAudioHALVersions = {
+    AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 0),
     AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0),
diff --git a/media/libaudiohal/OWNERS b/media/libaudiohal/OWNERS
index 71b17e6..993e0b3 100644
--- a/media/libaudiohal/OWNERS
+++ b/media/libaudiohal/OWNERS
@@ -1 +1,4 @@
+# Bug component: 48436
 mnaganov@google.com
+yaoshunkai@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 1689365..fb1cc34 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -319,6 +319,7 @@
         "ConversionHelperAidl.cpp",
         "DeviceHalAidl.cpp",
         "DevicesFactoryHalAidl.cpp",
+        "Hal2AidlMapper.cpp",
         "StreamHalAidl.cpp",
     ],
 }
diff --git a/media/libaudiohal/impl/Cleanups.h b/media/libaudiohal/impl/Cleanups.h
new file mode 100644
index 0000000..a313da1
--- /dev/null
+++ b/media/libaudiohal/impl/Cleanups.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <forward_list>
+#include <mutex>
+#include <utility>
+
+namespace android {
+
+// This class implements the "monitor" idiom for providing locked access to a class instance.
+// This is how it is intended to be used. Let's assume there is a "Main" class which owns
+// an instance of a "Resource" class, which is protected by a mutex. We add an instance of
+// "LockedAccessor<Resource>" as a member of "Main":
+//
+// class Resource;
+//
+// class Main {
+//     Main() : mAccessor(mResource, mLock) {}
+//   private:
+//     std::mutex mLock;
+//     Resource mResource GUARDED_BY(mLock);  // owns the resource
+//     LockedAccessor<Resource> mAccessor;
+// };
+//
+// The accessor is initialized in the constructor when no locking is needed. The accessor
+// defers locking until the resource is accessed.
+//
+// Although "mAccessor" can be used by the methods of "Main" for scoped access to the resource,
+// its main role is to grant access to the resource to other classes. This is achieved by
+// making a copy of "mAccessor" and giving it away to another class. This obviously does not
+// transfer ownership of the resource. The intent is to allow another class to use the resource
+// with proper locking in a "lazy" fashion:
+//
+// class Another {
+//   public:
+//     Another(const LockedAccessor<Resource>& accessor) : mAccessor(accessor) {}
+//     void doItLater() {  // Use explicit 'lock' / 'unlock'
+//         auto resource = mAccessor.lock();
+//         resource.use();
+//         mAccessor.unlock();
+//     }
+//     void doItLaterScoped() {  // Rely on the scoped accessor to perform unlocking.
+//         LockedAccessor<Resource> scopedAccessor(mAccessor);
+//         auto resource = scopedAccessor.lock();
+//         resource.use();
+//     }
+//   private:
+//     LockedAccessor<Resource> mAccessor;
+// };
+//
+template<class C>
+class LockedAccessor {
+  public:
+    LockedAccessor(C& instance, std::mutex& mutex)
+            : mInstance(instance), mMutex(mutex), mLock(mMutex, std::defer_lock) {}
+    LockedAccessor(const LockedAccessor& other)
+            : mInstance(other.mInstance), mMutex(other.mMutex), mLock(mMutex, std::defer_lock) {}
+    ~LockedAccessor() { if (mLock.owns_lock()) mLock.unlock(); }
+    C& lock() { mLock.lock(); return mInstance; }
+    void unlock() { mLock.unlock(); }
+  private:
+    C& mInstance;
+    std::mutex& mMutex;
+    std::unique_lock<std::mutex> mLock;
+};
+
+// This class implements scoped cleanups. A "cleanup" is a call to a method of class "C" which
+// takes an integer parameter. Cleanups are executed in the reverse order of their addition.
+// For executing cleanups, the instance of "C" is retrieved via the provided "LockedAccessor".
+template<class C>
+class Cleanups {
+  public:
+    typedef void (C::*Cleaner)(int32_t);  // A member function of "C" performing a cleanup action.
+    explicit Cleanups(const LockedAccessor<C>& accessor) : mAccessor(accessor) {}
+    ~Cleanups() {
+        if (!mCleanups.empty()) {
+            C& c = mAccessor.lock();
+            for (auto& cleanup : mCleanups) (c.*cleanup.first)(cleanup.second);
+            mAccessor.unlock();
+        }
+    }
+    void add(Cleaner cleaner, int32_t id) {
+        mCleanups.emplace_front(cleaner, id);
+    }
+    void disarmAll() { mCleanups.clear(); }
+  private:
+    using Cleanup = std::pair<Cleaner, int32_t>;
+    LockedAccessor<C> mAccessor;
+    std::forward_list<Cleanup> mCleanups;
+};
+
+}  // namespace android
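A small end-to-end sketch may help with the Cleanups class above. Everything below other than LockedAccessor and Cleanups themselves is hypothetical (the PortMapper class, its resource methods, and the failure check exist only to illustrate the pattern): register an undo action right after each resource is created, and call disarmAll() only once the whole operation has succeeded. Note that the Cleanups instance is declared before the lock is taken, so by the time its destructor re-locks the mutex through the accessor, the scoped lock has already been released.

#include <mutex>

#include "Cleanups.h"

namespace android {

class PortMapper {
  public:
    PortMapper() : mAccessor(*this, mLock) {}

    // Creates two resources atomically from the caller's point of view: if the second
    // creation fails, the first one is rolled back by ~Cleanups when it goes out of scope.
    bool createPair() {
        Cleanups<PortMapper> cleanups(mAccessor);  // declared before the lock on purpose
        std::lock_guard l(mLock);
        const int32_t firstId = createResource();
        cleanups.add(&PortMapper::releaseResource, firstId);
        const int32_t secondId = createResource();
        if (secondId < 0) return false;  // 'firstId' is released automatically by ~Cleanups
        cleanups.disarmAll();            // success: keep both resources
        return true;
    }

  private:
    int32_t createResource() { return mNextId++; }  // placeholder
    void releaseResource(int32_t /*id*/) {}         // placeholder; must take an int32_t

    std::mutex mLock;
    int32_t mNextId = 0;
    LockedAccessor<PortMapper> mAccessor;
};

}  // namespace android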
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index b18e64b..fc3f699 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -18,7 +18,6 @@
 // #define LOG_NDEBUG 0
 
 #include <algorithm>
-#include <forward_list>
 
 #include <aidl/android/hardware/audio/core/BnStreamCallback.h>
 #include <aidl/android/hardware/audio/core/BnStreamOutEventCallback.h>
@@ -28,6 +27,7 @@
 #include <media/AidlConversionNdkCpp.h>
 #include <media/AidlConversionUtil.h>
 #include <mediautils/TimeCheck.h>
+#include <system/audio.h>
 #include <Utils.h>
 #include <utils/Log.h>
 
@@ -37,14 +37,9 @@
 
 using aidl::android::aidl_utils::statusTFromBinderStatus;
 using aidl::android::media::audio::common::Boolean;
-using aidl::android::media::audio::common::AudioChannelLayout;
 using aidl::android::media::audio::common::AudioConfig;
 using aidl::android::media::audio::common::AudioDevice;
-using aidl::android::media::audio::common::AudioDeviceAddress;
 using aidl::android::media::audio::common::AudioDeviceType;
-using aidl::android::media::audio::common::AudioFormatDescription;
-using aidl::android::media::audio::common::AudioFormatType;
-using aidl::android::media::audio::common::AudioInputFlags;
 using aidl::android::media::audio::common::AudioIoFlags;
 using aidl::android::media::audio::common::AudioLatencyMode;
 using aidl::android::media::audio::common::AudioMMapPolicy;
@@ -54,11 +49,7 @@
 using aidl::android::media::audio::common::AudioOutputFlags;
 using aidl::android::media::audio::common::AudioPort;
 using aidl::android::media::audio::common::AudioPortConfig;
-using aidl::android::media::audio::common::AudioPortDeviceExt;
 using aidl::android::media::audio::common::AudioPortExt;
-using aidl::android::media::audio::common::AudioPortMixExt;
-using aidl::android::media::audio::common::AudioPortMixExtUseCase;
-using aidl::android::media::audio::common::AudioProfile;
 using aidl::android::media::audio::common::AudioSource;
 using aidl::android::media::audio::common::Float;
 using aidl::android::media::audio::common::Int;
@@ -67,9 +58,8 @@
 using aidl::android::media::audio::IHalAdapterVendorExtension;
 using aidl::android::hardware::audio::common::getFrameSizeInBytes;
 using aidl::android::hardware::audio::common::isBitPositionFlagSet;
-using aidl::android::hardware::audio::common::isDefaultAudioFormat;
-using aidl::android::hardware::audio::common::makeBitPositionFlagMask;
 using aidl::android::hardware::audio::common::RecordTrackMetadata;
+using aidl::android::hardware::audio::core::sounddose::ISoundDose;
 using aidl::android::hardware::audio::core::AudioPatch;
 using aidl::android::hardware::audio::core::AudioRoute;
 using aidl::android::hardware::audio::core::IBluetooth;
@@ -78,37 +68,12 @@
 using aidl::android::hardware::audio::core::IModule;
 using aidl::android::hardware::audio::core::ITelephony;
 using aidl::android::hardware::audio::core::ModuleDebug;
-using aidl::android::hardware::audio::core::StreamDescriptor;
 using aidl::android::hardware::audio::core::VendorParameter;
 
 namespace android {
 
 namespace {
 
-bool isConfigEqualToPortConfig(const AudioConfig& config, const AudioPortConfig& portConfig) {
-    return portConfig.sampleRate.value().value == config.base.sampleRate &&
-            portConfig.channelMask.value() == config.base.channelMask &&
-            portConfig.format.value() == config.base.format;
-}
-
-void setConfigFromPortConfig(AudioConfig* config, const AudioPortConfig& portConfig) {
-    config->base.sampleRate = portConfig.sampleRate.value().value;
-    config->base.channelMask = portConfig.channelMask.value();
-    config->base.format = portConfig.format.value();
-}
-
-void setPortConfigFromConfig(AudioPortConfig* portConfig, const AudioConfig& config) {
-    if (config.base.sampleRate != 0) {
-        portConfig->sampleRate = Int{ .value = config.base.sampleRate };
-    }
-    if (config.base.channelMask != AudioChannelLayout{}) {
-        portConfig->channelMask = config.base.channelMask;
-    }
-    if (config.base.format != AudioFormatDescription{}) {
-        portConfig->format = config.base.format;
-    }
-}
-
 // Note: these converters are for types defined in different AIDL files. Although these
 // AIDL files are copies of each other, however formally these are different types
 // thus we don't use a conversion via a parcelable.
@@ -142,28 +107,29 @@
           mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
           mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
           mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
-          mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)) {
+          mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)),
+          mSoundDose(retrieveSubInterface<ISoundDose>(module, &IModule::getSoundDose)),
+          mMapper(instance, module), mMapperAccessor(mMapper, mLock) {
 }
 
 status_t DeviceHalAidl::getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) {
-    return ::aidl::android::convertContainer(mPorts, ports,
-            [](const Ports::value_type& pair) { return ndk2cpp_AudioPort(pair.second); });
+    std::lock_guard l(mLock);
+    return mMapper.getAudioPorts(ports, ndk2cpp_AudioPort);
 }
 
 status_t DeviceHalAidl::getAudioRoutes(std::vector<media::AudioRoute> *routes) {
-    *routes = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::convertContainer<std::vector<media::AudioRoute>>(
-                    mRoutes, ndk2cpp_AudioRoute));
-    return OK;
+    std::lock_guard l(mLock);
+    return mMapper.getAudioRoutes(routes, ndk2cpp_AudioRoute);
 }
 
 status_t DeviceHalAidl::getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
+    if (mTelephony == nullptr) return INVALID_OPERATION;
     if (modes == nullptr) {
         return BAD_VALUE;
     }
-    if (mModule == nullptr) return NO_INIT;
-    if (mTelephony == nullptr) return INVALID_OPERATION;
     std::vector<AudioMode> aidlModes;
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(mTelephony->getSupportedAudioModes(&aidlModes)));
@@ -179,51 +145,17 @@
 }
 
 status_t DeviceHalAidl::initCheck() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (mModule == nullptr) return NO_INIT;
-    std::vector<AudioPort> ports;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->getAudioPorts(&ports)));
-    ALOGW_IF(ports.empty(), "%s: module %s returned an empty list of audio ports",
-            __func__, mInstance.c_str());
-    std::transform(ports.begin(), ports.end(), std::inserter(mPorts, mPorts.end()),
-            [](const auto& p) { return std::make_pair(p.id, p); });
-    mDefaultInputPortId = mDefaultOutputPortId = -1;
-    const int defaultDeviceFlag = 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE;
-    for (const auto& pair : mPorts) {
-        const auto& p = pair.second;
-        if (p.ext.getTag() == AudioPortExt::Tag::device &&
-                (p.ext.get<AudioPortExt::Tag::device>().flags & defaultDeviceFlag) != 0) {
-            if (p.flags.getTag() == AudioIoFlags::Tag::input) {
-                mDefaultInputPortId = p.id;
-            } else if (p.flags.getTag() == AudioIoFlags::Tag::output) {
-                mDefaultOutputPortId = p.id;
-            }
-        }
-    }
-    ALOGI("%s: module %s default port ids: input %d, output %d",
-            __func__, mInstance.c_str(), mDefaultInputPortId, mDefaultOutputPortId);
-    RETURN_STATUS_IF_ERROR(updateRoutes());
-    std::vector<AudioPortConfig> portConfigs;
-    RETURN_STATUS_IF_ERROR(
-            statusTFromBinderStatus(mModule->getAudioPortConfigs(&portConfigs)));  // OK if empty
-    std::transform(portConfigs.begin(), portConfigs.end(),
-            std::inserter(mPortConfigs, mPortConfigs.end()),
-            [](const auto& p) { return std::make_pair(p.id, p); });
-    std::transform(mPortConfigs.begin(), mPortConfigs.end(),
-            std::inserter(mInitialPortConfigIds, mInitialPortConfigIds.end()),
-            [](const auto& pcPair) { return pcPair.first; });
-    std::vector<AudioPatch> patches;
-    RETURN_STATUS_IF_ERROR(
-            statusTFromBinderStatus(mModule->getAudioPatches(&patches)));  // OK if empty
-    std::transform(patches.begin(), patches.end(),
-            std::inserter(mPatches, mPatches.end()),
-            [](const auto& p) { return std::make_pair(p.id, p); });
-    return OK;
+    std::lock_guard l(mLock);
+    return mMapper.initialize();
 }
 
 status_t DeviceHalAidl::setVoiceVolume(float volume) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     if (mTelephony == nullptr) return INVALID_OPERATION;
     ITelephony::TelecomConfig inConfig{ .voiceVolume = Float{volume} }, outConfig;
     RETURN_STATUS_IF_ERROR(
@@ -235,20 +167,26 @@
 }
 
 status_t DeviceHalAidl::setMasterVolume(float volume) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     return statusTFromBinderStatus(mModule->setMasterVolume(volume));
 }
 
 status_t DeviceHalAidl::getMasterVolume(float *volume) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
+    if (volume == nullptr) {
+        return BAD_VALUE;
+    }
     return statusTFromBinderStatus(mModule->getMasterVolume(volume));
 }
 
 status_t DeviceHalAidl::setMode(audio_mode_t mode) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     AudioMode audioMode = VALUE_OR_FATAL(::aidl::android::legacy2aidl_audio_mode_t_AudioMode(mode));
     if (mTelephony != nullptr) {
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mTelephony->switchAudioMode(audioMode)));
@@ -257,31 +195,43 @@
 }
 
 status_t DeviceHalAidl::setMicMute(bool state) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     return statusTFromBinderStatus(mModule->setMicMute(state));
 }
 
 status_t DeviceHalAidl::getMicMute(bool *state) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
+    if (state == nullptr) {
+        return BAD_VALUE;
+    }
     return statusTFromBinderStatus(mModule->getMicMute(state));
 }
 
 status_t DeviceHalAidl::setMasterMute(bool state) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     return statusTFromBinderStatus(mModule->setMasterMute(state));
 }
 
 status_t DeviceHalAidl::getMasterMute(bool *state) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
+    if (state == nullptr) {
+        return BAD_VALUE;
+    }
     return statusTFromBinderStatus(mModule->getMasterMute(state));
 }
 
 status_t DeviceHalAidl::setParameters(const String8& kvPairs) {
-    if (!mModule) return NO_INIT;
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
     AudioParameter parameters(kvPairs);
     ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
 
@@ -307,8 +257,9 @@
 }
 
 status_t DeviceHalAidl::getParameters(const String8& keys, String8 *values) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     if (values == nullptr) {
         return BAD_VALUE;
     }
@@ -320,44 +271,13 @@
     return parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, values);
 }
 
-namespace {
-
-class Cleanup {
-  public:
-    typedef void (DeviceHalAidl::*Cleaner)(int32_t);
-
-    Cleanup(DeviceHalAidl* device, Cleaner cleaner, int32_t id) :
-            mDevice(device), mCleaner(cleaner), mId(id) {}
-    ~Cleanup() { clean(); }
-    void clean() {
-        if (mDevice != nullptr) (mDevice->*mCleaner)(mId);
-        disarm();
-    }
-    void disarm() { mDevice = nullptr; }
-
-  private:
-    DeviceHalAidl* mDevice;
-    const Cleaner mCleaner;
-    const int32_t mId;
-};
-
-}  // namespace
-
-// Since the order of container elements destruction is unspecified,
-// ensure that cleanups are performed from the most recent one and upwards.
-// This is the same as if there were individual Cleanup instances on the stack,
-// however the bonus is that we can disarm all of them with just one statement.
-class DeviceHalAidl::Cleanups : public std::forward_list<Cleanup> {
-  public:
-    ~Cleanups() { for (auto& c : *this) c.clean(); }
-    void disarmAll() { for (auto& c : *this) c.disarm(); }
-};
-
 status_t DeviceHalAidl::getInputBufferSize(const struct audio_config* config, size_t* size) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
-    if (size == nullptr) return BAD_VALUE;
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
+    if (config == nullptr || size == nullptr) {
+        return BAD_VALUE;
+    }
     AudioConfig aidlConfig = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
     AudioDevice aidlDevice;
@@ -365,61 +285,20 @@
     AudioSource aidlSource = AudioSource::DEFAULT;
     AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::input>(0);
     AudioPortConfig mixPortConfig;
-    Cleanups cleanups;
-    audio_config writableConfig = *config;
+    Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
     AudioPatch aidlPatch;
-    RETURN_STATUS_IF_ERROR(prepareToOpenStream(0 /*handle*/, aidlDevice, aidlFlags, aidlSource,
-                    &writableConfig, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(mMapper.prepareToOpenStream(
+                        0 /*handle*/, aidlDevice, aidlFlags, aidlSource,
+                        &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
+    }
     *size = aidlConfig.frameCount *
             getFrameSizeInBytes(aidlConfig.base.format, aidlConfig.base.channelMask);
     // Do not disarm cleanups to release temporary port configs.
     return OK;
 }
 
-status_t DeviceHalAidl::prepareToOpenStream(
-        int32_t aidlHandle, const AudioDevice& aidlDevice, const AudioIoFlags& aidlFlags,
-        AudioSource aidlSource, struct audio_config* config,
-        Cleanups* cleanups, AudioConfig* aidlConfig, AudioPortConfig* mixPortConfig,
-        AudioPatch* aidlPatch) {
-    ALOGD("%p %s::%s: handle %d, device %s, flags %s, source %s, config %s, mix port config %s",
-            this, getClassName().c_str(), __func__, aidlHandle, aidlDevice.toString().c_str(),
-            aidlFlags.toString().c_str(), toString(aidlSource).c_str(),
-            aidlConfig->toString().c_str(), mixPortConfig->toString().c_str());
-    resetUnusedPatchesAndPortConfigs();
-    const bool isInput = aidlFlags.getTag() == AudioIoFlags::Tag::input;
-    // Find / create AudioPortConfigs for the device port and the mix port,
-    // then find / create a patch between them, and open a stream on the mix port.
-    AudioPortConfig devicePortConfig;
-    bool created = false;
-    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(aidlDevice, aidlConfig,
-                                                  &devicePortConfig, &created));
-    if (created) {
-        cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, devicePortConfig.id);
-    }
-    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(*aidlConfig, aidlFlags, aidlHandle, aidlSource,
-                    std::set<int32_t>{devicePortConfig.portId}, mixPortConfig, &created));
-    if (created) {
-        cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, mixPortConfig->id);
-    }
-    setConfigFromPortConfig(aidlConfig, *mixPortConfig);
-    if (isInput) {
-        RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {devicePortConfig.id}, {mixPortConfig->id}, aidlPatch, &created));
-    } else {
-        RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {mixPortConfig->id}, {devicePortConfig.id}, aidlPatch, &created));
-    }
-    if (created) {
-        cleanups->emplace_front(this, &DeviceHalAidl::resetPatch, aidlPatch->id);
-    }
-    if (aidlConfig->frameCount <= 0) {
-        aidlConfig->frameCount = aidlPatch->minimumStreamBufferSizeFrames;
-    }
-    *config = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::aidl2legacy_AudioConfig_audio_config_t(*aidlConfig, isInput));
-    return OK;
-}
-
 namespace {
 
 class StreamCallbackBase {
@@ -543,26 +422,33 @@
         const char* address,
         sp<StreamOutHalInterface>* outStream) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
-    if (!outStream || !config) {
+    TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
+    if (outStream == nullptr || config == nullptr) {
         return BAD_VALUE;
     }
-    TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    constexpr bool isInput = false;
     int32_t aidlHandle = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_io_handle_t_int32_t(handle));
     AudioConfig aidlConfig = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, false /*isInput*/));
+            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, isInput));
     AudioDevice aidlDevice = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_device_AudioDevice(devices, address));
     int32_t aidlOutputFlags = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
     AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::output>(aidlOutputFlags);
     AudioPortConfig mixPortConfig;
-    Cleanups cleanups;
     AudioPatch aidlPatch;
-    RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags,
-                    AudioSource::SYS_RESERVED_INVALID /*only needed for input*/,
-                    config, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
+    Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(mMapper.prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags,
+                        AudioSource::SYS_RESERVED_INVALID /*only needed for input*/,
+                        &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
+    }
+    *config = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioConfig_audio_config_t(aidlConfig, isInput));
+    if (mixPortConfig.id == 0) return BAD_VALUE;  // HAL suggests a different config.
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamArguments args;
     args.portConfigId = mixPortConfig.id;
     const bool isOffload = isBitPositionFlagSet(
@@ -588,11 +474,11 @@
     }
     *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
             std::move(ret.stream), mVendorExt, this /*callbackBroker*/);
-    mStreams.insert(std::pair(*outStream, aidlPatch.id));
     void* cbCookie = (*outStream).get();
     {
         std::lock_guard l(mLock);
         mCallbacks.emplace(cbCookie, Callbacks{});
+        mMapper.addStream(*outStream, mixPortConfig.id, aidlPatch.id);
     }
     if (streamCb) streamCb->setCookie(cbCookie);
     eventCb->setCookie(cbCookie);
@@ -607,15 +493,16 @@
         audio_devices_t outputDevice, const char* outputDeviceAddress,
         sp<StreamInHalInterface>* inStream) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
-    if (!inStream || !config) {
+    TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
+    if (inStream == nullptr || config == nullptr) {
         return BAD_VALUE;
     }
-    TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    constexpr bool isInput = true;
     int32_t aidlHandle = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_io_handle_t_int32_t(handle));
     AudioConfig aidlConfig = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
+            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, isInput));
     AudioDevice aidlDevice = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_device_AudioDevice(devices, address));
     int32_t aidlInputFlags = VALUE_OR_RETURN_STATUS(
@@ -624,10 +511,17 @@
     AudioSource aidlSource = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_source_t_AudioSource(source));
     AudioPortConfig mixPortConfig;
-    Cleanups cleanups;
     AudioPatch aidlPatch;
-    RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags, aidlSource,
-                    config, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
+    Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(mMapper.prepareToOpenStream(
+                        aidlHandle, aidlDevice, aidlFlags, aidlSource,
+                        &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
+    }
+    *config = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioConfig_audio_config_t(aidlConfig, isInput));
+    if (mixPortConfig.id == 0) return BAD_VALUE;  // HAL suggests a different config.
     ::aidl::android::hardware::audio::core::IModule::OpenInputStreamArguments args;
     args.portConfigId = mixPortConfig.id;
     RecordTrackMetadata aidlTrackMetadata{
@@ -649,12 +543,18 @@
     }
     *inStream = sp<StreamInHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
             std::move(ret.stream), mVendorExt, this /*micInfoProvider*/);
-    mStreams.insert(std::pair(*inStream, aidlPatch.id));
+    {
+        std::lock_guard l(mLock);
+        mMapper.addStream(*inStream, mixPortConfig.id, aidlPatch.id);
+    }
     cleanups.disarmAll();
     return OK;
 }
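
Both stream-opening paths above arm a `Cleanups` guard while port configs and patches are being set up, and only call `disarmAll()` once the stream object has been created, so any early return rolls the partial setup back. The sketch below shows the idiom in isolation; the class name and members are illustrative only and are not the actual `Hal2AidlMapper::Cleanups` API.

    #include <functional>
    #include <vector>

    // Scope guard: registered rollback actions run on destruction unless the
    // guard is disarmed after the whole operation has succeeded.
    class ScopedCleanups {
      public:
        ~ScopedCleanups() {
            for (auto it = mActions.rbegin(); it != mActions.rend(); ++it) (*it)();
        }
        void add(std::function<void()> action) { mActions.push_back(std::move(action)); }
        void disarmAll() { mActions.clear(); }
      private:
        std::vector<std::function<void()>> mActions;
    };

In this sketch an error path runs the rollback actions in reverse order of registration, which matches the role the guard plays in the stream-opening code above.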
 
 status_t DeviceHalAidl::supportsAudioPatches(bool* supportsPatches) {
+    if (supportsPatches == nullptr) {
+        return BAD_VALUE;
+    }
     *supportsPatches = true;
     return OK;
 }
@@ -666,7 +566,7 @@
                                          audio_patch_handle_t* patch) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     if (num_sinks > AUDIO_PATCH_PORTS_MAX || num_sources > AUDIO_PATCH_PORTS_MAX ||
         sources == nullptr || sinks == nullptr || patch == nullptr) {
         return BAD_VALUE;
@@ -683,7 +583,7 @@
     // that the HAL module uses `int32_t` for patch IDs. The following assert ensures
     // that both the framework and the HAL use the same value for "no ID":
     static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
-    int32_t halPatchId = static_cast<int32_t>(*patch);
+    int32_t aidlPatchId = static_cast<int32_t>(*patch);
 
     // Upon conversion, mix port configs contain audio configuration, while
     // device port configs contain device address. This data is used to find
@@ -705,68 +605,13 @@
                         ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
                                 sinks[i], isInput, 0)));
     }
-    Cleanups cleanups;
-    auto existingPatchIt = halPatchId != 0 ? mPatches.find(halPatchId): mPatches.end();
-    AudioPatch aidlPatch;
-    if (existingPatchIt != mPatches.end()) {
-        aidlPatch = existingPatchIt->second;
-        aidlPatch.sourcePortConfigIds.clear();
-        aidlPatch.sinkPortConfigIds.clear();
+    Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
+                        aidlSources, aidlSinks, &aidlPatchId, &cleanups));
     }
-    // The IDs will be found by 'fillPortConfigs', however the original 'aidlSources' and
-    // 'aidlSinks' will not be updated because 'setAudioPatch' only needs IDs. Here we log
-    // the source arguments, where only the audio configuration and device specifications
-    // are relevant.
-    ALOGD("%s: [disregard IDs] sources: %s, sinks: %s",
-            __func__, ::android::internal::ToString(aidlSources).c_str(),
-            ::android::internal::ToString(aidlSinks).c_str());
-    auto fillPortConfigs = [&](
-            const std::vector<AudioPortConfig>& configs,
-            const std::set<int32_t>& destinationPortIds,
-            std::vector<int32_t>* ids, std::set<int32_t>* portIds) -> status_t {
-        for (const auto& s : configs) {
-            AudioPortConfig portConfig;
-            bool created = false;
-            RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
-                            s, destinationPortIds, &portConfig, &created));
-            if (created) {
-                cleanups.emplace_front(this, &DeviceHalAidl::resetPortConfig, portConfig.id);
-            }
-            ids->push_back(portConfig.id);
-            if (portIds != nullptr) {
-                portIds->insert(portConfig.portId);
-            }
-        }
-        return OK;
-    };
-    // When looking up port configs, the destinationPortId is only used for mix ports.
-    // Thus, we process device port configs first, and look up the destination port ID from them.
-    bool sourceIsDevice = std::any_of(aidlSources.begin(), aidlSources.end(),
-            [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
-    const std::vector<AudioPortConfig>& devicePortConfigs =
-            sourceIsDevice ? aidlSources : aidlSinks;
-    std::vector<int32_t>* devicePortConfigIds =
-            sourceIsDevice ? &aidlPatch.sourcePortConfigIds : &aidlPatch.sinkPortConfigIds;
-    const std::vector<AudioPortConfig>& mixPortConfigs =
-            sourceIsDevice ? aidlSinks : aidlSources;
-    std::vector<int32_t>* mixPortConfigIds =
-            sourceIsDevice ? &aidlPatch.sinkPortConfigIds : &aidlPatch.sourcePortConfigIds;
-    std::set<int32_t> devicePortIds;
-    RETURN_STATUS_IF_ERROR(fillPortConfigs(
-                    devicePortConfigs, std::set<int32_t>(), devicePortConfigIds, &devicePortIds));
-    RETURN_STATUS_IF_ERROR(fillPortConfigs(
-                    mixPortConfigs, devicePortIds, mixPortConfigIds, nullptr));
-    if (existingPatchIt != mPatches.end()) {
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                        mModule->setAudioPatch(aidlPatch, &aidlPatch)));
-        existingPatchIt->second = aidlPatch;
-    } else {
-        bool created = false;
-        RETURN_STATUS_IF_ERROR(findOrCreatePatch(aidlPatch, &aidlPatch, &created));
-        // Since no cleanup of the patch is needed, 'created' is ignored.
-        halPatchId = aidlPatch.id;
-        *patch = static_cast<audio_patch_handle_t>(halPatchId);
-    }
+    *patch = static_cast<audio_patch_handle_t>(aidlPatchId);
     cleanups.disarmAll();
     return OK;
 }
@@ -774,26 +619,17 @@
 status_t DeviceHalAidl::releaseAudioPatch(audio_patch_handle_t patch) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
     if (patch == AUDIO_PATCH_HANDLE_NONE) {
         return BAD_VALUE;
     }
-    int32_t halPatchId = static_cast<int32_t>(patch);
-    auto patchIt = mPatches.find(halPatchId);
-    if (patchIt == mPatches.end()) {
-        ALOGE("%s: patch with id %d not found", __func__, halPatchId);
-        return BAD_VALUE;
-    }
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->resetAudioPatch(halPatchId)));
-    mPatches.erase(patchIt);
+    std::lock_guard l(mLock);
+    RETURN_STATUS_IF_ERROR(mMapper.releaseAudioPatch(static_cast<int32_t>(patch)));
     return OK;
 }
 
 status_t DeviceHalAidl::getAudioPort(struct audio_port* port) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
-    TIME_CHECK();
-    if (!mModule) return NO_INIT;
     if (port == nullptr) {
         return BAD_VALUE;
     }
@@ -806,7 +642,7 @@
 status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     if (port == nullptr) {
         return BAD_VALUE;
     }
@@ -822,24 +658,44 @@
     const auto& matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
     // It seems that we don't have to call HAL since all valid ports have been added either
     // during initialization, or while handling connection of an external device.
-    auto portsIt = findPort(matchDevice);
-    if (portsIt == mPorts.end()) {
-        ALOGE("%s: device port for device %s is not found in the module %s",
-                __func__, matchDevice.toString().c_str(), mInstance.c_str());
-        return BAD_VALUE;
-    }
     const int32_t fwkId = aidlPort.id;
-    aidlPort = portsIt->second;
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(mMapper.getAudioPortCached(matchDevice, &aidlPort));
+    }
     aidlPort.id = fwkId;
     *port = VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_AudioPort_audio_port_v7(
                     aidlPort, isInput));
     return OK;
 }
 
+status_t DeviceHalAidl::getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                        struct audio_port_v7 *mixPort) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
+    if (devicePort == nullptr || mixPort == nullptr ||
+            devicePort->type != AUDIO_PORT_TYPE_DEVICE || mixPort->type != AUDIO_PORT_TYPE_MIX) {
+        return BAD_VALUE;
+    }
+    const int32_t aidlHandle = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_io_handle_t_int32_t(mixPort->ext.mix.handle));
+    AudioPort port;
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(mMapper.getAudioMixPort(aidlHandle, &port));
+    }
+    const bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
+            mixPort->role, mixPort->type)) == ::aidl::android::AudioPortDirection::INPUT;
+    *mixPort = VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_AudioPort_audio_port_v7(
+            port, isInput));
+    return OK;
+}
+
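
In this implementation the new `getAudioMixPort` entry point only validates the device port's type and looks the mix port up by the io handle stored in `ext.mix.handle`, overwriting `*mixPort` with the port reported for that handle. A hypothetical caller-side setup, with made-up values, could look like this (assuming `device` is the `DeviceHalInterface` instance and `ioHandle` an existing `audio_io_handle_t`):

    audio_port_v7 devicePort{};
    devicePort.type = AUDIO_PORT_TYPE_DEVICE;
    audio_port_v7 mixPort{};
    mixPort.type = AUDIO_PORT_TYPE_MIX;
    mixPort.role = AUDIO_PORT_ROLE_SOURCE;  // an output mix; direction is derived from role/type
    mixPort.ext.mix.handle = ioHandle;      // io handle of an already opened stream (assumed)
    const status_t status = device->getAudioMixPort(&devicePort, &mixPort);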
 status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     if (config == nullptr) {
         return BAD_VALUE;
     }
@@ -849,13 +705,15 @@
             ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
                     *config, isInput, 0 /*portId*/));
     AudioPortConfig portConfig;
-    bool created = false;
-    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
-                    requestedPortConfig, std::set<int32_t>(), &portConfig, &created));
-    return OK;
+    std::lock_guard l(mLock);
+    return mMapper.setPortConfig(requestedPortConfig, std::set<int32_t>(), &portConfig);
 }
 
 MicrophoneInfoProvider::Info const* DeviceHalAidl::getMicrophoneInfo() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mModule) return {};
+    std::lock_guard l(mLock);
     if (mMicrophones.status == Microphones::Status::UNKNOWN) {
         TIME_CHECK();
         std::vector<MicrophoneInfo> aidlInfo;
@@ -878,11 +736,12 @@
 
 status_t DeviceHalAidl::getMicrophones(
         std::vector<audio_microphone_characteristic_t>* microphones) {
-    if (!microphones) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
+    if (microphones == nullptr) {
         return BAD_VALUE;
     }
-    TIME_CHECK();
-    if (!mModule) return NO_INIT;
     auto staticInfo = getMicrophoneInfo();
     if (!staticInfo) return INVALID_OPERATION;
     std::vector<MicrophoneDynamicInfo> emptyDynamicInfo;
@@ -899,9 +758,10 @@
 
 status_t DeviceHalAidl::addDeviceEffect(
         const struct audio_port_config *device, sp<EffectHalInterface> effect) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
-    if (!effect) {
+    if (mModule == nullptr) return NO_INIT;
+    if (device == nullptr || effect == nullptr) {
         return BAD_VALUE;
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -915,12 +775,11 @@
         return BAD_VALUE;
     }
     AudioPortConfig devicePortConfig;
-    bool created;
-    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
-                    requestedPortConfig, {} /*destinationPortIds*/, &devicePortConfig, &created));
-    Cleanups cleanups;
-    if (created) {
-        cleanups.emplace_front(this, &DeviceHalAidl::resetPortConfig, devicePortConfig.id);
+    Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(mMapper.setPortConfig(
+                    requestedPortConfig, {} /*destinationPortIds*/, &devicePortConfig, &cleanups));
     }
     auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->addDeviceEffect(
@@ -930,9 +789,10 @@
 }
 status_t DeviceHalAidl::removeDeviceEffect(
         const struct audio_port_config *device, sp<EffectHalInterface> effect) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
-    if (!effect) {
+    if (mModule == nullptr) return NO_INIT;
+    if (device == nullptr || effect == nullptr) {
         return BAD_VALUE;
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -945,22 +805,24 @@
                 __func__, requestedPortConfig.toString().c_str());
         return BAD_VALUE;
     }
-    auto existingPortConfigIt = findPortConfig(
-            requestedPortConfig.ext.get<AudioPortExt::Tag::device>().device);
-    if (existingPortConfigIt == mPortConfigs.end()) {
-        ALOGE("%s: could not find a configured device port for the config %s",
-                __func__, requestedPortConfig.toString().c_str());
-        return BAD_VALUE;
+    AudioPortConfig devicePortConfig;
+    {
+        std::lock_guard l(mLock);
+        RETURN_STATUS_IF_ERROR(mMapper.findPortConfig(
+                        requestedPortConfig.ext.get<AudioPortExt::Tag::device>().device,
+                        &devicePortConfig));
     }
     auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
     return statusTFromBinderStatus(mModule->removeDeviceEffect(
-                    existingPortConfigIt->first, aidlEffect->getIEffect()));
+                    devicePortConfig.id, aidlEffect->getIEffect()));
 }
 
 status_t DeviceHalAidl::getMmapPolicyInfos(
         media::audio::common::AudioMMapPolicyType policyType,
         std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
     AudioMMapPolicyType mmapPolicyType = VALUE_OR_RETURN_STATUS(
             cpp2ndk_AudioMMapPolicyType(policyType));
 
@@ -978,7 +840,9 @@
 }
 
 int32_t DeviceHalAidl::getAAudioMixerBurstCount() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
     int32_t mixerBurstCount = 0;
     if (mModule->getAAudioMixerBurstCount(&mixerBurstCount).isOk()) {
         return mixerBurstCount;
@@ -987,7 +851,9 @@
 }
 
 int32_t DeviceHalAidl::getAAudioHardwareBurstMinUsec() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
     int32_t hardwareBurstMinUsec = 0;
     if (mModule->getAAudioHardwareBurstMinUsec(&hardwareBurstMinUsec).isOk()) {
         return hardwareBurstMinUsec;
@@ -996,8 +862,9 @@
 }
 
 error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     int32_t aidlHwAvSync;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->generateHwAvSyncId(&aidlHwAvSync)));
     return VALUE_OR_RETURN_STATUS(
@@ -1006,13 +873,14 @@
 
 status_t DeviceHalAidl::dump(int fd, const Vector<String16>& args) {
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     return mModule->dump(fd, Args(args).args(), args.size());
 }
 
-int32_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
+status_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     if (supports == nullptr) {
         return BAD_VALUE;
     }
@@ -1021,50 +889,72 @@
 
 status_t DeviceHalAidl::getSoundDoseInterface(const std::string& module,
                                               ::ndk::SpAIBinder* soundDoseBinder)  {
-    TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (soundDoseBinder == nullptr) {
+        return BAD_VALUE;
+    }
     if (mSoundDose == nullptr) {
-        ndk::ScopedAStatus status = mModule->getSoundDose(&mSoundDose);
-        if (!status.isOk()) {
-            ALOGE("%s failed to return the sound dose interface for module %s: %s",
-                  __func__,
-                  module.c_str(),
-                  status.getDescription().c_str());
-            return BAD_VALUE;
-        }
+        ALOGE("%s failed to retrieve the sound dose interface for module %s",
+                __func__, module.c_str());
+        return BAD_VALUE;
     }
     *soundDoseBinder = mSoundDose->asBinder();
     ALOGI("%s using audio AIDL HAL sound dose interface", __func__);
-
     return OK;
 }
 
 status_t DeviceHalAidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
-    // There is not AIDL API defined for `prepareToDisconnectExternalDevice`.
-    // Call `setConnectedState` instead.
-    // TODO(b/279824103): call prepareToDisconnectExternalDevice when it is added.
-    if (const status_t status = setConnectedState(port, false /*connected*/); status == NO_ERROR) {
-        mDeviceDisconnectionNotified.insert(port->id);
-    }
-    // Return that there was no error as otherwise the disconnection procedure will not be
-    // considered complete for upper layers, and 'setConnectedState' will not be called again.
-    return NO_ERROR;
-}
-
-status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
+    if (mModule == nullptr) return NO_INIT;
     if (port == nullptr) {
         return BAD_VALUE;
     }
-    if (!connected && mDeviceDisconnectionNotified.erase(port->id) > 0) {
-        // For device disconnection, APM will first call `prepareToDisconnectExternalDevice`
-        // and then call `setConnectedState`. However, there is no API for
-        // `prepareToDisconnectExternalDevice` yet. In that case, `setConnectedState` will be
-        // called when calling `prepareToDisconnectExternalDevice`. Do not call to the HAL if
-        // previous call is successful. Also remove the cache here to avoid a large cache after
-        // a long run.
-        return NO_ERROR;
+    const bool isInput = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::portDirection(port->role, port->type)) ==
+                    ::aidl::android::AudioPortDirection::INPUT;
+    AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
+    if (aidlPort.ext.getTag() != AudioPortExt::device) {
+        ALOGE("%s: provided port is not a device port (module %s): %s",
+              __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        return BAD_VALUE;
+    }
+    status_t status = NO_ERROR;
+    {
+        std::lock_guard l(mLock);
+        status = mMapper.prepareToDisconnectExternalDevice(aidlPort);
+    }
+    if (status == UNKNOWN_TRANSACTION) {
+        // The HAL does not implement `prepareToDisconnectExternalDevice`;
+        // call `setConnectedState` instead.
+        RETURN_STATUS_IF_ERROR(setConnectedState(port, false /*connected*/));
+        std::lock_guard l(mLock);
+        mDeviceDisconnectionNotified.insert(port->id);
+        // Return that there was no error as otherwise the disconnection procedure will not be
+        // considered complete for upper layers, and 'setConnectedState' will not be called again.
+        return OK;
+    } else {
+        return status;
+    }
+}
+
+status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
+    if (port == nullptr) {
+        return BAD_VALUE;
+    }
+    if (!connected) {
+        std::lock_guard l(mLock);
+        if (mDeviceDisconnectionNotified.erase(port->id) > 0) {
+            // For device disconnection, APM will first call `prepareToDisconnectExternalDevice`
+            // and then call `setConnectedState`. If the HAL does not implement
+            // `prepareToDisconnectExternalDevice`, the disconnection has already been passed to
+            // the HAL via `setConnectedState` during that first call. Do not call the HAL again
+            // if that previous call succeeded. Also remove the entry here to avoid a large
+            // cache after a long run.
+            return OK;
+        }
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
             ::aidl::android::AudioPortDirection::INPUT;
@@ -1075,64 +965,17 @@
                 __func__, mInstance.c_str(), aidlPort.toString().c_str());
         return BAD_VALUE;
     }
-    if (connected) {
-        AudioDevice matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
-        // Reset the device address to find the "template" port.
-        matchDevice.address = AudioDeviceAddress::make<AudioDeviceAddress::id>();
-        auto portsIt = findPort(matchDevice);
-        if (portsIt == mPorts.end()) {
-            // Since 'setConnectedState' is called for all modules, it is normal when the device
-            // port not found in every one of them.
-            return BAD_VALUE;
-        } else {
-            ALOGD("%s: device port for device %s found in the module %s",
-                    __func__, matchDevice.toString().c_str(), mInstance.c_str());
-        }
-        resetUnusedPatchesAndPortConfigs();
-        // Use the ID of the "template" port, use all the information from the provided port.
-        aidlPort.id = portsIt->first;
-        AudioPort connectedPort;
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
-                                aidlPort, &connectedPort)));
-        const auto [it, inserted] = mPorts.insert(std::make_pair(connectedPort.id, connectedPort));
-        LOG_ALWAYS_FATAL_IF(!inserted,
-                "%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
-                __func__, mInstance.c_str(), connectedPort.toString().c_str(),
-                it->second.toString().c_str());
-        mConnectedPorts[connectedPort.id] = false;
-    } else {  // !connected
-        AudioDevice matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
-        auto portsIt = findPort(matchDevice);
-        if (portsIt == mPorts.end()) {
-            // Since 'setConnectedState' is called for all modules, it is normal when the device
-            // port not found in every one of them.
-            return BAD_VALUE;
-        } else {
-            ALOGD("%s: device port for device %s found in the module %s",
-                    __func__, matchDevice.toString().c_str(), mInstance.c_str());
-        }
-        resetUnusedPatchesAndPortConfigs();
-        // Streams are closed by AudioFlinger independently from device disconnections.
-        // It is possible that the stream has not been closed yet.
-        const int32_t portId = portsIt->second.id;
-        if (!isPortHeldByAStream(portId)) {
-            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                            mModule->disconnectExternalDevice(portId)));
-            mPorts.erase(portsIt);
-            mConnectedPorts.erase(portId);
-        } else {
-            ALOGD("%s: since device port ID %d is used by a stream, "
-                    "external device disconnection postponed", __func__, portId);
-            mConnectedPorts[portId] = true;
-        }
-    }
-    return updateRoutes();
+    std::lock_guard l(mLock);
+    return mMapper.setDevicePortConnectedState(aidlPort, connected);
 }
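
`prepareToDisconnectExternalDevice` above relies on the convention that invoking a method the connected HAL does not implement surfaces as `UNKNOWN_TRANSACTION`, in which case it falls back to `setConnectedState(false)` and remembers the port so the follow-up `setConnectedState` call becomes a no-op. Reduced to a sketch with hypothetical callables, the fallback pattern is:

    #include <functional>
    #include <utils/Errors.h>

    // Try the dedicated API first; if the remote HAL predates it, use the legacy path.
    android::status_t callWithFallback(const std::function<android::status_t()>& preferred,
                                       const std::function<android::status_t()>& fallback) {
        const android::status_t status = preferred();
        return status == android::UNKNOWN_TRANSACTION ? fallback() : status;
    }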
 
 status_t DeviceHalAidl::setSimulateDeviceConnections(bool enabled) {
     TIME_CHECK();
-    if (!mModule) return NO_INIT;
-    resetUnusedPatchesAndPortConfigs();
+    if (mModule == nullptr) return NO_INIT;
+    {
+        std::lock_guard l(mLock);
+        mMapper.resetUnusedPatchesAndPortConfigs();
+    }
     ModuleDebug debug{ .simulateDeviceConnections = enabled };
     status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
     // This is important to log as it affects HAL behavior.
@@ -1144,66 +987,24 @@
     return status;
 }
 
-bool DeviceHalAidl::audioDeviceMatches(const AudioDevice& device, const AudioPort& p) {
-    if (p.ext.getTag() != AudioPortExt::Tag::device) return false;
-    return p.ext.get<AudioPortExt::Tag::device>().device == device;
-}
-
-bool DeviceHalAidl::audioDeviceMatches(const AudioDevice& device, const AudioPortConfig& p) {
-    if (p.ext.getTag() != AudioPortExt::Tag::device) return false;
-    if (device.type.type == AudioDeviceType::IN_DEFAULT) {
-        return p.portId == mDefaultInputPortId;
-    } else if (device.type.type == AudioDeviceType::OUT_DEFAULT) {
-        return p.portId == mDefaultOutputPortId;
-    }
-    return p.ext.get<AudioPortExt::Tag::device>().device == device;
-}
-
-status_t DeviceHalAidl::createOrUpdatePortConfig(
-        const AudioPortConfig& requestedPortConfig, PortConfigs::iterator* result, bool* created) {
-    TIME_CHECK();
-    AudioPortConfig appliedPortConfig;
-    bool applied = false;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->setAudioPortConfig(
-                            requestedPortConfig, &appliedPortConfig, &applied)));
-    if (!applied) {
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->setAudioPortConfig(
-                                appliedPortConfig, &appliedPortConfig, &applied)));
-        if (!applied) {
-            ALOGE("%s: module %s did not apply suggested config %s",
-                    __func__, mInstance.c_str(), appliedPortConfig.toString().c_str());
-            return NO_INIT;
-        }
-    }
-
-    int32_t id = appliedPortConfig.id;
-    if (requestedPortConfig.id != 0 && requestedPortConfig.id != id) {
-        LOG_ALWAYS_FATAL("%s: requested port config id %d changed to %d", __func__,
-                requestedPortConfig.id, id);
-    }
-
-    auto [it, inserted] = mPortConfigs.insert_or_assign(std::move(id),
-            std::move(appliedPortConfig));
-    *result = it;
-    *created = inserted;
-    return OK;
-}
-
 status_t DeviceHalAidl::filterAndRetrieveBtA2dpParameters(
         AudioParameter &keys, AudioParameter *result) {
-    TIME_CHECK();
     if (String8 key = String8(AudioParameter::keyReconfigA2dpSupported); keys.containsKey(key)) {
         keys.remove(key);
-        bool supports;
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                        mBluetoothA2dp->supportsOffloadReconfiguration(&supports)));
-        result->addInt(key, supports ? 1 : 0);
+        if (mBluetoothA2dp != nullptr) {
+            bool supports;
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                            mBluetoothA2dp->supportsOffloadReconfiguration(&supports)));
+            result->addInt(key, supports ? 1 : 0);
+        } else {
+            ALOGI("%s: no IBluetoothA2dp on %s", __func__, mInstance.c_str());
+            result->addInt(key, 0);
+        }
     }
     return OK;
 }
 
 status_t DeviceHalAidl::filterAndUpdateBtA2dpParameters(AudioParameter &parameters) {
-    TIME_CHECK();
     std::optional<bool> a2dpEnabled;
     std::optional<std::vector<VendorParameter>> reconfigureOffload;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
@@ -1245,7 +1046,6 @@
 }
 
 status_t DeviceHalAidl::filterAndUpdateBtHfpParameters(AudioParameter &parameters) {
-    TIME_CHECK();
     IBluetooth::HfpConfig hfpConfig;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
                     parameters, String8(AudioParameter::keyBtHfpEnable),
@@ -1284,7 +1084,6 @@
 }
 
 status_t DeviceHalAidl::filterAndUpdateBtLeParameters(AudioParameter &parameters) {
-    TIME_CHECK();
     std::optional<bool> leEnabled;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
                     parameters, String8(AudioParameter::keyBtLeSuspended),
@@ -1307,7 +1106,6 @@
 }
 
 status_t DeviceHalAidl::filterAndUpdateBtScoParameters(AudioParameter &parameters) {
-    TIME_CHECK();
     IBluetooth::ScoConfig scoConfig;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
                     parameters, String8(AudioParameter::keyBtSco),
@@ -1365,7 +1163,6 @@
 }
 
 status_t DeviceHalAidl::filterAndUpdateScreenParameters(AudioParameter &parameters) {
-    TIME_CHECK();
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
                     parameters, String8(AudioParameter::keyScreenState),
                     [&](const String8& onOrOff) -> status_t {
@@ -1403,7 +1200,6 @@
 }
 
 status_t DeviceHalAidl::filterAndUpdateTelephonyParameters(AudioParameter &parameters) {
-    TIME_CHECK();
     using TtyMode = ITelephony::TelecomConfig::TtyMode;
     ITelephony::TelecomConfig telConfig;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
@@ -1448,398 +1244,6 @@
     return OK;
 }
 
-status_t DeviceHalAidl::findOrCreatePatch(
-        const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
-    std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
-            requestedPatch.sourcePortConfigIds.end());
-    std::set<int32_t> sinkPortConfigIds(requestedPatch.sinkPortConfigIds.begin(),
-            requestedPatch.sinkPortConfigIds.end());
-    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, patch, created);
-}
-
-status_t DeviceHalAidl::findOrCreatePatch(
-        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
-        AudioPatch* patch, bool* created) {
-    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds);
-    if (patchIt == mPatches.end()) {
-        TIME_CHECK();
-        AudioPatch requestedPatch, appliedPatch;
-        requestedPatch.sourcePortConfigIds.insert(requestedPatch.sourcePortConfigIds.end(),
-                sourcePortConfigIds.begin(), sourcePortConfigIds.end());
-        requestedPatch.sinkPortConfigIds.insert(requestedPatch.sinkPortConfigIds.end(),
-                sinkPortConfigIds.begin(), sinkPortConfigIds.end());
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->setAudioPatch(
-                                requestedPatch, &appliedPatch)));
-        patchIt = mPatches.insert(mPatches.end(), std::make_pair(appliedPatch.id, appliedPatch));
-        *created = true;
-    } else {
-        *created = false;
-    }
-    *patch = patchIt->second;
-    return OK;
-}
-
-status_t DeviceHalAidl::findOrCreatePortConfig(const AudioDevice& device, const AudioConfig* config,
-        AudioPortConfig* portConfig, bool* created) {
-    auto portConfigIt = findPortConfig(device);
-    if (portConfigIt == mPortConfigs.end()) {
-        auto portsIt = findPort(device);
-        if (portsIt == mPorts.end()) {
-            ALOGE("%s: device port for device %s is not found in the module %s",
-                    __func__, device.toString().c_str(), mInstance.c_str());
-            return BAD_VALUE;
-        }
-        AudioPortConfig requestedPortConfig;
-        requestedPortConfig.portId = portsIt->first;
-        if (config != nullptr) {
-            setPortConfigFromConfig(&requestedPortConfig, *config);
-        }
-        RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
-                created));
-    } else {
-        *created = false;
-    }
-    *portConfig = portConfigIt->second;
-    return OK;
-}
-
-status_t DeviceHalAidl::findOrCreatePortConfig(
-        const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle,
-        AudioSource source, const std::set<int32_t>& destinationPortIds,
-        AudioPortConfig* portConfig, bool* created) {
-    // These flags get removed one by one in this order when retrying port finding.
-    static const std::vector<AudioInputFlags> kOptionalInputFlags{
-        AudioInputFlags::FAST, AudioInputFlags::RAW };
-    auto portConfigIt = findPortConfig(config, flags, ioHandle);
-    if (portConfigIt == mPortConfigs.end() && flags.has_value()) {
-        auto optionalInputFlagsIt = kOptionalInputFlags.begin();
-        AudioIoFlags matchFlags = flags.value();
-        auto portsIt = findPort(config, matchFlags, destinationPortIds);
-        while (portsIt == mPorts.end() && matchFlags.getTag() == AudioIoFlags::Tag::input
-                && optionalInputFlagsIt != kOptionalInputFlags.end()) {
-            if (!isBitPositionFlagSet(
-                            matchFlags.get<AudioIoFlags::Tag::input>(), *optionalInputFlagsIt)) {
-                ++optionalInputFlagsIt;
-                continue;
-            }
-            matchFlags.set<AudioIoFlags::Tag::input>(matchFlags.get<AudioIoFlags::Tag::input>() &
-                    ~makeBitPositionFlagMask(*optionalInputFlagsIt++));
-            portsIt = findPort(config, matchFlags, destinationPortIds);
-            ALOGI("%s: mix port for config %s, flags %s was not found in the module %s, "
-                    "retried with flags %s", __func__, config.toString().c_str(),
-                    flags.value().toString().c_str(), mInstance.c_str(),
-                    matchFlags.toString().c_str());
-        }
-        if (portsIt == mPorts.end()) {
-            ALOGE("%s: mix port for config %s, flags %s is not found in the module %s",
-                    __func__, config.toString().c_str(), matchFlags.toString().c_str(),
-                    mInstance.c_str());
-            return BAD_VALUE;
-        }
-        AudioPortConfig requestedPortConfig;
-        requestedPortConfig.portId = portsIt->first;
-        setPortConfigFromConfig(&requestedPortConfig, config);
-        requestedPortConfig.ext = AudioPortMixExt{ .handle = ioHandle };
-        if (matchFlags.getTag() == AudioIoFlags::Tag::input
-                && source != AudioSource::SYS_RESERVED_INVALID) {
-            requestedPortConfig.ext.get<AudioPortExt::Tag::mix>().usecase =
-                    AudioPortMixExtUseCase::make<AudioPortMixExtUseCase::Tag::source>(source);
-        }
-        RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
-                created));
-    } else if (portConfigIt == mPortConfigs.end() && !flags.has_value()) {
-        ALOGW("%s: mix port config for %s, handle %d not found in the module %s, "
-                "and was not created as flags are not specified",
-                __func__, config.toString().c_str(), ioHandle, mInstance.c_str());
-        return BAD_VALUE;
-    } else {
-        AudioPortConfig requestedPortConfig = portConfigIt->second;
-        if (requestedPortConfig.ext.getTag() == AudioPortExt::Tag::mix) {
-            AudioPortMixExt& mixExt = requestedPortConfig.ext.get<AudioPortExt::Tag::mix>();
-            if (mixExt.usecase.getTag() == AudioPortMixExtUseCase::Tag::source &&
-                    source != AudioSource::SYS_RESERVED_INVALID) {
-                mixExt.usecase.get<AudioPortMixExtUseCase::Tag::source>() = source;
-            }
-        }
-
-        if (requestedPortConfig != portConfigIt->second) {
-            RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
-                    created));
-        } else {
-            *created = false;
-        }
-    }
-    *portConfig = portConfigIt->second;
-    return OK;
-}
-
-status_t DeviceHalAidl::findOrCreatePortConfig(
-        const AudioPortConfig& requestedPortConfig, const std::set<int32_t>& destinationPortIds,
-        AudioPortConfig* portConfig, bool* created) {
-    using Tag = AudioPortExt::Tag;
-    if (requestedPortConfig.ext.getTag() == Tag::mix) {
-        if (const auto& p = requestedPortConfig;
-                !p.sampleRate.has_value() || !p.channelMask.has_value() ||
-                !p.format.has_value()) {
-            ALOGW("%s: provided mix port config is not fully specified: %s",
-                    __func__, p.toString().c_str());
-            return BAD_VALUE;
-        }
-        AudioConfig config;
-        setConfigFromPortConfig(&config, requestedPortConfig);
-        AudioSource source = requestedPortConfig.ext.get<Tag::mix>().usecase.getTag() ==
-                AudioPortMixExtUseCase::Tag::source ?
-                requestedPortConfig.ext.get<Tag::mix>().usecase.
-                get<AudioPortMixExtUseCase::Tag::source>() : AudioSource::SYS_RESERVED_INVALID;
-        return findOrCreatePortConfig(config, requestedPortConfig.flags,
-                requestedPortConfig.ext.get<Tag::mix>().handle, source, destinationPortIds,
-                portConfig, created);
-    } else if (requestedPortConfig.ext.getTag() == Tag::device) {
-        return findOrCreatePortConfig(
-                requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
-                portConfig, created);
-    }
-    ALOGW("%s: unsupported audio port config: %s",
-            __func__, requestedPortConfig.toString().c_str());
-    return BAD_VALUE;
-}
-
-DeviceHalAidl::Patches::iterator DeviceHalAidl::findPatch(
-        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds) {
-    return std::find_if(mPatches.begin(), mPatches.end(),
-            [&](const auto& pair) {
-                const auto& p = pair.second;
-                std::set<int32_t> patchSrcs(
-                        p.sourcePortConfigIds.begin(), p.sourcePortConfigIds.end());
-                std::set<int32_t> patchSinks(
-                        p.sinkPortConfigIds.begin(), p.sinkPortConfigIds.end());
-                return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks; });
-}
-
-DeviceHalAidl::Ports::iterator DeviceHalAidl::findPort(const AudioDevice& device) {
-    if (device.type.type == AudioDeviceType::IN_DEFAULT) {
-        return mPorts.find(mDefaultInputPortId);
-    } else if (device.type.type == AudioDeviceType::OUT_DEFAULT) {
-        return mPorts.find(mDefaultOutputPortId);
-    }
-    if (device.address.getTag() != AudioDeviceAddress::id ||
-            !device.address.get<AudioDeviceAddress::id>().empty()) {
-        return std::find_if(mPorts.begin(), mPorts.end(),
-                [&](const auto& pair) { return audioDeviceMatches(device, pair.second); });
-    }
-    // For connection w/o an address, two ports can be found: the template port,
-    // and a connected port (if exists). Make sure we return the connected port.
-    DeviceHalAidl::Ports::iterator portIt = mPorts.end();
-    for (auto it = mPorts.begin(); it != mPorts.end(); ++it) {
-        if (audioDeviceMatches(device, it->second)) {
-            if (mConnectedPorts.find(it->first) != mConnectedPorts.end()) {
-                return it;
-            } else {
-                // Will return 'it' if there is no connected port.
-                portIt = it;
-            }
-        }
-    }
-    return portIt;
-}
-
-DeviceHalAidl::Ports::iterator DeviceHalAidl::findPort(
-            const AudioConfig& config, const AudioIoFlags& flags,
-            const std::set<int32_t>& destinationPortIds) {
-    auto belongsToProfile = [&config](const AudioProfile& prof) {
-        return (isDefaultAudioFormat(config.base.format) || prof.format == config.base.format) &&
-                (config.base.channelMask.getTag() == AudioChannelLayout::none ||
-                        std::find(prof.channelMasks.begin(), prof.channelMasks.end(),
-                                config.base.channelMask) != prof.channelMasks.end()) &&
-                (config.base.sampleRate == 0 ||
-                        std::find(prof.sampleRates.begin(), prof.sampleRates.end(),
-                                config.base.sampleRate) != prof.sampleRates.end());
-    };
-    static const std::vector<AudioOutputFlags> kOptionalOutputFlags{AudioOutputFlags::BIT_PERFECT};
-    int optionalFlags = 0;
-    auto flagMatches = [&flags, &optionalFlags](const AudioIoFlags& portFlags) {
-        // Ports should be able to match if the optional flags are not requested.
-        return portFlags == flags ||
-               (portFlags.getTag() == AudioIoFlags::Tag::output &&
-                        AudioIoFlags::make<AudioIoFlags::Tag::output>(
-                                portFlags.get<AudioIoFlags::Tag::output>() &
-                                        ~optionalFlags) == flags);
-    };
-    auto matcher = [&](const auto& pair) {
-        const auto& p = pair.second;
-        return p.ext.getTag() == AudioPortExt::Tag::mix &&
-                flagMatches(p.flags) &&
-                (destinationPortIds.empty() ||
-                        std::any_of(destinationPortIds.begin(), destinationPortIds.end(),
-                                [&](const int32_t destId) { return mRoutingMatrix.count(
-                                            std::make_pair(p.id, destId)) != 0; })) &&
-                (p.profiles.empty() ||
-                        std::find_if(p.profiles.begin(), p.profiles.end(), belongsToProfile) !=
-                        p.profiles.end()); };
-    auto result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
-    if (result == mPorts.end() && flags.getTag() == AudioIoFlags::Tag::output) {
-        auto optionalOutputFlagsIt = kOptionalOutputFlags.begin();
-        while (result == mPorts.end() && optionalOutputFlagsIt != kOptionalOutputFlags.end()) {
-            if (isBitPositionFlagSet(
-                        flags.get<AudioIoFlags::Tag::output>(), *optionalOutputFlagsIt)) {
-                // If the flag is set by the request, it must be matched.
-                ++optionalOutputFlagsIt;
-                continue;
-            }
-            optionalFlags |= makeBitPositionFlagMask(*optionalOutputFlagsIt++);
-            result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
-            ALOGI("%s: port for config %s, flags %s was not found in the module %s, "
-                  "retried with excluding optional flags %#x", __func__, config.toString().c_str(),
-                    flags.toString().c_str(), mInstance.c_str(), optionalFlags);
-        }
-    }
-    return result;
-}
-
-DeviceHalAidl::PortConfigs::iterator DeviceHalAidl::findPortConfig(const AudioDevice& device) {
-    return std::find_if(mPortConfigs.begin(), mPortConfigs.end(),
-            [&](const auto& pair) { return audioDeviceMatches(device, pair.second); });
-}
-
-DeviceHalAidl::PortConfigs::iterator DeviceHalAidl::findPortConfig(
-            const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle) {
-    using Tag = AudioPortExt::Tag;
-    return std::find_if(mPortConfigs.begin(), mPortConfigs.end(),
-            [&](const auto& pair) {
-                const auto& p = pair.second;
-                LOG_ALWAYS_FATAL_IF(p.ext.getTag() == Tag::mix &&
-                        (!p.sampleRate.has_value() || !p.channelMask.has_value() ||
-                                !p.format.has_value() || !p.flags.has_value()),
-                        "%s: stored mix port config is not fully specified: %s",
-                        __func__, p.toString().c_str());
-                return p.ext.getTag() == Tag::mix &&
-                        isConfigEqualToPortConfig(config, p) &&
-                        (!flags.has_value() || p.flags.value() == flags.value()) &&
-                        p.ext.template get<Tag::mix>().handle == ioHandle; });
-}
-
-bool DeviceHalAidl::isPortHeldByAStream(int32_t portId) {
-    // It is assumed that mStreams has already been cleaned up.
-    for (const auto& streamPair : mStreams) {
-        int32_t patchId = streamPair.second;
-        auto patchIt = mPatches.find(patchId);
-        if (patchIt == mPatches.end()) continue;
-        for (int32_t id : patchIt->second.sourcePortConfigIds) {
-            auto portConfigIt = mPortConfigs.find(id);
-            if (portConfigIt != mPortConfigs.end() && portConfigIt->second.portId == portId) {
-                return true;
-            }
-        }
-        for (int32_t id : patchIt->second.sinkPortConfigIds) {
-            auto portConfigIt = mPortConfigs.find(id);
-            if (portConfigIt != mPortConfigs.end() && portConfigIt->second.portId == portId) {
-                return true;
-            }
-        }
-    }
-    return false;
-}
-
-void DeviceHalAidl::resetPatch(int32_t patchId) {
-    if (auto it = mPatches.find(patchId); it != mPatches.end()) {
-        mPatches.erase(it);
-        TIME_CHECK();
-        if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
-            ALOGE("%s: error while resetting patch %d: %s",
-                    __func__, patchId, status.getDescription().c_str());
-        }
-        return;
-    }
-    ALOGE("%s: patch id %d not found", __func__, patchId);
-}
-
-void DeviceHalAidl::resetPortConfig(int32_t portConfigId) {
-    if (auto it = mPortConfigs.find(portConfigId); it != mPortConfigs.end()) {
-        mPortConfigs.erase(it);
-        TIME_CHECK();
-        if (ndk::ScopedAStatus status = mModule->resetAudioPortConfig(portConfigId);
-                !status.isOk()) {
-            ALOGE("%s: error while resetting port config %d: %s",
-                    __func__, portConfigId, status.getDescription().c_str());
-        }
-        return;
-    }
-    ALOGE("%s: port config id %d not found", __func__, portConfigId);
-}
-
-void DeviceHalAidl::resetUnusedPatches() {
-    // Since patches can be created independently of streams via 'createAudioPatch',
-    // here we only clean up patches for released streams.
-    for (auto it = mStreams.begin(); it != mStreams.end(); ) {
-        if (auto streamSp = it->first.promote(); streamSp) {
-            ++it;
-        } else {
-            resetPatch(it->second);
-            it = mStreams.erase(it);
-        }
-    }
-}
-
-void DeviceHalAidl::resetUnusedPatchesAndPortConfigs() {
-    resetUnusedPatches();
-    resetUnusedPortConfigs();
-}
-
-void DeviceHalAidl::resetUnusedPortConfigs() {
-    // The assumption is that port configs are used to create patches
-    // (or to open streams, but that involves creation of patches, too). Thus,
-    // orphaned port configs can and should be reset.
-    std::map<int32_t, int32_t /*portID*/> portConfigIds;
-    std::transform(mPortConfigs.begin(), mPortConfigs.end(),
-            std::inserter(portConfigIds, portConfigIds.end()),
-            [](const auto& pcPair) { return std::make_pair(pcPair.first, pcPair.second.portId); });
-    for (const auto& p : mPatches) {
-        for (int32_t id : p.second.sourcePortConfigIds) portConfigIds.erase(id);
-        for (int32_t id : p.second.sinkPortConfigIds) portConfigIds.erase(id);
-    }
-    for (int32_t id : mInitialPortConfigIds) {
-        portConfigIds.erase(id);
-    }
-    std::set<int32_t> retryDeviceDisconnection;
-    for (const auto& portConfigAndIdPair : portConfigIds) {
-        resetPortConfig(portConfigAndIdPair.first);
-        if (const auto it = mConnectedPorts.find(portConfigAndIdPair.second);
-                it != mConnectedPorts.end() && it->second) {
-            retryDeviceDisconnection.insert(portConfigAndIdPair.second);
-        }
-    }
-    for (int32_t portId : retryDeviceDisconnection) {
-        if (!isPortHeldByAStream(portId)) {
-            TIME_CHECK();
-            if (auto status = mModule->disconnectExternalDevice(portId); status.isOk()) {
-                mPorts.erase(portId);
-                mConnectedPorts.erase(portId);
-                ALOGD("%s: executed postponed external device disconnection for port ID %d",
-                        __func__, portId);
-            }
-        }
-    }
-    if (!retryDeviceDisconnection.empty()) {
-        updateRoutes();
-    }
-}
-
-status_t DeviceHalAidl::updateRoutes() {
-    TIME_CHECK();
-    RETURN_STATUS_IF_ERROR(
-            statusTFromBinderStatus(mModule->getAudioRoutes(&mRoutes)));
-    ALOGW_IF(mRoutes.empty(), "%s: module %s returned an empty list of audio routes",
-            __func__, mInstance.c_str());
-    mRoutingMatrix.clear();
-    for (const auto& r : mRoutes) {
-        for (auto portId : r.sourcePortIds) {
-            mRoutingMatrix.emplace(r.sinkPortId, portId);
-            mRoutingMatrix.emplace(portId, r.sinkPortId);
-        }
-    }
-    return OK;
-}
-
 void DeviceHalAidl::clearCallbacks(void* cookie) {
     std::lock_guard l(mLock);
     mCallbacks.erase(cookie);
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 20cf88c..9493e47 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -17,7 +17,9 @@
 #pragma once
 
 #include <map>
-#include <set>
+#include <memory>
+#include <mutex>
+#include <string>
 #include <vector>
 
 #include <aidl/android/media/audio/IHalAdapterVendorExtension.h>
@@ -26,9 +28,10 @@
 #include <android-base/thread_annotations.h>
 #include <media/audiohal/DeviceHalInterface.h>
 #include <media/audiohal/EffectHalInterface.h>
-#include <media/audiohal/StreamHalInterface.h>
 
+#include "Cleanups.h"
 #include "ConversionHelperAidl.h"
+#include "Hal2AidlMapper.h"
 
 namespace android {
 
@@ -165,7 +168,7 @@
 
     error::Result<audio_hw_sync_t> getHwAvSync() override;
 
-    int32_t supportsBluetoothVariableLatency(bool* supports __unused) override;
+    status_t supportsBluetoothVariableLatency(bool* supports __unused) override;
 
     status_t getSoundDoseInterface(const std::string& module,
                                    ::ndk::SpAIBinder* soundDoseBinder) override;
@@ -176,6 +179,9 @@
 
     status_t setSimulateDeviceConnections(bool enabled) override;
 
+    status_t getAudioMixPort(const struct audio_port_v7* devicePort,
+                             struct audio_port_v7* mixPort) override;
+
     status_t dump(int __unused, const Vector<String16>& __unused) override;
 
   private:
@@ -191,19 +197,6 @@
         Status status = Status::UNKNOWN;
         MicrophoneInfoProvider::Info info;
     };
-    // IDs of ports for connected external devices, and whether they are held by streams.
-    using ConnectedPorts = std::map<int32_t /*port ID*/, bool>;
-    using Patches = std::map<int32_t /*patch ID*/,
-            ::aidl::android::hardware::audio::core::AudioPatch>;
-    using PortConfigs = std::map<int32_t /*port config ID*/,
-            ::aidl::android::media::audio::common::AudioPortConfig>;
-    using Ports = std::map<int32_t /*port ID*/, ::aidl::android::media::audio::common::AudioPort>;
-    using Routes = std::vector<::aidl::android::hardware::audio::core::AudioRoute>;
-    // Answers the question "whether portID 'first' is reachable from portID 'second'?"
-    // It's not a map because both portIDs are known. The matrix is symmetric.
-    using RoutingMatrix = std::set<std::pair<int32_t, int32_t>>;
-    using Streams = std::map<wp<StreamHalInterface>, int32_t /*patch ID*/>;
-    class Cleanups;
 
     // Must not be constructed directly by clients.
     DeviceHalAidl(
@@ -213,13 +206,6 @@
 
     ~DeviceHalAidl() override = default;
 
-    bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
-            const ::aidl::android::media::audio::common::AudioPort& p);
-    bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
-            const ::aidl::android::media::audio::common::AudioPortConfig& p);
-    status_t createOrUpdatePortConfig(
-            const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
-            PortConfigs::iterator* result, bool *created);
     status_t filterAndRetrieveBtA2dpParameters(AudioParameter &keys, AudioParameter *result);
     status_t filterAndUpdateBtA2dpParameters(AudioParameter &parameters);
     status_t filterAndUpdateBtHfpParameters(AudioParameter &parameters);
@@ -227,59 +213,6 @@
     status_t filterAndUpdateBtScoParameters(AudioParameter &parameters);
     status_t filterAndUpdateScreenParameters(AudioParameter &parameters);
     status_t filterAndUpdateTelephonyParameters(AudioParameter &parameters);
-    status_t findOrCreatePatch(
-        const std::set<int32_t>& sourcePortConfigIds,
-        const std::set<int32_t>& sinkPortConfigIds,
-        ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
-    status_t findOrCreatePatch(
-        const ::aidl::android::hardware::audio::core::AudioPatch& requestedPatch,
-        ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
-    status_t findOrCreatePortConfig(
-            const ::aidl::android::media::audio::common::AudioDevice& device,
-            const ::aidl::android::media::audio::common::AudioConfig* config,
-            ::aidl::android::media::audio::common::AudioPortConfig* portConfig,
-            bool* created);
-    status_t findOrCreatePortConfig(
-            const ::aidl::android::media::audio::common::AudioConfig& config,
-            const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
-            int32_t ioHandle,
-            ::aidl::android::media::audio::common::AudioSource aidlSource,
-            const std::set<int32_t>& destinationPortIds,
-            ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
-    status_t findOrCreatePortConfig(
-        const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
-        const std::set<int32_t>& destinationPortIds,
-        ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
-    Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
-            const std::set<int32_t>& sinkPortConfigIds);
-    Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
-    Ports::iterator findPort(
-            const ::aidl::android::media::audio::common::AudioConfig& config,
-            const ::aidl::android::media::audio::common::AudioIoFlags& flags,
-            const std::set<int32_t>& destinationPortIds);
-    PortConfigs::iterator findPortConfig(
-            const ::aidl::android::media::audio::common::AudioDevice& device);
-    PortConfigs::iterator findPortConfig(
-            const ::aidl::android::media::audio::common::AudioConfig& config,
-            const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
-            int32_t ioHandle);
-    bool isPortHeldByAStream(int32_t portId);
-    status_t prepareToOpenStream(
-        int32_t aidlHandle,
-        const ::aidl::android::media::audio::common::AudioDevice& aidlDevice,
-        const ::aidl::android::media::audio::common::AudioIoFlags& aidlFlags,
-        ::aidl::android::media::audio::common::AudioSource aidlSource,
-        struct audio_config* config,
-        Cleanups* cleanups,
-        ::aidl::android::media::audio::common::AudioConfig* aidlConfig,
-        ::aidl::android::media::audio::common::AudioPortConfig* mixPortConfig,
-        ::aidl::android::hardware::audio::core::AudioPatch* aidlPatch);
-    void resetPatch(int32_t patchId);
-    void resetPortConfig(int32_t portConfigId);
-    void resetUnusedPatches();
-    void resetUnusedPatchesAndPortConfigs();
-    void resetUnusedPortConfigs();
-    status_t updateRoutes();
 
     // CallbackBroker implementation
     void clearCallbacks(void* cookie) override;
@@ -306,22 +239,14 @@
     const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetooth> mBluetooth;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothA2dp> mBluetoothA2dp;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
-    std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose>
-        mSoundDose = nullptr;
-    Ports mPorts;
-    int32_t mDefaultInputPortId = -1;
-    int32_t mDefaultOutputPortId = -1;
-    PortConfigs mPortConfigs;
-    std::set<int32_t> mInitialPortConfigIds;
-    Patches mPatches;
-    Routes mRoutes;
-    RoutingMatrix mRoutingMatrix;
-    Streams mStreams;
-    Microphones mMicrophones;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose> mSoundDose;
+
     std::mutex mLock;
     std::map<void*, Callbacks> mCallbacks GUARDED_BY(mLock);
-    std::set<audio_port_handle_t> mDeviceDisconnectionNotified;
-    ConnectedPorts mConnectedPorts;
+    std::set<audio_port_handle_t> mDeviceDisconnectionNotified GUARDED_BY(mLock);
+    Hal2AidlMapper mMapper GUARDED_BY(mLock);
+    LockedAccessor<Hal2AidlMapper> mMapperAccessor;
+    Microphones mMicrophones GUARDED_BY(mLock);
 };
 
 } // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index f96d419..e8e1f46 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -304,7 +304,12 @@
                 }
                 HidlUtils::audioConfigToHal(suggestedConfig, config);
             });
-    return processReturn("openOutputStream", ret, retval);
+    const status_t status = processReturn("openOutputStream", ret, retval);
+    cleanupStreams();
+    if (status == NO_ERROR) {
+        mStreams.insert({handle, *outStream});
+    }
+    return status;
 }
 
 status_t DeviceHalHidl::openInputStream(
@@ -377,7 +382,12 @@
                 }
                 HidlUtils::audioConfigToHal(suggestedConfig, config);
             });
-    return processReturn("openInputStream", ret, retval);
+    const status_t status = processReturn("openInputStream", ret, retval);
+    cleanupStreams();
+    if (status == NO_ERROR) {
+        mStreams.insert({handle, *inStream});
+    }
+    return status;
 }
 
 status_t DeviceHalHidl::supportsAudioPatches(bool *supportsPatches) {
@@ -684,4 +694,148 @@
 }
 #endif
 
+status_t DeviceHalHidl::supportsBluetoothVariableLatency(bool* supports) {
+    if (supports == nullptr) {
+        return BAD_VALUE;
+    }
+    *supports = false;
+
+    String8 reply;
+    status_t status = getParameters(
+            String8(AUDIO_PARAMETER_BT_VARIABLE_LATENCY_SUPPORTED), &reply);
+    if (status != NO_ERROR) {
+        return status;
+    }
+    AudioParameter replyParams(reply);
+    String8 trueOrFalse;
+    status = replyParams.get(
+            String8(AUDIO_PARAMETER_BT_VARIABLE_LATENCY_SUPPORTED), trueOrFalse);
+    if (status != NO_ERROR) {
+        return status;
+    }
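+    // The reply is an AudioParameter key/value pair; a value of "true" (AudioParameter::valueTrue)
+    // means the HAL supports Bluetooth variable latency.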
+    *supports = trueOrFalse == AudioParameter::valueTrue;
+    return NO_ERROR;
+}
+
+namespace {
+
+status_t getParametersFromStream(
+        sp<StreamHalInterface> stream,
+        const char* parameters,
+        const char* extraParameters,
+        String8* reply) {
+    String8 request(parameters);
+    if (extraParameters != nullptr) {
+        request.append(";");
+        request.append(extraParameters);
+    }
+    status_t status = stream->getParameters(request, reply);
+    if (status != NO_ERROR) {
+        ALOGW("%s, failed to query %s, status=%d", __func__, parameters, status);
+        return status;
+    }
+    AudioParameter repliedParameters(*reply);
+    status = repliedParameters.get(String8(parameters), *reply);
+    if (status != NO_ERROR) {
+        ALOGW("%s: failed to retrieve %s, bailing out", __func__, parameters);
+    }
+    return status;
+}
+
+} // namespace
+
+status_t DeviceHalHidl::getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                        struct audio_port_v7 *mixPort) {
+    // The HIDL HAL does not support querying mix port information directly. If the HAL supports
+    // the `getAudioPort` API for querying device port attributes, use the structured audio
+    // profiles that match the attributes reported by the `getParameters` API. Otherwise, rely
+    // only on the attributes reported by the `getParameters` API.
+    struct audio_port_v7 temp = *devicePort;
+    AudioProfileAttributesMultimap attrsFromDevice;
+    status_t status = getAudioPort(&temp);
+    if (status == NO_ERROR) {
+        attrsFromDevice = createAudioProfilesAttrMap(temp.audio_profiles, 0 /*first*/,
+                                                     temp.num_audio_profiles);
+    }
+    auto streamIt = mStreams.find(mixPort->ext.mix.handle);
+    if (streamIt == mStreams.end()) {
+        return BAD_VALUE;
+    }
+    auto stream = streamIt->second.promote();
+    if (stream == nullptr) {
+        return BAD_VALUE;
+    }
+
+    String8 formatsStr;
+    status = getParametersFromStream(
+            stream, AudioParameter::keyStreamSupportedFormats, nullptr /*extraParameters*/,
+            &formatsStr);
+    if (status != NO_ERROR) {
+        return status;
+    }
+    FormatVector formats = formatsFromString(formatsStr.c_str());
+
+    mixPort->num_audio_profiles = 0;
+    for (audio_format_t format : formats) {
+        if (mixPort->num_audio_profiles >= AUDIO_PORT_MAX_AUDIO_PROFILES) {
+            ALOGW("%s, too many audio profiles", __func__);
+            break;
+        }
+        AudioParameter formatParameter;
+        formatParameter.addInt(String8(AudioParameter::keyFormat), format);
+
+        String8 samplingRatesStr;
+        status = getParametersFromStream(
+                stream, AudioParameter::keyStreamSupportedSamplingRates,
+                formatParameter.toString(), &samplingRatesStr);
+        if (status != NO_ERROR) {
+            // Failed to query supported sample rate for current format, may succeed with
+            // other formats.
+            ALOGW("Skip adding format=%#x, status=%d", format, status);
+            continue;
+        }
+        SampleRateSet sampleRatesFromStream = samplingRatesFromString(samplingRatesStr.c_str());
+        if (sampleRatesFromStream.empty()) {
+            ALOGW("Skip adding format=%#x as the returned sampling rates are empty", format);
+            continue;
+        }
+        String8 channelMasksStr;
+        status = getParametersFromStream(
+                stream, AudioParameter::keyStreamSupportedChannels,
+                formatParameter.toString(), &channelMasksStr);
+        if (status != NO_ERROR) {
+            // Failed to query supported channel masks for current format, may succeed with
+            // other formats.
+            ALOGW("Skip adding format=%#x, status=%d", format, status);
+            continue;
+        }
+        ChannelMaskSet channelMasksFromStream = channelMasksFromString(channelMasksStr.c_str());
+        if (channelMasksFromStream.empty()) {
+            ALOGW("Skip adding format=%#x as the returned channel masks are empty", format);
+            continue;
+        }
+
+        // For each audio format, all audio profiles from the device port with the same format are
+        // added to the mix port after filtering their sample rates and channel masks according to
+        // the reply of the getParameters API. If the getParameters API reports a sample rate or
+        // channel mask that the device port does not, an additional audio profile is added for it.
+        populateAudioProfiles(attrsFromDevice, format, channelMasksFromStream,
+                              sampleRatesFromStream, mixPort->audio_profiles,
+                              &mixPort->num_audio_profiles);
+    }
+
+    return NO_ERROR;
+}
+
+void DeviceHalHidl::cleanupStreams() {
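+    // Prune entries whose streams have been destroyed (their weak pointers no longer promote).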
+    for (auto it = mStreams.begin(); it != mStreams.end();) {
+        if (it->second.promote() == nullptr) {
+            it = mStreams.erase(it);
+        } else {
+            ++it;
+        }
+    }
+}
+
 } // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 989c1f5..7a712df 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -21,6 +21,7 @@
 #include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
 #include <media/audiohal/DeviceHalInterface.h>
 #include <media/audiohal/EffectHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
 
 #include "CoreConversionHelperHidl.h"
 
@@ -127,10 +128,7 @@
         return INVALID_OPERATION;
     }
 
-    int32_t supportsBluetoothVariableLatency(bool* supports __unused) override {
-        // TODO: Implement the HAL query when moving to AIDL HAL.
-        return INVALID_OPERATION;
-    }
+    status_t supportsBluetoothVariableLatency(bool* supports) override;
 
     status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
 
@@ -148,6 +146,9 @@
 
     status_t prepareToDisconnectExternalDevice(const struct audio_port_v7* port) override;
 
+    status_t getAudioMixPort(const struct audio_port_v7* devicePort,
+                             struct audio_port_v7* mixPort) override;
+
   private:
     friend class DevicesFactoryHalHidl;
     sp<::android::hardware::audio::CPP_VERSION::IDevice> mDevice;
@@ -157,12 +158,15 @@
     class SoundDoseWrapper;
     const std::unique_ptr<SoundDoseWrapper> mSoundDoseWrapper;
     std::set<audio_port_handle_t> mDeviceDisconnectionNotified;
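+    // Opened streams keyed by I/O handle; stored as weak pointers and pruned lazily by
+    // cleanupStreams().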
+    std::map<audio_io_handle_t, wp<StreamHalInterface>> mStreams;
 
     // Can not be constructed directly by clients.
     explicit DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device);
     explicit DeviceHalHidl(
             const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& device);
 
+    void cleanupStreams();
+
     // The destructor automatically closes the device.
     virtual ~DeviceHalHidl();
 
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index a8f9f7e..3dbc14a 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -24,7 +24,6 @@
 
 #include <aidl/android/hardware/audio/core/IModule.h>
 #include <android/binder_manager.h>
-#include <binder/IServiceManager.h>
 #include <media/AidlConversionNdkCpp.h>
 #include <media/AidlConversionUtil.h>
 #include <utils/Log.h>
@@ -115,29 +114,6 @@
     return OK;
 }
 
-status_t DevicesFactoryHalAidl::getHalPids(std::vector<pid_t> *pids) {
-    if (pids == nullptr) {
-        return BAD_VALUE;
-    }
-    // The functionality for retrieving debug infos of services is not exposed via the NDK.
-    sp<IServiceManager> sm = defaultServiceManager();
-    if (sm == nullptr) {
-        return NO_INIT;
-    }
-    std::set<pid_t> pidsSet;
-    const auto moduleServiceName = std::string(IModule::descriptor) + "/";
-    auto debugInfos = sm->getServiceDebugInfo();
-    for (const auto& info : debugInfos) {
-        if (info.pid > 0 &&
-                info.name.size() > moduleServiceName.size() && // '>' as there must be instance name
-                info.name.substr(0, moduleServiceName.size()) == moduleServiceName) {
-            pidsSet.insert(info.pid);
-        }
-    }
-    *pids = {pidsSet.begin(), pidsSet.end()};
-    return NO_ERROR;
-}
-
 status_t DevicesFactoryHalAidl::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
     // Dynamic registration of module instances is not supported. The functionality
     // in the audio server which is related to this callback can be removed together
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.h b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
index 97e3796..17bfe43 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
@@ -35,8 +35,6 @@
     // necessary to release references to the returned object.
     status_t openDevice(const char *name, sp<DeviceHalInterface> *device) override;
 
-    status_t getHalPids(std::vector<pid_t> *pids) override;
-
     status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
 
     android::detail::AudioHalVersionInfo getHalVersion() const override;
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index eef60b5..1cac9da 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -163,29 +163,6 @@
     return BAD_VALUE;
 }
 
-status_t DevicesFactoryHalHidl::getHalPids(std::vector<pid_t> *pids) {
-    std::set<pid_t> pidsSet;
-    auto factories = copyDeviceFactories();
-    for (const auto& factory : factories) {
-        using ::android::hidl::base::V1_0::DebugInfo;
-
-        DebugInfo debugInfo;
-        auto ret = factory->getDebugInfo([&] (const auto &info) {
-               debugInfo = info;
-            });
-        if (!ret.isOk()) {
-           return INVALID_OPERATION;
-        }
-        if (debugInfo.pid == (int)IServiceManager::PidConstant::NO_PID) {
-            continue;
-        }
-        pidsSet.insert(debugInfo.pid);
-    }
-
-    *pids = {pidsSet.begin(), pidsSet.end()};
-    return NO_ERROR;
-}
-
 status_t DevicesFactoryHalHidl::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
     ALOG_ASSERT(callback != nullptr);
     bool needToCallCallback = false;
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.h b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
index 3285af7..e38d86d 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
@@ -43,8 +43,6 @@
     // necessary to release references to the returned object.
     status_t openDevice(const char *name, sp<DeviceHalInterface> *device) override;
 
-    status_t getHalPids(std::vector<pid_t> *pids) override;
-
     status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
 
     android::detail::AudioHalVersionInfo getHalVersion() const override;
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index 196b432..39999a5 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -180,18 +180,6 @@
 
     State state;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
-    // in case of buffer/ioHandle re-configure for an opened effect, close it and re-open
-    if (state != State::INIT && mCommon != common) {
-        ALOGI("%s at state %s, common parameter change from %s to %s, closing effect", __func__,
-              android::internal::ToString(state).c_str(), mCommon.toString().c_str(),
-              common.toString().c_str());
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->close()));
-        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
-        mStatusQ.reset();
-        mInputQ.reset();
-        mOutputQ.reset();
-    }
-
     if (state == State::INIT) {
         ALOGI("%s at state %s, opening effect with input %s output %s", __func__,
               android::internal::ToString(state).c_str(), common.input.toString().c_str(),
@@ -199,16 +187,7 @@
         IEffect::OpenEffectReturn openReturn;
         RETURN_STATUS_IF_ERROR(
                 statusTFromBinderStatus(mEffect->open(common, std::nullopt, &openReturn)));
-
-        if (mIsProxyEffect) {
-            mStatusQ = std::static_pointer_cast<EffectProxy>(mEffect)->getStatusMQ();
-            mInputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getInputMQ();
-            mOutputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getOutputMQ();
-        } else {
-            mStatusQ = std::make_shared<StatusMQ>(openReturn.statusMQ);
-            mInputQ = std::make_shared<DataMQ>(openReturn.inputDataMQ);
-            mOutputQ = std::make_shared<DataMQ>(openReturn.outputDataMQ);
-        }
+        updateMqs(openReturn);
 
         if (status_t status = updateEventFlags(); status != OK) {
             ALOGV("%s closing at status %d", __func__, status);
@@ -225,6 +204,18 @@
     return *static_cast<int32_t*>(pReplyData) = OK;
 }
 
+void EffectConversionHelperAidl::updateMqs(const IEffect::OpenEffectReturn& ret) {
+    if (mIsProxyEffect) {
+        mStatusQ = std::static_pointer_cast<EffectProxy>(mEffect)->getStatusMQ();
+        mInputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getInputMQ();
+        mOutputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getOutputMQ();
+    } else {
+        mStatusQ = std::make_shared<StatusMQ>(ret.statusMQ);
+        mInputQ = std::make_shared<DataMQ>(ret.inputDataMQ);
+        mOutputQ = std::make_shared<DataMQ>(ret.outputDataMQ);
+    }
+}
+
 status_t EffectConversionHelperAidl::handleGetConfig(uint32_t cmdSize __unused,
                                                      const void* pCmdData __unused,
                                                      uint32_t* replySize, void* pReplyData) {
@@ -517,5 +508,13 @@
     return desc;
 }
 
+status_t EffectConversionHelperAidl::reopen() {
+    IEffect::OpenEffectReturn openReturn;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->reopen(&openReturn)));
+
+    updateMqs(openReturn);
+    return OK;
+}
+
 }  // namespace effect
 }  // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 5db334c..8b9efb3 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -47,6 +47,7 @@
     bool isBypassingOrTunnel() const;
 
     ::aidl::android::hardware::audio::effect::Descriptor getDescriptor() const;
+    status_t reopen();
 
   protected:
     const int32_t mSessionId;
@@ -108,6 +109,8 @@
     std::shared_ptr<android::hardware::EventFlag> mEfGroup = nullptr;
     status_t updateEventFlags();
 
+    void updateMqs(const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn& ret);
+
     status_t handleInit(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
                         void* pReplyData);
     status_t handleSetConfig(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index f26444c..2836727 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -56,6 +56,7 @@
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
 using ::aidl::android::hardware::audio::effect::State;
 
 namespace android {
@@ -165,26 +166,37 @@
 
 // write to input FMQ here, wait for statusMQ STATUS_OK, and read from output FMQ
 status_t EffectHalAidl::process() {
+    const std::string effectName = mConversion->getDescriptor().common.name;
     State state = State::INIT;
     if (mConversion->isBypassing() || !mEffect->getState(&state).isOk() ||
         state != State::PROCESSING) {
-        ALOGI("%s skipping %s process because it's %s", __func__,
-              mConversion->getDescriptor().common.name.c_str(),
+        ALOGI("%s skipping %s process because it's %s", __func__, effectName.c_str(),
               mConversion->isBypassing()
                       ? "bypassing"
                       : aidl::android::hardware::audio::effect::toString(state).c_str());
         return -ENODATA;
     }
 
+    // Check whether the data MQs need an update; time out after 1 ns to avoid blocking.
+    auto efGroup = mConversion->getEventFlagGroup();
+    if (!efGroup) {
+        ALOGE("%s invalid efGroup", __func__);
+        return INVALID_OPERATION;
+    }
+
+    if (uint32_t efState = 0;
+        ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState, 1 /* ns */,
+                                       true /* retry */)) {
+        ALOGI("%s %s receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str());
+        mConversion->reopen();
+    }
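+    // Note: mConversion->reopen() re-creates the status/input/output MQs (see
+    // EffectConversionHelperAidl::reopen), so the queues are fetched below only after this check.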
     auto statusQ = mConversion->getStatusMQ();
     auto inputQ = mConversion->getInputMQ();
     auto outputQ = mConversion->getOutputMQ();
-    auto efGroup = mConversion->getEventFlagGroup();
     if (!statusQ || !statusQ->isValid() || !inputQ || !inputQ->isValid() || !outputQ ||
-        !outputQ->isValid() || !efGroup) {
-        ALOGE("%s invalid FMQ [Status %d I %d O %d] efGroup %p", __func__,
-              statusQ ? statusQ->isValid() : 0, inputQ ? inputQ->isValid() : 0,
-              outputQ ? outputQ->isValid() : 0, efGroup.get());
+        !outputQ->isValid()) {
+        ALOGE("%s invalid FMQ [Status %d I %d O %d]", __func__, statusQ ? statusQ->isValid() : 0,
+              inputQ ? inputQ->isValid() : 0, outputQ ? outputQ->isValid() : 0);
         return INVALID_OPERATION;
     }
 
@@ -225,8 +237,8 @@
         return INVALID_OPERATION;
     }
 
-    ALOGD("%s %s consumed %zu produced %zu", __func__,
-          mConversion->getDescriptor().common.name.c_str(), floatsToWrite, floatsToRead);
+    ALOGD("%s %s consumed %zu produced %zu", __func__, effectName.c_str(), floatsToWrite,
+          floatsToRead);
     return OK;
 }
 
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
index 1099b6d..d73a36c 100644
--- a/media/libaudiohal/impl/EffectProxy.cpp
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -106,8 +106,8 @@
 ndk::ScopedAStatus EffectProxy::open(const Parameter::Common& common,
                                      const std::optional<Parameter::Specific>& specific,
                                      IEffect::OpenEffectReturn* ret __unused) {
-    ndk::ScopedAStatus status = ndk::ScopedAStatus::fromExceptionCodeWithMessage(
-            EX_ILLEGAL_ARGUMENT, "nullEffectHandle");
+    ndk::ScopedAStatus status =
+            ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_STATE, "nullEffectHandle");
     for (auto& sub : mSubEffects) {
         IEffect::OpenEffectReturn openReturn;
         if (!sub.handle || !(status = sub.handle->open(common, specific, &openReturn)).isOk()) {
@@ -130,6 +130,31 @@
     return status;
 }
 
+ndk::ScopedAStatus EffectProxy::reopen(OpenEffectReturn* ret __unused) {
+    ndk::ScopedAStatus status =
+            ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_STATE, "nullEffectHandle");
+    for (auto& sub : mSubEffects) {
+        IEffect::OpenEffectReturn openReturn;
+        if (!sub.handle || !(status = sub.handle->reopen(&openReturn)).isOk()) {
+            ALOGE("%s: failed to open %p UUID %s", __func__, sub.handle.get(),
+                  ::android::audio::utils::toString(sub.descriptor.common.id.uuid).c_str());
+            break;
+        }
+        sub.effectMq.statusQ = std::make_shared<StatusMQ>(openReturn.statusMQ);
+        sub.effectMq.inputQ = std::make_shared<DataMQ>(openReturn.inputDataMQ);
+        sub.effectMq.outputQ = std::make_shared<DataMQ>(openReturn.outputDataMQ);
+    }
+
+    // Close all opened sub-effects on failure.
+    if (!status.isOk()) {
+        ALOGE("%s: closing all sub-effects with error %s", __func__,
+              status.getDescription().c_str());
+        close();
+    }
+
+    return status;
+}
+
 ndk::ScopedAStatus EffectProxy::close() {
     return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
         return effect->close();
@@ -162,7 +187,11 @@
 
 Descriptor::Common EffectProxy::buildDescriptorCommon(
         const AudioUuid& uuid, const std::vector<Descriptor>& subEffectDescs) {
-    Descriptor::Common common;
+    // Initial flag values before we know which sub-effect to activate (via setOffloadParam);
+    // aligned with the HIDL EffectProxy flags.
+    Descriptor::Common common = {.flags = {.type = Flags::Type::INSERT,
+                                           .insert = Flags::Insert::LAST,
+                                           .volume = Flags::Volume::CTRL}};
 
     for (const auto& desc : subEffectDescs) {
         if (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL) {
@@ -174,14 +203,12 @@
         common.flags.deviceIndication |= desc.common.flags.deviceIndication;
         common.flags.audioModeIndication |= desc.common.flags.audioModeIndication;
         common.flags.audioSourceIndication |= desc.common.flags.audioSourceIndication;
+        // Set to NONE if any sub-effect does not support volume control commands.
+        if (desc.common.flags.volume == Flags::Volume::NONE) {
+            common.flags.volume = Flags::Volume::NONE;
+        }
     }
 
-    // initial flag values before we know which sub-effect to active (with setOffloadParam)
-    // same as HIDL EffectProxy flags
-    common.flags.type = Flags::Type::INSERT;
-    common.flags.insert = Flags::Insert::LAST;
-    common.flags.volume = Flags::Volume::CTRL;
-
     // copy type UUID from any of sub-effects, all sub-effects should have same type
     common.id.type = subEffectDescs[0].common.id.type;
     // replace implementation UUID with proxy UUID.
diff --git a/media/libaudiohal/impl/EffectProxy.h b/media/libaudiohal/impl/EffectProxy.h
index 0d62642..9b9e8f1 100644
--- a/media/libaudiohal/impl/EffectProxy.h
+++ b/media/libaudiohal/impl/EffectProxy.h
@@ -62,6 +62,8 @@
                     specific,
             ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn* ret) override;
     ndk::ScopedAStatus close() override;
+    ndk::ScopedAStatus reopen(
+            ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn* ret) override;
     ndk::ScopedAStatus getDescriptor(
             ::aidl::android::hardware::audio::effect::Descriptor* desc) override;
     ndk::ScopedAStatus command(::aidl::android::hardware::audio::effect::CommandId id) override;
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 87aaeac..7d807b2 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -243,7 +243,7 @@
                                 [&](const auto& desc) { return desc.common.id.uuid == uuid; });
     if (matchIt == list.end()) {
         ALOGE("%s UUID not found in HAL and proxy list %s", __func__, toString(uuid).c_str());
-        return BAD_VALUE;
+        return NAME_NOT_FOUND;
     }
     ALOGI("%s UUID impl found %s", __func__, toString(uuid).c_str());
 
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
new file mode 100644
index 0000000..d4024a2
--- /dev/null
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -0,0 +1,1093 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Hal2AidlMapper"
+// #define LOG_NDEBUG 0
+
+#include <algorithm>
+
+#include <media/audiohal/StreamHalInterface.h>
+#include <error/expected_utils.h>
+#include <system/audio.h>  // For AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS
+#include <Utils.h>
+#include <utils/Log.h>
+
+#include "Hal2AidlMapper.h"
+
+using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::AudioConfig;
+using aidl::android::media::audio::common::AudioConfigBase;
+using aidl::android::media::audio::common::AudioDevice;
+using aidl::android::media::audio::common::AudioDeviceAddress;
+using aidl::android::media::audio::common::AudioDeviceDescription;
+using aidl::android::media::audio::common::AudioDeviceType;
+using aidl::android::media::audio::common::AudioFormatDescription;
+using aidl::android::media::audio::common::AudioFormatType;
+using aidl::android::media::audio::common::AudioInputFlags;
+using aidl::android::media::audio::common::AudioIoFlags;
+using aidl::android::media::audio::common::AudioOutputFlags;
+using aidl::android::media::audio::common::AudioPort;
+using aidl::android::media::audio::common::AudioPortConfig;
+using aidl::android::media::audio::common::AudioPortDeviceExt;
+using aidl::android::media::audio::common::AudioPortExt;
+using aidl::android::media::audio::common::AudioPortMixExt;
+using aidl::android::media::audio::common::AudioPortMixExtUseCase;
+using aidl::android::media::audio::common::AudioProfile;
+using aidl::android::media::audio::common::AudioSource;
+using aidl::android::media::audio::common::Int;
+using aidl::android::hardware::audio::common::isBitPositionFlagSet;
+using aidl::android::hardware::audio::common::isDefaultAudioFormat;
+using aidl::android::hardware::audio::common::makeBitPositionFlagMask;
+using aidl::android::hardware::audio::core::AudioPatch;
+using aidl::android::hardware::audio::core::AudioRoute;
+using aidl::android::hardware::audio::core::IModule;
+
+namespace android {
+
+namespace {
+
+bool isConfigEqualToPortConfig(const AudioConfig& config, const AudioPortConfig& portConfig) {
+    return portConfig.sampleRate.value().value == config.base.sampleRate &&
+            portConfig.channelMask.value() == config.base.channelMask &&
+            portConfig.format.value() == config.base.format;
+}
+
+AudioConfig* setConfigFromPortConfig(AudioConfig* config, const AudioPortConfig& portConfig) {
+    config->base.sampleRate = portConfig.sampleRate.value().value;
+    config->base.channelMask = portConfig.channelMask.value();
+    config->base.format = portConfig.format.value();
+    return config;
+}
+
+void setPortConfigFromConfig(AudioPortConfig* portConfig, const AudioConfig& config) {
+    if (config.base.sampleRate != 0) {
+        portConfig->sampleRate = Int{ .value = config.base.sampleRate };
+    }
+    if (config.base.channelMask != AudioChannelLayout{}) {
+        portConfig->channelMask = config.base.channelMask;
+    }
+    if (config.base.format != AudioFormatDescription{}) {
+        portConfig->format = config.base.format;
+    }
+}
+
+bool containHapticChannel(AudioChannelLayout channel) {
+    return channel.getTag() == AudioChannelLayout::Tag::layoutMask &&
+            ((channel.get<AudioChannelLayout::Tag::layoutMask>()
+                    & AudioChannelLayout::CHANNEL_HAPTIC_A)
+                    == AudioChannelLayout::CHANNEL_HAPTIC_A ||
+             (channel.get<AudioChannelLayout::Tag::layoutMask>()
+                    & AudioChannelLayout::CHANNEL_HAPTIC_B)
+                    == AudioChannelLayout::CHANNEL_HAPTIC_B);
+}
+
+}  // namespace
+
+Hal2AidlMapper::Hal2AidlMapper(const std::string& instance, const std::shared_ptr<IModule>& module)
+        : mInstance(instance), mModule(module) {
+}
+
+void Hal2AidlMapper::addStream(
+        const sp<StreamHalInterface>& stream, int32_t portConfigId, int32_t patchId) {
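+    // Remember which port config and patch this stream uses; createOrUpdatePatch may later
+    // transfer the patch ownership away from the stream (see below).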
+    mStreams.insert(std::pair(stream, std::pair(portConfigId, patchId)));
+}
+
+bool Hal2AidlMapper::audioDeviceMatches(const AudioDevice& device, const AudioPort& p) {
+    if (p.ext.getTag() != AudioPortExt::Tag::device) return false;
+    return p.ext.get<AudioPortExt::Tag::device>().device == device;
+}
+
+bool Hal2AidlMapper::audioDeviceMatches(const AudioDevice& device, const AudioPortConfig& p) {
+    if (p.ext.getTag() != AudioPortExt::Tag::device) return false;
+    if (device.type.type == AudioDeviceType::IN_DEFAULT) {
+        return p.portId == mDefaultInputPortId;
+    } else if (device.type.type == AudioDeviceType::OUT_DEFAULT) {
+        return p.portId == mDefaultOutputPortId;
+    }
+    return p.ext.get<AudioPortExt::Tag::device>().device == device;
+}
+
+status_t Hal2AidlMapper::createOrUpdatePatch(
+        const std::vector<AudioPortConfig>& sources,
+        const std::vector<AudioPortConfig>& sinks,
+        int32_t* patchId, Cleanups* cleanups) {
+    auto existingPatchIt = *patchId != 0 ? mPatches.find(*patchId): mPatches.end();
+    AudioPatch patch;
+    if (existingPatchIt != mPatches.end()) {
+        patch = existingPatchIt->second;
+        patch.sourcePortConfigIds.clear();
+        patch.sinkPortConfigIds.clear();
+    }
+    // The IDs will be filled in by 'fillPortConfigs'; however, the original 'sources' and
+    // 'sinks' will not be updated because 'setAudioPatch' only needs the IDs. Here we log
+    // the source arguments, where only the audio configuration and device specifications
+    // are relevant.
+    ALOGD("%s: [disregard IDs] sources: %s, sinks: %s",
+            __func__, ::android::internal::ToString(sources).c_str(),
+            ::android::internal::ToString(sinks).c_str());
+    auto fillPortConfigs = [&](
+            const std::vector<AudioPortConfig>& configs,
+            const std::set<int32_t>& destinationPortIds,
+            std::vector<int32_t>* ids, std::set<int32_t>* portIds) -> status_t {
+        for (const auto& s : configs) {
+            AudioPortConfig portConfig;
+            if (status_t status = setPortConfig(
+                            s, destinationPortIds, &portConfig, cleanups); status != OK) {
+                if (s.ext.getTag() == AudioPortExt::mix) {
+                    // See b/315528763. Although the framework knows the actual format of
+                    // the mix port, it still uses the original format. Luckily, the I/O
+                    // handle can be used to find the mix port.
+                    ALOGI("fillPortConfigs: retrying to find a mix port config with default "
+                            "configuration");
+                    if (auto it = findPortConfig(std::nullopt, s.flags,
+                                    s.ext.get<AudioPortExt::mix>().handle);
+                            it != mPortConfigs.end()) {
+                        portConfig = it->second;
+                    } else {
+                        const std::string flags = s.flags.has_value() ?
+                                s.flags->toString() : "<unspecified>";
+                        ALOGE("fillPortConfigs: existing port config for flags %s, handle %d "
+                                "not found in module %s", flags.c_str(),
+                                s.ext.get<AudioPortExt::mix>().handle, mInstance.c_str());
+                        return BAD_VALUE;
+                    }
+                } else {
+                    return status;
+                }
+            }
+            LOG_ALWAYS_FATAL_IF(portConfig.id == 0,
+                    "fillPortConfigs: initial config: %s, port config: %s",
+                    s.toString().c_str(), portConfig.toString().c_str());
+            ids->push_back(portConfig.id);
+            if (portIds != nullptr) {
+                portIds->insert(portConfig.portId);
+            }
+        }
+        return OK;
+    };
+    // When looking up port configs, the destinationPortId is only used for mix ports.
+    // Thus, we process device port configs first, and look up the destination port ID from them.
+    bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+            [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
+    const std::vector<AudioPortConfig>& devicePortConfigs =
+            sourceIsDevice ? sources : sinks;
+    std::vector<int32_t>* devicePortConfigIds =
+            sourceIsDevice ? &patch.sourcePortConfigIds : &patch.sinkPortConfigIds;
+    const std::vector<AudioPortConfig>& mixPortConfigs =
+            sourceIsDevice ? sinks : sources;
+    std::vector<int32_t>* mixPortConfigIds =
+            sourceIsDevice ? &patch.sinkPortConfigIds : &patch.sourcePortConfigIds;
+    std::set<int32_t> devicePortIds;
+    RETURN_STATUS_IF_ERROR(fillPortConfigs(
+                    devicePortConfigs, std::set<int32_t>(), devicePortConfigIds, &devicePortIds));
+    RETURN_STATUS_IF_ERROR(fillPortConfigs(
+                    mixPortConfigs, devicePortIds, mixPortConfigIds, nullptr));
+    if (existingPatchIt != mPatches.end()) {
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                        mModule->setAudioPatch(patch, &patch)));
+        existingPatchIt->second = patch;
+    } else {
+        bool created = false;
+        RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, &patch, &created));
+        // No cleanup of the patch is needed; it is managed by the framework.
+        *patchId = patch.id;
+        if (!created) {
+            // The framework might have "created" a patch which already existed due to
+            // stream creation. The stream's ownership of the patch needs to be released.
+            for (auto& s : mStreams) {
+                if (s.second.second == patch.id) s.second.second = -1;
+            }
+        }
+    }
+    return OK;
+}
+
+status_t Hal2AidlMapper::createOrUpdatePortConfig(
+        const AudioPortConfig& requestedPortConfig, AudioPortConfig* result, bool* created) {
+    bool applied = false;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->setAudioPortConfig(
+                            requestedPortConfig, result, &applied)));
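+    // If the config was not applied, the HAL returned a suggested config in 'result'; signal this
+    // to the caller by clearing the id (createOrUpdatePortConfigRetry retries with the suggestion).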
+    if (!applied) {
+        result->id = 0;
+        *created = false;
+        return OK;
+    }
+
+    int32_t id = result->id;
+    if (requestedPortConfig.id != 0 && requestedPortConfig.id != id) {
+        LOG_ALWAYS_FATAL("%s: requested port config id %d changed to %d", __func__,
+                requestedPortConfig.id, id);
+    }
+
+    auto [_, inserted] = mPortConfigs.insert_or_assign(id, *result);
+    *created = inserted;
+    return OK;
+}
+
+status_t Hal2AidlMapper::createOrUpdatePortConfigRetry(
+        const AudioPortConfig& requestedPortConfig, AudioPortConfig* result, bool* created) {
+    AudioPortConfig suggestedOrAppliedPortConfig;
+    RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig,
+                    &suggestedOrAppliedPortConfig, created));
+    if (suggestedOrAppliedPortConfig.id == 0) {
+        // Try again with the suggested config
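+        // Restore the originally requested id so that the retry targets the same port config.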
+        suggestedOrAppliedPortConfig.id = requestedPortConfig.id;
+        AudioPortConfig appliedPortConfig;
+        RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(suggestedOrAppliedPortConfig,
+                        &appliedPortConfig, created));
+        if (appliedPortConfig.id == 0) {
+            ALOGE("%s: module %s did not apply suggested config %s", __func__,
+                    mInstance.c_str(), suggestedOrAppliedPortConfig.toString().c_str());
+            return NO_INIT;
+        }
+        *result = appliedPortConfig;
+    } else {
+        *result = suggestedOrAppliedPortConfig;
+    }
+    return OK;
+}
+
+void Hal2AidlMapper::eraseConnectedPort(int32_t portId) {
+    mPorts.erase(portId);
+    mConnectedPorts.erase(portId);
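+    // If a replacement port was registered for this port id, restore it into mPorts now that
+    // the connected port is removed.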
+    if (mDisconnectedPortReplacement.first == portId) {
+        const auto& port = mDisconnectedPortReplacement.second;
+        mPorts.insert(std::make_pair(port.id, port));
+        ALOGD("%s: disconnected port replacement: %s", __func__, port.toString().c_str());
+        mDisconnectedPortReplacement = std::pair<int32_t, AudioPort>();
+    }
+    updateDynamicMixPorts();
+}
+
+status_t Hal2AidlMapper::findOrCreatePatch(
+        const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
+    std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
+            requestedPatch.sourcePortConfigIds.end());
+    std::set<int32_t> sinkPortConfigIds(requestedPatch.sinkPortConfigIds.begin(),
+            requestedPatch.sinkPortConfigIds.end());
+    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, patch, created);
+}
+
+status_t Hal2AidlMapper::findOrCreatePatch(
+        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
+        AudioPatch* patch, bool* created) {
+    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds);
+    if (patchIt == mPatches.end()) {
+        AudioPatch requestedPatch, appliedPatch;
+        requestedPatch.sourcePortConfigIds.insert(requestedPatch.sourcePortConfigIds.end(),
+                sourcePortConfigIds.begin(), sourcePortConfigIds.end());
+        requestedPatch.sinkPortConfigIds.insert(requestedPatch.sinkPortConfigIds.end(),
+                sinkPortConfigIds.begin(), sinkPortConfigIds.end());
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->setAudioPatch(
+                                requestedPatch, &appliedPatch)));
+        patchIt = mPatches.insert(mPatches.end(), std::make_pair(appliedPatch.id, appliedPatch));
+        *created = true;
+    } else {
+        *created = false;
+    }
+    *patch = patchIt->second;
+    return OK;
+}
+
+status_t Hal2AidlMapper::findOrCreateDevicePortConfig(
+        const AudioDevice& device, const AudioConfig* config, AudioPortConfig* portConfig,
+        bool* created) {
+    if (auto portConfigIt = findPortConfig(device); portConfigIt == mPortConfigs.end()) {
+        auto portsIt = findPort(device);
+        if (portsIt == mPorts.end()) {
+            ALOGE("%s: device port for device %s is not found in the module %s",
+                    __func__, device.toString().c_str(), mInstance.c_str());
+            return BAD_VALUE;
+        }
+        AudioPortConfig requestedPortConfig;
+        requestedPortConfig.portId = portsIt->first;
+        if (config != nullptr) {
+            setPortConfigFromConfig(&requestedPortConfig, *config);
+        }
+        return createOrUpdatePortConfigRetry(requestedPortConfig, portConfig, created);
+    } else {
+        AudioPortConfig requestedPortConfig = portConfigIt->second;
+        if (config != nullptr) {
+            setPortConfigFromConfig(&requestedPortConfig, *config);
+        }
+
+        if (requestedPortConfig != portConfigIt->second) {
+            return createOrUpdatePortConfigRetry(requestedPortConfig, portConfig, created);
+        } else {
+            *portConfig = portConfigIt->second;
+            *created = false;
+        }
+    }
+    return OK;
+}
+
+status_t Hal2AidlMapper::findOrCreateMixPortConfig(
+        const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle,
+        AudioSource source, const std::set<int32_t>& destinationPortIds,
+        AudioPortConfig* portConfig, bool* created) {
+    // These flags get removed one by one in this order when retrying port finding.
+    static const std::vector<AudioInputFlags> kOptionalInputFlags{
+        AudioInputFlags::FAST, AudioInputFlags::RAW, AudioInputFlags::VOIP_TX };
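+    // For example, a FAST | RAW input request with no exact match is retried without FAST,
+    // and then without RAW.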
+    if (auto portConfigIt = findPortConfig(config, flags, ioHandle);
+            portConfigIt == mPortConfigs.end() && flags.has_value()) {
+        auto optionalInputFlagsIt = kOptionalInputFlags.begin();
+        AudioIoFlags matchFlags = flags.value();
+        auto portsIt = findPort(config, matchFlags, destinationPortIds);
+        while (portsIt == mPorts.end() && matchFlags.getTag() == AudioIoFlags::Tag::input
+                && optionalInputFlagsIt != kOptionalInputFlags.end()) {
+            if (!isBitPositionFlagSet(
+                            matchFlags.get<AudioIoFlags::Tag::input>(), *optionalInputFlagsIt)) {
+                ++optionalInputFlagsIt;
+                continue;
+            }
+            matchFlags.set<AudioIoFlags::Tag::input>(matchFlags.get<AudioIoFlags::Tag::input>() &
+                    ~makeBitPositionFlagMask(*optionalInputFlagsIt++));
+            portsIt = findPort(config, matchFlags, destinationPortIds);
+            ALOGI("%s: mix port for config %s, flags %s was not found in the module %s, "
+                    "retried with flags %s", __func__, config.toString().c_str(),
+                    flags.value().toString().c_str(), mInstance.c_str(),
+                    matchFlags.toString().c_str());
+        }
+        if (portsIt == mPorts.end()) {
+            ALOGE("%s: mix port for config %s, flags %s is not found in the module %s",
+                    __func__, config.toString().c_str(), matchFlags.toString().c_str(),
+                    mInstance.c_str());
+            return BAD_VALUE;
+        }
+        AudioPortConfig requestedPortConfig;
+        requestedPortConfig.portId = portsIt->first;
+        setPortConfigFromConfig(&requestedPortConfig, config);
+        requestedPortConfig.flags = portsIt->second.flags;
+        requestedPortConfig.ext = AudioPortMixExt{ .handle = ioHandle };
+        if (matchFlags.getTag() == AudioIoFlags::Tag::input
+                && source != AudioSource::SYS_RESERVED_INVALID) {
+            requestedPortConfig.ext.get<AudioPortExt::Tag::mix>().usecase =
+                    AudioPortMixExtUseCase::make<AudioPortMixExtUseCase::Tag::source>(source);
+        }
+        return createOrUpdatePortConfig(requestedPortConfig, portConfig, created);
+    } else if (portConfigIt == mPortConfigs.end() && !flags.has_value()) {
+        ALOGW("%s: mix port config for %s, handle %d not found in the module %s, "
+                "and was not created as flags are not specified",
+                __func__, config.toString().c_str(), ioHandle, mInstance.c_str());
+        return BAD_VALUE;
+    } else {
+        AudioPortConfig requestedPortConfig = portConfigIt->second;
+        setPortConfigFromConfig(&requestedPortConfig, config);
+
+        AudioPortMixExt& mixExt = requestedPortConfig.ext.get<AudioPortExt::Tag::mix>();
+        if (mixExt.usecase.getTag() == AudioPortMixExtUseCase::Tag::source &&
+                source != AudioSource::SYS_RESERVED_INVALID) {
+            mixExt.usecase.get<AudioPortMixExtUseCase::Tag::source>() = source;
+        }
+
+        if (requestedPortConfig != portConfigIt->second) {
+            return createOrUpdatePortConfig(requestedPortConfig, portConfig, created);
+        } else {
+            *portConfig = portConfigIt->second;
+            *created = false;
+        }
+    }
+    return OK;
+}
+
+status_t Hal2AidlMapper::findOrCreatePortConfig(
+        const AudioPortConfig& requestedPortConfig, const std::set<int32_t>& destinationPortIds,
+        AudioPortConfig* portConfig, bool* created) {
+    using Tag = AudioPortExt::Tag;
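+    // Dispatch on the port config kind: mix port configs are resolved via
+    // findOrCreateMixPortConfig, device port configs via findOrCreateDevicePortConfig.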
+    if (requestedPortConfig.ext.getTag() == Tag::mix) {
+        if (const auto& p = requestedPortConfig;
+                !p.sampleRate.has_value() || !p.channelMask.has_value() ||
+                !p.format.has_value()) {
+            ALOGW("%s: provided mix port config is not fully specified: %s",
+                    __func__, p.toString().c_str());
+            return BAD_VALUE;
+        }
+        AudioConfig config;
+        setConfigFromPortConfig(&config, requestedPortConfig);
+        AudioSource source = requestedPortConfig.ext.get<Tag::mix>().usecase.getTag() ==
+                AudioPortMixExtUseCase::Tag::source ?
+                requestedPortConfig.ext.get<Tag::mix>().usecase.
+                get<AudioPortMixExtUseCase::Tag::source>() : AudioSource::SYS_RESERVED_INVALID;
+        return findOrCreateMixPortConfig(config, requestedPortConfig.flags,
+                requestedPortConfig.ext.get<Tag::mix>().handle, source, destinationPortIds,
+                portConfig, created);
+    } else if (requestedPortConfig.ext.getTag() == Tag::device) {
+        if (const auto& p = requestedPortConfig;
+                p.sampleRate.has_value() && p.channelMask.has_value() &&
+                p.format.has_value()) {
+            AudioConfig config;
+            setConfigFromPortConfig(&config, requestedPortConfig);
+            return findOrCreateDevicePortConfig(
+                    requestedPortConfig.ext.get<Tag::device>().device, &config,
+                    portConfig, created);
+        } else {
+            return findOrCreateDevicePortConfig(
+                    requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
+                    portConfig, created);
+        }
+    }
+    ALOGW("%s: unsupported audio port config: %s",
+            __func__, requestedPortConfig.toString().c_str());
+    return BAD_VALUE;
+}
+
+status_t Hal2AidlMapper::findPortConfig(const AudioDevice& device, AudioPortConfig* portConfig) {
+    if (auto it = findPortConfig(device); it != mPortConfigs.end()) {
+        *portConfig = it->second;
+        return OK;
+    }
+    ALOGE("%s: could not find a configured device port for device %s",
+            __func__, device.toString().c_str());
+    return BAD_VALUE;
+}
+
+Hal2AidlMapper::Patches::iterator Hal2AidlMapper::findPatch(
+        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds) {
+    return std::find_if(mPatches.begin(), mPatches.end(),
+            [&](const auto& pair) {
+                const auto& p = pair.second;
+                std::set<int32_t> patchSrcs(
+                        p.sourcePortConfigIds.begin(), p.sourcePortConfigIds.end());
+                std::set<int32_t> patchSinks(
+                        p.sinkPortConfigIds.begin(), p.sinkPortConfigIds.end());
+                return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks; });
+}
+
+Hal2AidlMapper::Ports::iterator Hal2AidlMapper::findPort(const AudioDevice& device) {
+    if (device.type.type == AudioDeviceType::IN_DEFAULT) {
+        return mPorts.find(mDefaultInputPortId);
+    } else if (device.type.type == AudioDeviceType::OUT_DEFAULT) {
+        return mPorts.find(mDefaultOutputPortId);
+    }
+    if (device.address.getTag() != AudioDeviceAddress::id ||
+            !device.address.get<AudioDeviceAddress::id>().empty()) {
+        return std::find_if(mPorts.begin(), mPorts.end(),
+                [&](const auto& pair) { return audioDeviceMatches(device, pair.second); });
+    }
+    // For a connection without an address, two ports can be found: the template port
+    // and a connected port (if one exists). Make sure we return the connected port.
+    Hal2AidlMapper::Ports::iterator portIt = mPorts.end();
+    for (auto it = mPorts.begin(); it != mPorts.end(); ++it) {
+        if (audioDeviceMatches(device, it->second)) {
+            if (mConnectedPorts.find(it->first) != mConnectedPorts.end()) {
+                return it;
+            } else {
+                // Will return 'it' if there is no connected port.
+                portIt = it;
+            }
+        }
+    }
+    return portIt;
+}
+
+Hal2AidlMapper::Ports::iterator Hal2AidlMapper::findPort(
+            const AudioConfig& config, const AudioIoFlags& flags,
+            const std::set<int32_t>& destinationPortIds) {
+    auto channelMaskMatches = [](const std::vector<AudioChannelLayout>& channelMasks,
+                                 const AudioChannelLayout& channelMask) {
+        // Return true when 1) the queried channel mask is 'none' and none of the channel masks
+        // in the collection contains a haptic channel, or 2) the channel mask collection contains
+        // the queried channel mask.
+        return (channelMask.getTag() == AudioChannelLayout::none &&
+                        std::none_of(channelMasks.begin(), channelMasks.end(),
+                                     containHapticChannel)) ||
+                std::find(channelMasks.begin(), channelMasks.end(), channelMask)
+                    != channelMasks.end();
+    };
+    auto belongsToProfile = [&config, &channelMaskMatches](const AudioProfile& prof) {
+        return (isDefaultAudioFormat(config.base.format) || prof.format == config.base.format) &&
+                channelMaskMatches(prof.channelMasks, config.base.channelMask) &&
+                (config.base.sampleRate == 0 ||
+                        std::find(prof.sampleRates.begin(), prof.sampleRates.end(),
+                                config.base.sampleRate) != prof.sampleRates.end());
+    };
+    static const std::vector<AudioOutputFlags> kOptionalOutputFlags{AudioOutputFlags::BIT_PERFECT};
+    int optionalFlags = 0;
+    auto flagMatches = [&flags, &optionalFlags](const AudioIoFlags& portFlags) {
+        // Ports should be able to match if the optional flags are not requested.
+        return portFlags == flags ||
+               (portFlags.getTag() == AudioIoFlags::Tag::output &&
+                        AudioIoFlags::make<AudioIoFlags::Tag::output>(
+                                portFlags.get<AudioIoFlags::Tag::output>() &
+                                        ~optionalFlags) == flags);
+    };
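+    // For example, a mix port declaring OUTPUT | BIT_PERFECT still matches a plain OUTPUT request
+    // once BIT_PERFECT has been added to 'optionalFlags' by the retry loop below.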
+    auto matcher = [&](const auto& pair) {
+        const auto& p = pair.second;
+        return p.ext.getTag() == AudioPortExt::Tag::mix &&
+                flagMatches(p.flags) &&
+                (destinationPortIds.empty() ||
+                        std::any_of(destinationPortIds.begin(), destinationPortIds.end(),
+                                [&](const int32_t destId) { return mRoutingMatrix.count(
+                                            std::make_pair(p.id, destId)) != 0; })) &&
+                (p.profiles.empty() ||
+                        std::find_if(p.profiles.begin(), p.profiles.end(), belongsToProfile) !=
+                        p.profiles.end()); };
+    auto result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
+    if (result == mPorts.end() && flags.getTag() == AudioIoFlags::Tag::output) {
+        auto optionalOutputFlagsIt = kOptionalOutputFlags.begin();
+        while (result == mPorts.end() && optionalOutputFlagsIt != kOptionalOutputFlags.end()) {
+            if (isBitPositionFlagSet(
+                        flags.get<AudioIoFlags::Tag::output>(), *optionalOutputFlagsIt)) {
+                // If the flag is set by the request, it must be matched.
+                ++optionalOutputFlagsIt;
+                continue;
+            }
+            optionalFlags |= makeBitPositionFlagMask(*optionalOutputFlagsIt++);
+            result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
+            ALOGI("%s: port for config %s, flags %s was not found in the module %s, "
+                  "retried with excluding optional flags %#x", __func__, config.toString().c_str(),
+                    flags.toString().c_str(), mInstance.c_str(), optionalFlags);
+        }
+    }
+    return result;
+}
+
+Hal2AidlMapper::PortConfigs::iterator Hal2AidlMapper::findPortConfig(const AudioDevice& device) {
+    return std::find_if(mPortConfigs.begin(), mPortConfigs.end(),
+            [&](const auto& pair) { return audioDeviceMatches(device, pair.second); });
+}
+
+Hal2AidlMapper::PortConfigs::iterator Hal2AidlMapper::findPortConfig(
+            const std::optional<AudioConfig>& config,
+            const std::optional<AudioIoFlags>& flags,
+            int32_t ioHandle) {
+    using Tag = AudioPortExt::Tag;
+    return std::find_if(mPortConfigs.begin(), mPortConfigs.end(),
+            [&](const auto& pair) {
+                const auto& p = pair.second;
+                LOG_ALWAYS_FATAL_IF(p.ext.getTag() == Tag::mix &&
+                        (!p.sampleRate.has_value() || !p.channelMask.has_value() ||
+                                !p.format.has_value() || !p.flags.has_value()),
+                        "%s: stored mix port config is not fully specified: %s",
+                        __func__, p.toString().c_str());
+                return p.ext.getTag() == Tag::mix &&
+                        (!config.has_value() ||
+                                isConfigEqualToPortConfig(config.value(), p)) &&
+                        (!flags.has_value() || p.flags.value() == flags.value()) &&
+                        p.ext.template get<Tag::mix>().handle == ioHandle; });
+}
+
+status_t Hal2AidlMapper::getAudioMixPort(int32_t ioHandle, AudioPort* port) {
+    auto it = findPortConfig(std::nullopt /*config*/, std::nullopt /*flags*/, ioHandle);
+    if (it == mPortConfigs.end()) {
+        ALOGE("%s, cannot find mix port config for handle %u", __func__, ioHandle);
+        return BAD_VALUE;
+    }
+    return updateAudioPort(it->second.portId, port);
+}
+
+status_t Hal2AidlMapper::getAudioPortCached(
+        const ::aidl::android::media::audio::common::AudioDevice& device,
+        ::aidl::android::media::audio::common::AudioPort* port) {
+
+    if (auto portsIt = findPort(device); portsIt != mPorts.end()) {
+        *port = portsIt->second;
+        return OK;
+    }
+    ALOGE("%s: device port for device %s is not found in the module %s",
+            __func__, device.toString().c_str(), mInstance.c_str());
+    return BAD_VALUE;
+}
+
+status_t Hal2AidlMapper::initialize() {
+    std::vector<AudioPort> ports;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->getAudioPorts(&ports)));
+    ALOGW_IF(ports.empty(), "%s: module %s returned an empty list of audio ports",
+            __func__, mInstance.c_str());
+    mDefaultInputPortId = mDefaultOutputPortId = -1;
+    const int defaultDeviceFlag = 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE;
+    for (auto it = ports.begin(); it != ports.end(); ) {
+        const auto& port = *it;
+        if (port.ext.getTag() != AudioPortExt::Tag::device) {
+            ++it;
+            continue;
+        }
+        const AudioPortDeviceExt& deviceExt = port.ext.get<AudioPortExt::Tag::device>();
+        if ((deviceExt.flags & defaultDeviceFlag) != 0) {
+            if (port.flags.getTag() == AudioIoFlags::Tag::input) {
+                mDefaultInputPortId = port.id;
+            } else if (port.flags.getTag() == AudioIoFlags::Tag::output) {
+                mDefaultOutputPortId = port.id;
+            }
+        }
+        // For compatibility with HIDL, hide "template" remote submix ports from the ports list.
+        if (const auto& devDesc = deviceExt.device;
+                (devDesc.type.type == AudioDeviceType::IN_SUBMIX ||
+                        devDesc.type.type == AudioDeviceType::OUT_SUBMIX) &&
+                devDesc.type.connection == AudioDeviceDescription::CONNECTION_VIRTUAL) {
+            if (devDesc.type.type == AudioDeviceType::IN_SUBMIX) {
+                mRemoteSubmixIn = port;
+            } else {
+                mRemoteSubmixOut = port;
+            }
+            it = ports.erase(it);
+        } else {
+            ++it;
+        }
+    }
+    if (mRemoteSubmixIn.has_value() != mRemoteSubmixOut.has_value()) {
+        ALOGE("%s: The configuration only has input or output remote submix device, must have both",
+                __func__);
+        mRemoteSubmixIn.reset();
+        mRemoteSubmixOut.reset();
+    }
+    if (mRemoteSubmixIn.has_value()) {
+        AudioPort connectedRSubmixIn = *mRemoteSubmixIn;
+        connectedRSubmixIn.ext.get<AudioPortExt::Tag::device>().device.address =
+                AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS;
+        ALOGD("%s: connecting remote submix input", __func__);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
+                                connectedRSubmixIn, &connectedRSubmixIn)));
+        // The template port for the remote submix input couldn't be "default" because it is not
+        // attached. The connected port can now be made default because we never disconnect it.
+        if (mDefaultInputPortId == -1) {
+            mDefaultInputPortId = connectedRSubmixIn.id;
+        }
+        ports.push_back(std::move(connectedRSubmixIn));
+
+        // Remote submix output must not be connected until the framework actually starts
+        // using it, however for legacy compatibility we need to provide an "augmented template"
+        // port with an address and profiles. It is obtained by connecting the output and then
+        // immediately disconnecting it. This is a cheap operation as we don't open any streams.
+        AudioPort tempConnectedRSubmixOut = *mRemoteSubmixOut;
+        tempConnectedRSubmixOut.ext.get<AudioPortExt::Tag::device>().device.address =
+                AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS;
+        ALOGD("%s: temporarily connecting and disconnecting remote submix output", __func__);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
+                                tempConnectedRSubmixOut, &tempConnectedRSubmixOut)));
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->disconnectExternalDevice(
+                                tempConnectedRSubmixOut.id)));
+        tempConnectedRSubmixOut.id = mRemoteSubmixOut->id;
+        ports.push_back(std::move(tempConnectedRSubmixOut));
+    }
+
+    ALOGI("%s: module %s default port ids: input %d, output %d",
+            __func__, mInstance.c_str(), mDefaultInputPortId, mDefaultOutputPortId);
+    std::transform(ports.begin(), ports.end(), std::inserter(mPorts, mPorts.end()),
+            [](const auto& p) { return std::make_pair(p.id, p); });
+    RETURN_STATUS_IF_ERROR(updateRoutes());
+    std::vector<AudioPortConfig> portConfigs;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mModule->getAudioPortConfigs(&portConfigs)));  // OK if empty
+    std::transform(portConfigs.begin(), portConfigs.end(),
+            std::inserter(mPortConfigs, mPortConfigs.end()),
+            [](const auto& p) { return std::make_pair(p.id, p); });
+    std::transform(mPortConfigs.begin(), mPortConfigs.end(),
+            std::inserter(mInitialPortConfigIds, mInitialPortConfigIds.end()),
+            [](const auto& pcPair) { return pcPair.first; });
+    std::vector<AudioPatch> patches;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mModule->getAudioPatches(&patches)));  // OK if empty
+    std::transform(patches.begin(), patches.end(),
+            std::inserter(mPatches, mPatches.end()),
+            [](const auto& p) { return std::make_pair(p.id, p); });
+    return OK;
+}
+
+bool Hal2AidlMapper::isPortBeingHeld(int32_t portId) {
+    // It is assumed that mStreams has already been cleaned up.
+    for (const auto& s : mStreams) {
+        if (portConfigBelongsToPort(s.second.first, portId)) return true;
+    }
+    for (const auto& [_, patch] : mPatches) {
+        for (int32_t id : patch.sourcePortConfigIds) {
+            if (portConfigBelongsToPort(id, portId)) return true;
+        }
+        for (int32_t id : patch.sinkPortConfigIds) {
+            if (portConfigBelongsToPort(id, portId)) return true;
+        }
+    }
+    return false;
+}
+
+status_t Hal2AidlMapper::prepareToDisconnectExternalDevice(const AudioPort& devicePort) {
+    auto portsIt = findPort(devicePort.ext.get<AudioPortExt::device>().device);
+    if (portsIt == mPorts.end()) {
+        return BAD_VALUE;
+    }
+    return statusTFromBinderStatus(mModule->prepareToDisconnectExternalDevice(portsIt->second.id));
+}
+
+status_t Hal2AidlMapper::prepareToOpenStream(
+        int32_t ioHandle, const AudioDevice& device, const AudioIoFlags& flags,
+        AudioSource source, Cleanups* cleanups, AudioConfig* config,
+        AudioPortConfig* mixPortConfig, AudioPatch* patch) {
+    ALOGD("%p %s: handle %d, device %s, flags %s, source %s, config %s, mix port config %s",
+            this, __func__, ioHandle, device.toString().c_str(),
+            flags.toString().c_str(), toString(source).c_str(),
+            config->toString().c_str(), mixPortConfig->toString().c_str());
+    resetUnusedPatchesAndPortConfigs();
+    const AudioConfig initialConfig = *config;
+    // Find / create AudioPortConfigs for the device port and the mix port,
+    // then find / create a patch between them, and open a stream on the mix port.
+    AudioPortConfig devicePortConfig;
+    bool created = false;
+    RETURN_STATUS_IF_ERROR(findOrCreateDevicePortConfig(device, config,
+                    &devicePortConfig, &created));
+    LOG_ALWAYS_FATAL_IF(devicePortConfig.id == 0);
+    if (created) {
+        cleanups->add(&Hal2AidlMapper::resetPortConfig, devicePortConfig.id);
+    }
+    status_t status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
+            devicePortConfig.id, flags, source, initialConfig, cleanups, config,
+            mixPortConfig, patch);
+    if (status != OK) {
+        // If using the client-provided config did not work out for establishing a mix port config
+        // or patching, try with the device port config. Note that, in general, the device port
+        // config and the mix port config are not required to be the same; however, they must
+        // match if the HAL module can't perform audio stream conversions.
+        AudioConfig deviceConfig = initialConfig;
+        if (setConfigFromPortConfig(&deviceConfig, devicePortConfig)->base != initialConfig.base) {
+            ALOGD("%s: retrying with device port config: %s", __func__,
+                    devicePortConfig.toString().c_str());
+            status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
+                    devicePortConfig.id, flags, source, initialConfig, cleanups,
+                    &deviceConfig, mixPortConfig, patch);
+            if (status == OK) {
+                *config = deviceConfig;
+            }
+        }
+    }
+    return status;
+}
+
+status_t Hal2AidlMapper::prepareToOpenStreamHelper(
+        int32_t ioHandle, int32_t devicePortId, int32_t devicePortConfigId,
+        const AudioIoFlags& flags, AudioSource source, const AudioConfig& initialConfig,
+        Cleanups* cleanups, AudioConfig* config, AudioPortConfig* mixPortConfig,
+        AudioPatch* patch) {
+    const bool isInput = flags.getTag() == AudioIoFlags::Tag::input;
+    bool created = false;
+    RETURN_STATUS_IF_ERROR(findOrCreateMixPortConfig(*config, flags, ioHandle, source,
+                    std::set<int32_t>{devicePortId}, mixPortConfig, &created));
+    if (created) {
+        cleanups->add(&Hal2AidlMapper::resetPortConfig, mixPortConfig->id);
+    }
+    setConfigFromPortConfig(config, *mixPortConfig);
+    bool retryWithSuggestedConfig = false;   // By default, let the framework retry.
+    if (mixPortConfig->id == 0 && config->base == AudioConfigBase{}) {
+        // The HAL proposes a default config, can retry here.
+        retryWithSuggestedConfig = true;
+    } else if (isInput && config->base != initialConfig.base) {
+        // If the resulting config is different, we must stop and provide the config to the
+        // framework so that it can retry.
+        mixPortConfig->id = 0;
+    } else if (!isInput && mixPortConfig->id == 0 &&
+                    (initialConfig.base.format.type == AudioFormatType::PCM ||
+                            !isBitPositionFlagSet(flags.get<AudioIoFlags::output>(),
+                                    AudioOutputFlags::DIRECT) ||
+                            isBitPositionFlagSet(flags.get<AudioIoFlags::output>(),
+                                    AudioOutputFlags::COMPRESS_OFFLOAD))) {
+        // The framework does not retry opening non-direct PCM and IEC61937 outputs, so we need
+        // to retry here (see 'AudioHwDevice::openOutputStream').
+        retryWithSuggestedConfig = true;
+    }
+    if (mixPortConfig->id == 0 && retryWithSuggestedConfig) {
+        ALOGD("%s: retrying to find/create a mix port config using config %s", __func__,
+                config->toString().c_str());
+        RETURN_STATUS_IF_ERROR(findOrCreateMixPortConfig(*config, flags, ioHandle, source,
+                        std::set<int32_t>{devicePortId}, mixPortConfig, &created));
+        if (created) {
+            cleanups->add(&Hal2AidlMapper::resetPortConfig, mixPortConfig->id);
+        }
+        setConfigFromPortConfig(config, *mixPortConfig);
+    }
+    if (mixPortConfig->id == 0) {
+        ALOGD("%p %s: returning suggested config for the stream: %s", this, __func__,
+                config->toString().c_str());
+        return OK;
+    }
+    if (isInput) {
+        RETURN_STATUS_IF_ERROR(findOrCreatePatch(
+                        {devicePortConfigId}, {mixPortConfig->id}, patch, &created));
+    } else {
+        RETURN_STATUS_IF_ERROR(findOrCreatePatch(
+                        {mixPortConfig->id}, {devicePortConfigId}, patch, &created));
+    }
+    if (created) {
+        cleanups->add(&Hal2AidlMapper::resetPatch, patch->id);
+    }
+    if (config->frameCount <= 0) {
+        config->frameCount = patch->minimumStreamBufferSizeFrames;
+    }
+    return OK;
+}
+
+status_t Hal2AidlMapper::setPortConfig(
+        const AudioPortConfig& requestedPortConfig, const std::set<int32_t>& destinationPortIds,
+        AudioPortConfig* portConfig, Cleanups* cleanups) {
+    bool created = false;
+    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
+                    requestedPortConfig, destinationPortIds, portConfig, &created));
+    if (created && cleanups != nullptr) {
+        cleanups->add(&Hal2AidlMapper::resetPortConfig, portConfig->id);
+    }
+    return OK;
+}
+
+status_t Hal2AidlMapper::releaseAudioPatch(int32_t patchId) {
+    return releaseAudioPatches({patchId});
+}
+
+status_t Hal2AidlMapper::releaseAudioPatches(const std::set<int32_t>& patchIds) {
+    status_t result = OK;
+    for (const auto patchId : patchIds) {
+        if (auto it = mPatches.find(patchId); it != mPatches.end()) {
+            mPatches.erase(it);
+            if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
+                ALOGE("%s: error while resetting patch %d: %s",
+                        __func__, patchId, status.getDescription().c_str());
+                result = statusTFromBinderStatus(status);
+            }
+        } else {
+            ALOGE("%s: patch id %d not found", __func__, patchId);
+            result = BAD_VALUE;
+        }
+    }
+    resetUnusedPortConfigs();
+    return result;
+}
+
+void Hal2AidlMapper::resetPortConfig(int32_t portConfigId) {
+    if (auto it = mPortConfigs.find(portConfigId); it != mPortConfigs.end()) {
+        mPortConfigs.erase(it);
+        if (ndk::ScopedAStatus status = mModule->resetAudioPortConfig(portConfigId);
+                !status.isOk()) {
+            ALOGE("%s: error while resetting port config %d: %s",
+                    __func__, portConfigId, status.getDescription().c_str());
+        }
+        return;
+    }
+    ALOGE("%s: port config id %d not found", __func__, portConfigId);
+}
+
+void Hal2AidlMapper::resetUnusedPatchesAndPortConfigs() {
+    // Since patches can be created independently of streams via 'createOrUpdatePatch',
+    // here we only clean up patches for released streams.
+    std::set<int32_t> patchesToRelease;
+    for (auto it = mStreams.begin(); it != mStreams.end(); ) {
+        if (auto streamSp = it->first.promote(); streamSp) {
+            ++it;
+        } else {
+            if (const int32_t patchId = it->second.second; patchId != -1) {
+                patchesToRelease.insert(patchId);
+            }
+            it = mStreams.erase(it);
+        }
+    }
+    // 'releaseAudioPatches' also resets unused port configs.
+    releaseAudioPatches(patchesToRelease);
+}
+
+void Hal2AidlMapper::resetUnusedPortConfigs() {
+    // The assumption is that port configs are used to create patches
+    // (or to open streams, but that involves creation of patches, too). Thus,
+    // orphaned port configs can and should be reset.
+    std::map<int32_t, int32_t /*portID*/> portConfigIds;
+    std::transform(mPortConfigs.begin(), mPortConfigs.end(),
+            std::inserter(portConfigIds, portConfigIds.end()),
+            [](const auto& pcPair) { return std::make_pair(pcPair.first, pcPair.second.portId); });
+    for (const auto& p : mPatches) {
+        for (int32_t id : p.second.sourcePortConfigIds) portConfigIds.erase(id);
+        for (int32_t id : p.second.sinkPortConfigIds) portConfigIds.erase(id);
+    }
+    for (int32_t id : mInitialPortConfigIds) {
+        portConfigIds.erase(id);
+    }
+    for (const auto& s : mStreams) {
+        portConfigIds.erase(s.second.first);
+    }
+    std::set<int32_t> retryDeviceDisconnection;
+    for (const auto& portConfigAndIdPair : portConfigIds) {
+        resetPortConfig(portConfigAndIdPair.first);
+        if (const auto it = mConnectedPorts.find(portConfigAndIdPair.second);
+                it != mConnectedPorts.end() && it->second) {
+            retryDeviceDisconnection.insert(portConfigAndIdPair.second);
+        }
+    }
+    for (int32_t portId : retryDeviceDisconnection) {
+        if (!isPortBeingHeld(portId)) {
+            if (auto status = mModule->disconnectExternalDevice(portId); status.isOk()) {
+                eraseConnectedPort(portId);
+                ALOGD("%s: executed postponed external device disconnection for port ID %d",
+                        __func__, portId);
+            }
+        }
+    }
+    if (!retryDeviceDisconnection.empty()) {
+        updateRoutes();
+    }
+}
+
+status_t Hal2AidlMapper::setDevicePortConnectedState(const AudioPort& devicePort, bool connected) {
+    if (connected) {
+        AudioDevice matchDevice = devicePort.ext.get<AudioPortExt::device>().device;
+        std::optional<AudioPort> templatePort;
+        auto erasePortAfterConnectionIt = mPorts.end();
+        // Connection of remote submix out with address "0" is a special case. Since there is
+        // already an "augmented template" port with this address in mPorts, we need to replace
+        // it with a connected port.
+        // Connection of remote submix outs with any other address is done as usual except that
+        // the template port is in `mRemoteSubmixOut`.
+        if (mRemoteSubmixOut.has_value() && matchDevice.type.type == AudioDeviceType::OUT_SUBMIX) {
+            if (matchDevice.address == AudioDeviceAddress::make<AudioDeviceAddress::id>(
+                            AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS)) {
+                erasePortAfterConnectionIt = findPort(matchDevice);
+            }
+            templatePort = mRemoteSubmixOut;
+        } else if (mRemoteSubmixIn.has_value() &&
+                matchDevice.type.type == AudioDeviceType::IN_SUBMIX) {
+            templatePort = mRemoteSubmixIn;
+        } else {
+            // Reset the device address to find the "template" port.
+            matchDevice.address = AudioDeviceAddress::make<AudioDeviceAddress::id>();
+        }
+        if (!templatePort.has_value()) {
+            auto portsIt = findPort(matchDevice);
+            if (portsIt == mPorts.end()) {
+                // Since 'setConnectedState' is called for all modules, it is normal that the
+                // device port is not found in some of them.
+                return BAD_VALUE;
+            } else {
+                ALOGD("%s: device port for device %s found in the module %s",
+                        __func__, matchDevice.toString().c_str(), mInstance.c_str());
+            }
+            templatePort = portsIt->second;
+        }
+        resetUnusedPatchesAndPortConfigs();
+
+        // Use the ID of the "template" port, use all the information from the provided port.
+        AudioPort connectedPort = devicePort;
+        connectedPort.id = templatePort->id;
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
+                                connectedPort, &connectedPort)));
+        const auto [it, inserted] = mPorts.insert(std::make_pair(connectedPort.id, connectedPort));
+        LOG_ALWAYS_FATAL_IF(!inserted,
+                "%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
+                __func__, mInstance.c_str(), connectedPort.toString().c_str(),
+                it->second.toString().c_str());
+        mConnectedPorts[connectedPort.id] = false;
+        if (erasePortAfterConnectionIt != mPorts.end()) {
+            mPorts.erase(erasePortAfterConnectionIt);
+        }
+    } else {  // !connected
+        AudioDevice matchDevice = devicePort.ext.get<AudioPortExt::device>().device;
+        auto portsIt = findPort(matchDevice);
+        if (portsIt == mPorts.end()) {
+            // Since 'setConnectedState' is called for all modules, it is normal that the device
+            // port is not found in some of them.
+            return BAD_VALUE;
+        } else {
+            ALOGD("%s: device port for device %s found in the module %s",
+                    __func__, matchDevice.toString().c_str(), mInstance.c_str());
+        }
+        resetUnusedPatchesAndPortConfigs();
+
+        // Disconnection of remote submix out with address "0" is a special case. We need to replace
+        // the connected port entry with the "augmented template".
+        const int32_t portId = portsIt->second.id;
+        if (mRemoteSubmixOut.has_value() && matchDevice.type.type == AudioDeviceType::OUT_SUBMIX &&
+                matchDevice.address == AudioDeviceAddress::make<AudioDeviceAddress::id>(
+                        AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS)) {
+            mDisconnectedPortReplacement = std::make_pair(portId, *mRemoteSubmixOut);
+            auto& port = mDisconnectedPortReplacement.second;
+            port.ext.get<AudioPortExt::Tag::device>().device = matchDevice;
+            port.profiles = portsIt->second.profiles;
+        }
+        // Streams are closed by AudioFlinger independently of device disconnections.
+        // It is possible that the stream has not been closed yet.
+        if (!isPortBeingHeld(portId)) {
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                            mModule->disconnectExternalDevice(portId)));
+            eraseConnectedPort(portId);
+        } else {
+            ALOGD("%s: since device port ID %d is used by a stream, "
+                    "external device disconnection postponed", __func__, portId);
+            mConnectedPorts[portId] = true;
+        }
+    }
+    return updateRoutes();
+}
+
+status_t Hal2AidlMapper::updateAudioPort(int32_t portId, AudioPort* port) {
+    const status_t status = statusTFromBinderStatus(mModule->getAudioPort(portId, port));
+    if (status == OK) {
+        auto portIt = mPorts.find(portId);
+        if (portIt != mPorts.end()) {
+            if (port->ext.getTag() == AudioPortExt::Tag::mix && portIt->second != *port) {
+                mDynamicMixPortIds.insert(portId);
+            }
+            portIt->second = *port;
+        } else {
+            ALOGW("%s, port(%d) returned successfully from the HAL but not it is not cached",
+                  __func__, portId);
+        }
+    }
+    return status;
+}
+
+status_t Hal2AidlMapper::updateRoutes() {
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mModule->getAudioRoutes(&mRoutes)));
+    ALOGW_IF(mRoutes.empty(), "%s: module %s returned an empty list of audio routes",
+            __func__, mInstance.c_str());
+    if (mRemoteSubmixIn.has_value()) {
+        // Remove mentions of the template remote submix input from routes.
+        int32_t rSubmixInId = mRemoteSubmixIn->id;
+        // Remove mentions of the template remote submix out only if it is not in mPorts
+        // (which means a connected port has replaced it in mPorts).
+        int32_t rSubmixOutId = mPorts.find(mRemoteSubmixOut->id) == mPorts.end() ?
+                mRemoteSubmixOut->id : -1;
+        for (auto it = mRoutes.begin(); it != mRoutes.end();) {
+            auto& route = *it;
+            if (route.sinkPortId == rSubmixOutId) {
+                it = mRoutes.erase(it);
+                continue;
+            }
+            if (auto routeIt = std::find(route.sourcePortIds.begin(), route.sourcePortIds.end(),
+                            rSubmixInId); routeIt != route.sourcePortIds.end()) {
+                route.sourcePortIds.erase(routeIt);
+                if (route.sourcePortIds.empty()) {
+                    it = mRoutes.erase(it);
+                    continue;
+                }
+            }
+            ++it;
+        }
+    }
+    mRoutingMatrix.clear();
+    for (const auto& r : mRoutes) {
+        for (auto portId : r.sourcePortIds) {
+            mRoutingMatrix.emplace(r.sinkPortId, portId);
+            mRoutingMatrix.emplace(portId, r.sinkPortId);
+        }
+    }
+    return OK;
+}
+
+void Hal2AidlMapper::updateDynamicMixPorts() {
+    for (int32_t portId : mDynamicMixPortIds) {
+        if (auto it = mPorts.find(portId); it != mPorts.end()) {
+            updateAudioPort(portId, &it->second);
+        } else {
+            // This must not happen
+            ALOGE("%s, cannot find port for id=%d", __func__, portId);
+        }
+    }
+}
+
+} // namespace android
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
new file mode 100644
index 0000000..93ce233
--- /dev/null
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -0,0 +1,212 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <map>
+#include <set>
+#include <utility>
+#include <vector>
+
+#include <aidl/android/hardware/audio/core/IModule.h>
+#include <media/AidlConversionUtil.h>
+
+#include "Cleanups.h"
+
+namespace android {
+
+class Hal2AidlMapper;
+class StreamHalInterface;
+
+// The mapper class is needed because the framework has not yet been updated to operate on
+// AIDL-based structures directly. The mapper does the job of translating the "legacy" way of
+// identifying ports and port configs (by device addresses and I/O handles) into AIDL IDs. Once
+// the framework is updated to provide these IDs directly to libaudiohal, the need for the
+// mapper will cease.
+//
+// Note that unlike DeviceHalInterface, which sometimes allows a method to return an error,
+// but still considers some of the outputs to be valid (for example, in 'open{Input|Output}Stream'),
+// 'Hal2AidlMapper' follows the Binder convention. It means that if a method returns an error,
+// the outputs may not be initialized at all and should not be considered by the caller.
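+//
+// A rough usage sketch (illustrative only; the local variable names used here are assumptions,
+// only the mapper methods themselves are declared below):
+//
+//   Hal2AidlMapper mapper(instanceName, module);
+//   RETURN_STATUS_IF_ERROR(mapper.initialize());
+//   // Opening a stream: find or create the device and mix port configs and a patch between them.
+//   RETURN_STATUS_IF_ERROR(mapper.prepareToOpenStream(
+//           ioHandle, device, flags, source, &cleanups, &config, &mixPortConfig, &patch));
+//   // Once the stream object exists, register it so that its resources can be tracked.
+//   mapper.addStream(stream, mixPortConfig.id, patch.id);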
+class Hal2AidlMapper {
+  public:
+    using Cleanups = Cleanups<Hal2AidlMapper>;
+
+    Hal2AidlMapper(
+            const std::string& instance,
+            const std::shared_ptr<::aidl::android::hardware::audio::core::IModule>& module);
+
+    void addStream(const sp<StreamHalInterface>& stream, int32_t portConfigId, int32_t patchId);
+    status_t createOrUpdatePatch(
+            const std::vector<::aidl::android::media::audio::common::AudioPortConfig>& sources,
+            const std::vector<::aidl::android::media::audio::common::AudioPortConfig>& sinks,
+            int32_t* patchId, Cleanups* cleanups);
+    status_t findPortConfig(
+            const ::aidl::android::media::audio::common::AudioDevice& device,
+            ::aidl::android::media::audio::common::AudioPortConfig* portConfig);
+    status_t getAudioMixPort(
+            int32_t ioHandle, ::aidl::android::media::audio::common::AudioPort* port);
+    status_t getAudioPortCached(
+            const ::aidl::android::media::audio::common::AudioDevice& device,
+            ::aidl::android::media::audio::common::AudioPort* port);
+    template<typename OutputContainer, typename Func>
+    status_t getAudioPorts(OutputContainer* ports, Func converter) {
+        return ::aidl::android::convertContainer(mPorts, ports,
+                [&converter](const auto& pair) { return converter(pair.second); });
+    }
+    template<typename OutputContainer, typename Func>
+    status_t getAudioRoutes(OutputContainer* routes, Func converter) {
+        return ::aidl::android::convertContainer(mRoutes, routes, converter);
+    }
+    status_t initialize();
+    status_t prepareToDisconnectExternalDevice(
+            const ::aidl::android::media::audio::common::AudioPort& devicePort);
+    // If the resulting 'mixPortConfig->id' is 0, that means the stream was not created,
+    // and 'config' is a suggested config.
+    status_t prepareToOpenStream(
+        int32_t ioHandle,
+        const ::aidl::android::media::audio::common::AudioDevice& device,
+        const ::aidl::android::media::audio::common::AudioIoFlags& flags,
+        ::aidl::android::media::audio::common::AudioSource source,
+        Cleanups* cleanups,
+        ::aidl::android::media::audio::common::AudioConfig* config,
+        ::aidl::android::media::audio::common::AudioPortConfig* mixPortConfig,
+        ::aidl::android::hardware::audio::core::AudioPatch* patch);
+    status_t setPortConfig(
+        const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
+        const std::set<int32_t>& destinationPortIds,
+        ::aidl::android::media::audio::common::AudioPortConfig* portConfig,
+        Cleanups* cleanups = nullptr);
+    status_t releaseAudioPatch(int32_t patchId);
+    void resetUnusedPatchesAndPortConfigs();
+    status_t setDevicePortConnectedState(
+            const ::aidl::android::media::audio::common::AudioPort& devicePort, bool connected);
+
+  private:
+    // IDs of ports for connected external devices, and whether they are held by streams.
+    using ConnectedPorts = std::map<int32_t /*port ID*/, bool>;
+    using Patches = std::map<int32_t /*patch ID*/,
+            ::aidl::android::hardware::audio::core::AudioPatch>;
+    using PortConfigs = std::map<int32_t /*port config ID*/,
+            ::aidl::android::media::audio::common::AudioPortConfig>;
+    using Ports = std::map<int32_t /*port ID*/, ::aidl::android::media::audio::common::AudioPort>;
+    using Routes = std::vector<::aidl::android::hardware::audio::core::AudioRoute>;
+    // Answers the question "whether portID 'first' is reachable from portID 'second'?"
+    // It's not a map because both portIDs are known. The matrix is symmetric.
+    using RoutingMatrix = std::set<std::pair<int32_t, int32_t>>;
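+    // For example, a route with sink S and sources {A, B} yields the entries (S,A), (A,S),
+    // (S,B), (B,S) (see 'updateRoutes'), so reachability in either direction can be checked
+    // with mRoutingMatrix.count({x, y}) != 0 (see 'findPort').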
+    // There is always a port config ID set. The patch ID is set after stream
+    // creation, and can be set to '-1' later if the framework happens to create
+    // a patch between the same endpoints. In that case, ownership of the patch
+    // lies with the framework.
+    using Streams = std::map<wp<StreamHalInterface>,
+            std::pair<int32_t /*port config ID*/, int32_t /*patch ID*/>>;
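+    // For example, a stream opened with mix port config #10 and patch #20 is stored as {10, 20};
+    // if the framework later takes over an equivalent patch, the entry becomes {10, -1} and
+    // 'resetUnusedPatchesAndPortConfigs' no longer releases that patch for the stream.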
+
+    const std::string mInstance;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
+
+    bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
+            const ::aidl::android::media::audio::common::AudioPort& p);
+    bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
+            const ::aidl::android::media::audio::common::AudioPortConfig& p);
+    // If the 'result->id' is 0, that means the config was not created/updated,
+    // and the 'result' is a suggestion from the HAL.
+    status_t createOrUpdatePortConfig(
+            const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
+            ::aidl::android::media::audio::common::AudioPortConfig* result, bool *created);
+    status_t createOrUpdatePortConfigRetry(
+            const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
+            ::aidl::android::media::audio::common::AudioPortConfig* result, bool *created);
+    void eraseConnectedPort(int32_t portId);
+    status_t findOrCreatePatch(
+        const std::set<int32_t>& sourcePortConfigIds,
+        const std::set<int32_t>& sinkPortConfigIds,
+        ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
+    status_t findOrCreatePatch(
+        const ::aidl::android::hardware::audio::core::AudioPatch& requestedPatch,
+        ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
+    status_t findOrCreateDevicePortConfig(
+            const ::aidl::android::media::audio::common::AudioDevice& device,
+            const ::aidl::android::media::audio::common::AudioConfig* config,
+            ::aidl::android::media::audio::common::AudioPortConfig* portConfig,
+            bool* created);
+    // If the resulting 'portConfig->id' is 0, that means the config was not created,
+    // and 'portConfig' is a suggested config.
+    status_t findOrCreateMixPortConfig(
+            const ::aidl::android::media::audio::common::AudioConfig& config,
+            const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
+            int32_t ioHandle,
+            ::aidl::android::media::audio::common::AudioSource source,
+            const std::set<int32_t>& destinationPortIds,
+            ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
+    status_t findOrCreatePortConfig(
+        const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
+        const std::set<int32_t>& destinationPortIds,
+        ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
+    Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
+            const std::set<int32_t>& sinkPortConfigIds);
+    Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
+    Ports::iterator findPort(
+            const ::aidl::android::media::audio::common::AudioConfig& config,
+            const ::aidl::android::media::audio::common::AudioIoFlags& flags,
+            const std::set<int32_t>& destinationPortIds);
+    PortConfigs::iterator findPortConfig(
+            const ::aidl::android::media::audio::common::AudioDevice& device);
+    PortConfigs::iterator findPortConfig(
+            const std::optional<::aidl::android::media::audio::common::AudioConfig>& config,
+            const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
+            int32_t ioHandle);
+    bool isPortBeingHeld(int32_t portId);
+    status_t prepareToOpenStreamHelper(
+        int32_t ioHandle, int32_t devicePortId, int32_t devicePortConfigId,
+        const ::aidl::android::media::audio::common::AudioIoFlags& flags,
+        ::aidl::android::media::audio::common::AudioSource source,
+        const ::aidl::android::media::audio::common::AudioConfig& initialConfig,
+        Cleanups* cleanups, ::aidl::android::media::audio::common::AudioConfig* config,
+        ::aidl::android::media::audio::common::AudioPortConfig* mixPortConfig,
+        ::aidl::android::hardware::audio::core::AudioPatch* patch);
+    bool portConfigBelongsToPort(int32_t portConfigId, int32_t portId) {
+        auto it = mPortConfigs.find(portConfigId);
+        return it != mPortConfigs.end() && it->second.portId == portId;
+    }
+    status_t releaseAudioPatches(const std::set<int32_t>& patchIds);
+    void resetPatch(int32_t patchId) { (void)releaseAudioPatch(patchId); }
+    void resetPortConfig(int32_t portConfigId);
+    void resetUnusedPortConfigs();
+    status_t updateAudioPort(
+            int32_t portId, ::aidl::android::media::audio::common::AudioPort* port);
+    status_t updateRoutes();
+    void updateDynamicMixPorts();
+
+    Ports mPorts;
+    // Remote submix "template" ports (no address specified, no profiles).
+    // They are excluded from `mPorts` as their presence confuses the framework code.
+    std::optional<::aidl::android::media::audio::common::AudioPort> mRemoteSubmixIn;
+    std::optional<::aidl::android::media::audio::common::AudioPort> mRemoteSubmixOut;
+    int32_t mDefaultInputPortId = -1;
+    int32_t mDefaultOutputPortId = -1;
+    PortConfigs mPortConfigs;
+    std::set<int32_t> mInitialPortConfigIds;
+    Patches mPatches;
+    Routes mRoutes;
+    RoutingMatrix mRoutingMatrix;
+    Streams mStreams;
+    ConnectedPorts mConnectedPorts;
+    std::pair<int32_t, ::aidl::android::media::audio::common::AudioPort>
+            mDisconnectedPortReplacement;
+    std::set<int32_t> mDynamicMixPortIds;
+};
+
+}  // namespace android
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index e74fc16..c2d7ee1 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -83,6 +83,7 @@
           mContext(std::move(context)),
           mStream(stream),
           mVendorExt(vext) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     {
         std::lock_guard l(mLock);
         mLastReply.latencyMs = nominalLatency;
@@ -97,6 +98,7 @@
 }
 
 StreamHalAidl::~StreamHalAidl() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     if (mStream != nullptr) {
         ndk::ScopedAStatus status = mStream->close();
         ALOGE_IF(!status.isOk(), "%s: status %s", __func__, status.getDescription().c_str());
@@ -806,6 +808,7 @@
                         mOffloadMetadata.channelMask = VALUE_OR_RETURN_STATUS(
                                 ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
                                         channel_mask, false /*isInput*/));
+                        return OK;
                     }
                     return BAD_VALUE;
                 }))) {
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index ff0c32b..49e6827 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -51,6 +51,7 @@
 status_t AidlConversionSpatializer::getParameter(EffectParamWriter& param) {
     DefaultExtension defaultExt;
     // read parameters into DefaultExtension vector<uint8_t>
+    defaultExt.bytes.resize(param.getParameterSize());
     if (OK != param.readFromParameter(defaultExt.bytes.data(), param.getParameterSize())) {
         ALOGE("%s invalid param %s", __func__, param.toString().c_str());
         param.setStatus(BAD_VALUE);
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
index 18d0d95..e4ec2ba 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
@@ -169,8 +169,8 @@
     const auto& measure = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
             aidlParam, Visualizer, visualizer, Visualizer::measurement, Visualizer::Measurement));
     int32_t* reply = (int32_t *) pReplyData;
-    *reply++ = measure.rms;
-    *reply = measure.peak;
+    *reply++ = measure.peak;
+    *reply = measure.rms;
     return OK;
 }
 
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index a965709..bb5f851 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -142,7 +142,7 @@
     virtual int32_t getAAudioMixerBurstCount() = 0;
     virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
 
-    virtual int32_t supportsBluetoothVariableLatency(bool* supports) = 0;
+    virtual status_t supportsBluetoothVariableLatency(bool* supports) = 0;
 
     // Update the connection status of an external device.
     virtual status_t setConnectedState(const struct audio_port_v7* port, bool connected) = 0;
@@ -160,6 +160,9 @@
 
     virtual status_t prepareToDisconnectExternalDevice(const struct audio_port_v7* port) = 0;
 
+    virtual status_t getAudioMixPort(const struct audio_port_v7* devicePort,
+                                     struct audio_port_v7* mixPort) = 0;
+
   protected:
     // Subclasses can not be constructed directly by clients.
     DeviceHalInterface() {}
diff --git a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
index 8397e9b..c34a671 100644
--- a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
@@ -42,8 +42,6 @@
     // necessary to release references to the returned object.
     virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device) = 0;
 
-    virtual status_t getHalPids(std::vector<pid_t> *pids) = 0;
-
     // Sets a DevicesFactoryHalCallback to notify the client.
     // The callback can be only set once.
     virtual status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) = 0;
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index adff110..1204a3b 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -201,6 +201,9 @@
     ndk::ScopedAStatus getAAudioHardwareBurstMinUsec(int32_t*) override {
         return ndk::ScopedAStatus::ok();
     }
+    ndk::ScopedAStatus prepareToDisconnectExternalDevice(int32_t) override {
+        return ndk::ScopedAStatus::ok();
+    }
 
     bool mIsScreenTurnedOn = false;
     ScreenRotation mScreenRotation = ScreenRotation::DEG_0;
diff --git a/media/libaudioprocessing/BufferProviders.cpp b/media/libaudioprocessing/BufferProviders.cpp
index 9f19f7b..fbc7f90 100644
--- a/media/libaudioprocessing/BufferProviders.cpp
+++ b/media/libaudioprocessing/BufferProviders.cpp
@@ -185,6 +185,8 @@
      mDownmixConfig.inputCfg.mask = EFFECT_CONFIG_SMP_RATE | EFFECT_CONFIG_CHANNELS |
              EFFECT_CONFIG_FORMAT | EFFECT_CONFIG_ACC_MODE;
      mDownmixConfig.outputCfg.mask = mDownmixConfig.inputCfg.mask;
+     mDownmixConfig.inputCfg.buffer.frameCount = bufferFrameCount;
+     mDownmixConfig.outputCfg.buffer.frameCount = bufferFrameCount;
 
      mInFrameSize =
              audio_bytes_per_sample(format) * audio_channel_count_from_out_mask(inputChannelMask);
diff --git a/media/libaudioprocessing/OWNERS b/media/libaudioprocessing/OWNERS
index 96d0ea0..ae071cf 100644
--- a/media/libaudioprocessing/OWNERS
+++ b/media/libaudioprocessing/OWNERS
@@ -1,3 +1,4 @@
-gkasten@google.com
+# Bug component: 48436
 hunga@google.com
-rago@google.com
+yaoshunkai@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/libcpustats/OWNERS b/media/libcpustats/OWNERS
index f9cb567..fe3205a 100644
--- a/media/libcpustats/OWNERS
+++ b/media/libcpustats/OWNERS
@@ -1 +1,4 @@
-gkasten@google.com
+# Bug component: 48436
+atneya@google.com
+hunga@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/libeffects/OWNERS b/media/libeffects/OWNERS
index b7832ea..fe0ca99 100644
--- a/media/libeffects/OWNERS
+++ b/media/libeffects/OWNERS
@@ -1,3 +1,5 @@
+# Bug component: 48436
 hunga@google.com
 mnaganov@google.com
-rago@google.com
+yaoshunkai@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index a5259aa..37633ae 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -56,9 +56,7 @@
         ":effectCommonFile",
     ],
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
     ],
     header_libs: [
         "libaudioeffects",
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index ac893d8..5fb44b5 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -20,12 +20,60 @@
 
 #include "DownmixContext.h"
 
-using aidl::android::hardware::audio::effect::IEffect;
 using aidl::android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::effect::IEffect;
 using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::AudioConfig;
 
 namespace aidl::android::hardware::audio::effect {
 
+namespace {
+
+inline bool isChannelMaskValid(const AudioChannelLayout& channelMask) {
+    if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
+    int chMask = channelMask.get<AudioChannelLayout::layoutMask>();
+    // check against unsupported channels (up to FCC_26)
+    constexpr uint32_t MAXIMUM_CHANNEL_MASK = AudioChannelLayout::LAYOUT_22POINT2 |
+                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_LEFT |
+                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_RIGHT;
+    if (chMask & ~MAXIMUM_CHANNEL_MASK) {
+        LOG(ERROR) << "Unsupported channels in " << (chMask & ~MAXIMUM_CHANNEL_MASK);
+        return false;
+    }
+    return true;
+}
+
+inline bool isStereoChannelMask(const AudioChannelLayout& channelMask) {
+    if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
+
+    return channelMask.get<AudioChannelLayout::layoutMask>() == AudioChannelLayout::LAYOUT_STEREO;
+}
+
+}  // namespace
+
+bool DownmixContext::validateCommonConfig(const Parameter::Common& common) {
+    const AudioConfig& input = common.input;
+    const AudioConfig& output = common.output;
+    if (input.base.sampleRate != output.base.sampleRate) {
+        LOG(ERROR) << __func__ << ": SRC not supported, input: " << input.toString()
+                   << " output: " << output.toString();
+        return false;
+    }
+
+    if (!isStereoChannelMask(output.base.channelMask)) {
+        LOG(ERROR) << __func__ << ": output should be stereo, not "
+                   << output.base.channelMask.toString();
+        return false;
+    }
+
+    if (!isChannelMaskValid(input.base.channelMask)) {
+        LOG(ERROR) << __func__ << ": invalid input channel, " << input.base.channelMask.toString();
+        return false;
+    }
+
+    return true;
+}
+
 DownmixContext::DownmixContext(int statusDepth, const Parameter::Common& common)
     : EffectContext(statusDepth, common) {
     LOG(DEBUG) << __func__;
@@ -62,12 +110,11 @@
     resetBuffer();
 }
 
-IEffect::Status DownmixContext::lvmProcess(float* in, float* out, int samples) {
-    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
+IEffect::Status DownmixContext::downmixProcess(float* in, float* out, int samples) {
     IEffect::Status status = {EX_ILLEGAL_ARGUMENT, 0, 0};
 
-    if (in == nullptr || out == nullptr || getInputFrameSize() != getOutputFrameSize() ||
-        getInputFrameSize() == 0) {
+    if (in == nullptr || out == nullptr ||
+        getCommon().input.frameCount != getCommon().output.frameCount || getInputFrameSize() == 0) {
         return status;
     }
 
@@ -84,7 +131,6 @@
     bool accumulate = false;
     int frames = samples * sizeof(float) / getInputFrameSize();
     if (mType == Downmix::Type::STRIP) {
-        int inputChannelCount = getChannelCount(mChMask);
         while (frames) {
             if (accumulate) {
                 out[0] = std::clamp(out[0] + in[0], -1.f, 1.f);
@@ -93,7 +139,7 @@
                 out[0] = in[0];
                 out[1] = in[1];
             }
-            in += inputChannelCount;
+            in += mInputChannelCount;
             out += 2;
             frames--;
         }
@@ -105,14 +151,17 @@
             return status;
         }
     }
-    LOG(DEBUG) << __func__ << " done processing";
-    return {STATUS_OK, samples, samples};
+    int producedSamples = (samples / mInputChannelCount) << 1;
+    LOG(DEBUG) << __func__ << " done processing " << samples << " samples, generated "
+               << producedSamples << " frameSize: " << getInputFrameSize() << " - "
+               << getOutputFrameSize();
+    return {STATUS_OK, samples, producedSamples};
 }
 
 void DownmixContext::init_params(const Parameter::Common& common) {
     // when configuring the effect, do not allow a blank or unsupported channel mask
     AudioChannelLayout channelMask = common.input.base.channelMask;
-    if (isChannelMaskValid(channelMask)) {
+    if (!isChannelMaskValid(channelMask)) {
         LOG(ERROR) << "Downmix_Configure error: input channel mask " << channelMask.toString()
                    << " not supported";
     } else {
@@ -122,18 +171,4 @@
     }
 }
 
-bool DownmixContext::isChannelMaskValid(AudioChannelLayout channelMask) {
-    if (channelMask.getTag() == AudioChannelLayout::layoutMask) return false;
-    int chMask = channelMask.get<AudioChannelLayout::layoutMask>();
-    // check against unsupported channels (up to FCC_26)
-    constexpr uint32_t MAXIMUM_CHANNEL_MASK = AudioChannelLayout::LAYOUT_22POINT2 |
-                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_LEFT |
-                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_RIGHT;
-    if (chMask & ~MAXIMUM_CHANNEL_MASK) {
-        LOG(ERROR) << "Unsupported channels in " << (chMask & ~MAXIMUM_CHANNEL_MASK);
-        return false;
-    }
-    return true;
-}
-
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/downmix/aidl/DownmixContext.h b/media/libeffects/downmix/aidl/DownmixContext.h
index 1571c38..a381d7f 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.h
+++ b/media/libeffects/downmix/aidl/DownmixContext.h
@@ -50,7 +50,9 @@
         return RetCode::SUCCESS;
     }
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status downmixProcess(float* in, float* out, int samples);
+
+    static bool validateCommonConfig(const Parameter::Common& common);
 
   private:
     DownmixState mState;
@@ -60,7 +62,6 @@
 
     // Common Params
     void init_params(const Parameter::Common& common);
-    bool isChannelMaskValid(::aidl::android::media::audio::common::AudioChannelLayout channelMask);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index 7068c5c..c82c23b 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -71,42 +71,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus DownmixImpl::setParameterCommon(const Parameter& param) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-
-    auto tag = param.getTag();
-    switch (tag) {
-        case Parameter::common:
-            RETURN_IF(mContext->setCommon(param.get<Parameter::common>()) != RetCode::SUCCESS,
-                      EX_ILLEGAL_ARGUMENT, "setCommFailed");
-            break;
-        case Parameter::deviceDescription:
-            RETURN_IF(mContext->setOutputDevice(param.get<Parameter::deviceDescription>()) !=
-                              RetCode::SUCCESS,
-                      EX_ILLEGAL_ARGUMENT, "setDeviceFailed");
-            break;
-        case Parameter::mode:
-            RETURN_IF(mContext->setAudioMode(param.get<Parameter::mode>()) != RetCode::SUCCESS,
-                      EX_ILLEGAL_ARGUMENT, "setModeFailed");
-            break;
-        case Parameter::source:
-            RETURN_IF(mContext->setAudioSource(param.get<Parameter::source>()) != RetCode::SUCCESS,
-                      EX_ILLEGAL_ARGUMENT, "setSourceFailed");
-            break;
-        case Parameter::volumeStereo:
-            RETURN_IF(mContext->setVolumeStereo(param.get<Parameter::volumeStereo>()) !=
-                              RetCode::SUCCESS,
-                      EX_ILLEGAL_ARGUMENT, "setVolumeStereoFailed");
-            break;
-        default: {
-            LOG(ERROR) << __func__ << " unsupportedParameterTag " << toString(tag);
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commonParamNotSupported");
-        }
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus DownmixImpl::commandImpl(CommandId command) {
     RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
     switch (command) {
@@ -193,6 +157,8 @@
         return mContext;
     }
 
+    if (!DownmixContext::validateCommonConfig(common)) return nullptr;
+
     mContext = std::make_shared<DownmixContext>(1 /* statusFmqDepth */, common);
     return mContext;
 }
@@ -204,13 +170,50 @@
     return RetCode::SUCCESS;
 }
 
+void DownmixImpl::process() {
+    /**
+     * Wait for the EventFlag without holding a lock; this is OK because the mEfGroup pointer
+     * will not change during the life cycle of the worker thread (threadLoop).
+     */
+    uint32_t efState = 0;
+    if (!mEventFlag || ::android::OK != mEventFlag->wait(kEventFlagNotEmpty, &efState)) {
+        LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag invalid";
+    }
+
+    {
+        std::lock_guard lg(mImplMutex);
+        RETURN_VALUE_IF(!mImplContext, void(), "nullContext");
+        auto statusMQ = mImplContext->getStatusFmq();
+        auto inputMQ = mImplContext->getInputDataFmq();
+        auto outputMQ = mImplContext->getOutputDataFmq();
+        auto buffer = mImplContext->getWorkBuffer();
+        if (!inputMQ || !outputMQ) {
+            return;
+        }
+
+        const auto availableToRead = inputMQ->availableToRead();
+        const auto availableToWrite = outputMQ->availableToWrite() *
+                                      mImplContext->getInputFrameSize() /
+                                      mImplContext->getOutputFrameSize();
+        auto processSamples = std::min(availableToRead, availableToWrite);
+        if (processSamples) {
+            inputMQ->read(buffer, processSamples);
+            IEffect::Status status = effectProcessImpl(buffer, buffer, processSamples);
+            outputMQ->write(buffer, status.fmqProduced);
+            statusMQ->writeBlocking(&status, 1);
+            LOG(VERBOSE) << getEffectName() << __func__ << ": done processing, effect consumed "
+                        << status.fmqConsumed << " produced " << status.fmqProduced;
+        }
+    }
+}
+
 // Processing method running in EffectWorker thread.
 IEffect::Status DownmixImpl::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     if (!mContext) {
         LOG(ERROR) << __func__ << " nullContext";
         return {EX_NULL_POINTER, 0, 0};
     }
-    return mContext->lvmProcess(in, out, sampleToProcess);
+    return mContext->downmixProcess(in, out, sampleToProcess);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.h b/media/libeffects/downmix/aidl/EffectDownmix.h
index 812d26b..54557dc 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.h
+++ b/media/libeffects/downmix/aidl/EffectDownmix.h
@@ -34,21 +34,26 @@
         LOG(DEBUG) << __func__;
     }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterCommon(const Parameter& param) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
-    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    RetCode releaseContext() override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process)
+            REQUIRES(mImplMutex) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> getContext() override { return mContext; }
     std::string getEffectName() override { return kEffectName; }
 
+    // Downmix overrides process() because its input and output frame sizes differ.
+    void process() override;
+
   private:
-    std::shared_ptr<DownmixContext> mContext;
-    ndk::ScopedAStatus getParameterDownmix(const Downmix::Tag& tag, Parameter::Specific* specific);
+    std::shared_ptr<DownmixContext> mContext GUARDED_BY(mImplMutex);
+    ndk::ScopedAStatus getParameterDownmix(const Downmix::Tag& tag, Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 };
 }  // namespace aidl::android::hardware::audio::effect
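
The header changes above (and the matching ones in the other effects below) replace each implementation's getContext()/unguarded mContext with members tied to the shared mImplMutex and annotate the overrides with Clang thread-safety attributes, so the -Wthread-safety flag already present in the effect Android.bp cflags can check the locking discipline at compile time. A self-contained sketch of how GUARDED_BY/REQUIRES behave, with the annotation macros defined locally instead of pulled from the Android headers:

// Build with: clang++ -std=c++17 -Wthread-safety -fsyntax-only thread_safety_sketch.cpp
#include <memory>
#include <mutex>

// Local stand-ins for the annotation macros normally provided by the Android headers.
#define CAPABILITY(x) __attribute__((capability(x)))
#define GUARDED_BY(x) __attribute__((guarded_by(x)))
#define REQUIRES(...) __attribute__((requires_capability(__VA_ARGS__)))
#define ACQUIRE(...) __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...) __attribute__((release_capability(__VA_ARGS__)))

struct CAPABILITY("mutex") Mutex {
    void lock() ACQUIRE() { mStd.lock(); }
    void unlock() RELEASE() { mStd.unlock(); }
  private:
    std::mutex mStd;
};

struct Context {};

class EffectSketch {
  public:
    void open() {
        mImplMutex.lock();
        releaseContext();  // OK: the analysis sees mImplMutex held on this path
        mImplMutex.unlock();
    }
    // void broken() { releaseContext(); }  // would warn: requires holding mutex 'mImplMutex'

  private:
    void releaseContext() REQUIRES(mImplMutex) { mContext.reset(); }

    Mutex mImplMutex;
    std::shared_ptr<Context> mContext GUARDED_BY(mImplMutex);
};
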
diff --git a/media/libeffects/dynamicsproc/Android.bp b/media/libeffects/dynamicsproc/Android.bp
index 7838117..9e154cf 100644
--- a/media/libeffects/dynamicsproc/Android.bp
+++ b/media/libeffects/dynamicsproc/Android.bp
@@ -86,9 +86,7 @@
     ],
 
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
         "dynamicsprocessingdefaults",
     ],
 
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index 1fed9a5..1fedea4 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -105,14 +105,14 @@
         DynamicsProcessing::EqBandConfig({.channel = 0,
                                           .band = 0,
                                           .enable = false,
-                                          .cutoffFrequencyHz = 20,
+                                          .cutoffFrequencyHz = 0,
                                           .gainDb = -200});
 
 static const DynamicsProcessing::EqBandConfig kEqBandConfigMax =
         DynamicsProcessing::EqBandConfig({.channel = std::numeric_limits<int>::max(),
                                           .band = std::numeric_limits<int>::max(),
                                           .enable = true,
-                                          .cutoffFrequencyHz = 20000,
+                                          .cutoffFrequencyHz = 192000,
                                           .gainDb = 200});
 
 static const Range::DynamicsProcessingRange kPreEqBandConfigRange = {
@@ -129,7 +129,7 @@
                         {.channel = 0,
                          .band = 0,
                          .enable = false,
-                         .cutoffFrequencyHz = 20,
+                         .cutoffFrequencyHz = 0,
                          .attackTimeMs = 0,
                          .releaseTimeMs = 0,
                          .ratio = 1,
@@ -144,7 +144,7 @@
                         {.channel = std::numeric_limits<int>::max(),
                          .band = std::numeric_limits<int>::max(),
                          .enable = true,
-                         .cutoffFrequencyHz = 20000,
+                         .cutoffFrequencyHz = 192000,
                          .attackTimeMs = 60000,
                          .releaseTimeMs = 60000,
                          .ratio = 50,
@@ -211,11 +211,12 @@
     RETURN_IF(common.input.base.format.pcm != common.output.base.format.pcm ||
                       common.input.base.format.pcm != PcmType::FLOAT_32_BIT,
               EX_ILLEGAL_ARGUMENT, "dataMustBe32BitsFloat");
+    std::lock_guard lg(mImplMutex);
     RETURN_OK_IF(mState != State::INIT);
-    auto context = createContext(common);
-    RETURN_IF(!context, EX_NULL_POINTER, "createContextFailed");
+    mImplContext = createContext(common);
+    RETURN_IF(!mContext || !mImplContext, EX_NULL_POINTER, "createContextFailed");
+    mEventFlag = mImplContext->getStatusEventFlag();
 
-    RETURN_IF_ASTATUS_NOT_OK(setParameterCommon(common), "setCommParamErr");
     if (specific.has_value()) {
         RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
     } else {
@@ -227,8 +228,8 @@
     }
 
     mState = State::IDLE;
-    context->dupeFmq(ret);
-    RETURN_IF(createThread(context, getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
+    mContext->dupeFmq(ret);
+    RETURN_IF(createThread(getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
               "FailedToCreateWorker");
     return ndk::ScopedAStatus::ok();
 }
@@ -443,7 +444,7 @@
 IEffect::Status DynamicsProcessingImpl::effectProcessImpl(float* in, float* out, int samples) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
     RETURN_VALUE_IF(!mContext, status, "nullContext");
-    return mContext->lvmProcess(in, out, samples);
+    return mContext->dpeProcess(in, out, samples);
 }
 
 }  // namespace aidl::android::hardware::audio::effect
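
The wider kEqBandConfigMin/Max bounds above (cutoffFrequencyHz now 0..192000 instead of 20..20000, with the matching MBC band changes) feed the capability Range pairs that incoming band configs are checked against. A simplified field-wise version of that check, with a stand-in struct for DynamicsProcessing::EqBandConfig (an assumption; the real validation goes through the framework's Range helpers):

#include <limits>

// Simplified stand-in for DynamicsProcessing::EqBandConfig; only the fields relevant
// to the range check are modeled here.
struct EqBandConfig {
    int channel;
    int band;
    bool enable;
    float cutoffFrequencyHz;
    float gainDb;
};

constexpr EqBandConfig kMin{0, 0, false, 0, -200};
constexpr EqBandConfig kMax{std::numeric_limits<int>::max(),
                            std::numeric_limits<int>::max(), true, 192000, 200};

// Field-wise "is the candidate inside [min, max]" check, the shape of test the widened
// range constants above let full-rate (192 kHz) streams pass.
constexpr bool inRange(const EqBandConfig& c) {
    return c.channel >= kMin.channel && c.channel <= kMax.channel &&
           c.band >= kMin.band && c.band <= kMax.band &&
           c.cutoffFrequencyHz >= kMin.cutoffFrequencyHz &&
           c.cutoffFrequencyHz <= kMax.cutoffFrequencyHz &&
           c.gainDb >= kMin.gainDb && c.gainDb <= kMax.gainDb;
}

static_assert(inRange({0, 0, true, 96000, 0}));  // accepted with the widened bounds
static_assert(!inRange({0, 0, true, -1, 0}));    // still rejected
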
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
index 1e1e72e..4897888 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -39,22 +39,25 @@
     ndk::ScopedAStatus open(const Parameter::Common& common,
                             const std::optional<Parameter::Specific>& specific,
                             OpenEffectReturn* ret) override;
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
-    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    RetCode releaseContext() override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process)
+            REQUIRES(mImplMutex) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> getContext() override { return mContext; }
     std::string getEffectName() override { return kEffectName; }
 
   private:
-    std::shared_ptr<DynamicsProcessingContext> mContext;
+    std::shared_ptr<DynamicsProcessingContext> mContext GUARDED_BY(mImplMutex);
     ndk::ScopedAStatus getParameterDynamicsProcessing(const DynamicsProcessing::Tag& tag,
-                                                      Parameter::Specific* specific);
+                                                      Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
     bool isParamInRange(const Parameter::Specific& specific);
 };
 
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 9d77135..042b063 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -19,6 +19,7 @@
 #include "DynamicsProcessingContext.h"
 #include "DynamicsProcessing.h"
 
+#include <audio_utils/power.h>
 #include <sys/param.h>
 #include <functional>
 #include <unordered_set>
@@ -62,12 +63,32 @@
 }
 
 RetCode DynamicsProcessingContext::setCommon(const Parameter::Common& common) {
+    if (auto ret = updateIOFrameSize(common); ret != RetCode::SUCCESS) {
+        return ret;
+    }
     mCommon = common;
     init();
     LOG(INFO) << __func__ << common.toString();
     return RetCode::SUCCESS;
 }
 
+RetCode DynamicsProcessingContext::setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) {
+    std::lock_guard lg(mMutex);
+    dp_fx::DPChannel* leftChannel = mDpFreq->getChannel(0);
+    dp_fx::DPChannel* rightChannel = mDpFreq->getChannel(1);
+    if (leftChannel != nullptr) {
+        leftChannel->setOutputGain(audio_utils_power_from_amplitude(volumeStereo.left));
+    }
+    if (rightChannel != nullptr) {
+        rightChannel->setOutputGain(audio_utils_power_from_amplitude(volumeStereo.right));
+    }
+    return RetCode::SUCCESS;
+}
+
+Parameter::VolumeStereo DynamicsProcessingContext::getVolumeStereo() {
+    return {1.0f, 1.0f};
+}
+
 void DynamicsProcessingContext::dpSetFreqDomainVariant_l(
         const DynamicsProcessing::EngineArchitecture& engine) {
     mDpFreq.reset(new dp_fx::DPFrequency());
@@ -273,7 +294,7 @@
     return ret;
 }
 
-IEffect::Status DynamicsProcessingContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status DynamicsProcessingContext::dpeProcess(float* in, float* out, int samples) {
     LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
 
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
@@ -294,9 +315,11 @@
 
 void DynamicsProcessingContext::init() {
     std::lock_guard lg(mMutex);
-    mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
-    mChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
-            mCommon.input.base.channelMask);
+    if (mState == DYNAMICS_PROCESSING_STATE_UNINITIALIZED) {
+        mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
+    }
+    mChannelCount = static_cast<int>(::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.input.base.channelMask));
 }
 
 dp_fx::DPChannel* DynamicsProcessingContext::getChannel_l(int channel) {
@@ -416,14 +439,25 @@
 template <typename T>
 bool DynamicsProcessingContext::validateBandConfig(const std::vector<T>& bands, int maxChannel,
                                                    int maxBand) {
-    std::vector<float> freqs(bands.size(), -1);
+    std::map<int, float> freqs;
     for (auto band : bands) {
-        if (!validateChannel(band.channel, maxChannel)) return false;
-        if (!validateBand(band.band, maxBand)) return false;
+        if (!validateChannel(band.channel, maxChannel)) {
+            LOG(ERROR) << __func__ << " " << band.toString() << " invalid, maxCh " << maxChannel;
+            return false;
+        }
+        if (!validateBand(band.band, maxBand)) {
+            LOG(ERROR) << __func__ << " " << band.toString() << " invalid, maxBand " << maxBand;
+            return false;
+        }
+        if (freqs.find(band.band) != freqs.end()) {
+            LOG(ERROR) << __func__ << " " << band.toString() << " found duplicate";
+            return false;
+        }
         freqs[band.band] = band.cutoffFrequencyHz;
     }
-    if (std::count(freqs.begin(), freqs.end(), -1)) return false;
-    return std::is_sorted(freqs.begin(), freqs.end());
+    return std::is_sorted(freqs.begin(), freqs.end(), [](const auto& a, const auto& b) {
+        return a.second <= b.second;  // band indices are already ordered by the map key
+    });
 }
 
 bool DynamicsProcessingContext::validateLimiterConfig(
@@ -449,6 +483,11 @@
     RetCode ret = RetCode::SUCCESS;
     std::unordered_set<int> channelSet;
 
+    if (!stageInUse) {
+        LOG(WARNING) << __func__ << " stage not in use, skipping "
+                     << ::android::internal::ToString(channels);
+        return RetCode::SUCCESS;
+    }
+
     RETURN_VALUE_IF(!stageInUse, RetCode::ERROR_ILLEGAL_PARAMETER, "stageNotInUse");
     for (auto& it : channels) {
         if (0 != channelSet.count(it.channel)) {
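
validateBandConfig() above now collects cutoff frequencies into a std::map keyed by band index: the map rejects duplicate band entries, and because iteration is ordered by key a single pass can confirm the cutoffs grow with the band index. A standalone sketch of the same idea (the Band struct and the exact bound conventions are assumptions, and std::adjacent_find is used instead of is_sorted with a <= comparator to stay within the standard's comparator requirements):

#include <algorithm>
#include <map>
#include <vector>

// Stand-in for the AIDL band config types; only the fields used by the check are modeled.
struct Band { int channel; int band; float cutoffFrequencyHz; };

bool validateBandConfig(const std::vector<Band>& bands, int maxChannel, int maxBand) {
    std::map<int, float> freqs;  // keyed by band index, so iteration is ordered by band
    for (const auto& b : bands) {
        if (b.channel < 0 || b.channel >= maxChannel) return false;  // illustrative bounds check
        if (b.band < 0 || b.band >= maxBand) return false;
        if (!freqs.emplace(b.band, b.cutoffFrequencyHz).second) return false;  // duplicate band
    }
    // Walking the map in band order, every cutoff must lie strictly above the previous one.
    return std::adjacent_find(freqs.begin(), freqs.end(),
                              [](const auto& lo, const auto& hi) { return hi.second <= lo.second; })
           == freqs.end();
}
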
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index b8539f6..839c6dd 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -45,6 +45,8 @@
 
     // override EffectContext::setCommon to update mChannelCount
     RetCode setCommon(const Parameter::Common& common) override;
+    RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
+    Parameter::VolumeStereo getVolumeStereo() override;
 
     RetCode setEngineArchitecture(const DynamicsProcessing::EngineArchitecture& engineArchitecture);
     RetCode setPreEq(const std::vector<DynamicsProcessing::ChannelConfig>& eqChannels);
@@ -66,13 +68,13 @@
     std::vector<DynamicsProcessing::LimiterConfig> getLimiter();
     std::vector<DynamicsProcessing::InputGain> getInputGain();
 
-    IEffect::Status lvmProcess(float* in, float* out, int samples);
+    IEffect::Status dpeProcess(float* in, float* out, int samples);
 
   private:
     static constexpr float kPreferredProcessingDurationMs = 10.0f;
     static constexpr int kBandCount = 5;
     std::mutex mMutex;
-    size_t mChannelCount GUARDED_BY(mMutex) = 0;
+    int mChannelCount GUARDED_BY(mMutex) = 0;
     DynamicsProcessingState mState GUARDED_BY(mMutex) = DYNAMICS_PROCESSING_STATE_UNINITIALIZED;
     std::unique_ptr<dp_fx::DPFrequency> mDpFreq GUARDED_BY(mMutex) = nullptr;
     bool mEngineInited GUARDED_BY(mMutex) = false;
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index fc80211..cc19a80 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -75,9 +75,7 @@
     ],
 
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
         "hapticgeneratordefaults",
     ],
 
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
index fe9616a..53dcd49 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
@@ -33,16 +33,18 @@
         LOG(DEBUG) << __func__;
     }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
-    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    RetCode releaseContext() override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process)
+            REQUIRES(mImplMutex) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> getContext() override { return mContext; }
     std::string getEffectName() override { return kEffectName; }
 
   private:
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index de44e05..354ee00 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <cstddef>
 #define LOG_TAG "AHAL_HapticGeneratorContext"
 
 #include <Utils.h>
@@ -162,8 +163,8 @@
     }
 
     // Construct input buffer according to haptic channel source
-    for (size_t i = 0; i < mFrameCount; ++i) {
-        for (size_t j = 0; j < mParams.mHapticChannelCount; ++j) {
+    for (int64_t i = 0; i < mFrameCount; ++i) {
+        for (int j = 0; j < mParams.mHapticChannelCount; ++j) {
             mInputBuffer[i * mParams.mHapticChannelCount + j] =
                     in[i * mParams.mAudioChannelCount + mParams.mHapticChannelSource[j]];
         }
@@ -180,8 +181,7 @@
     // buffer, which contains haptic data at the end of the buffer, directly to sink buffer.
     // In that case, copy haptic data to input buffer instead of output buffer.
     // Note: this may not work with rpc/binder calls
-    int offset = samples;
-    for (int i = 0; i < hapticSampleCount; ++i) {
+    for (size_t i = 0; i < hapticSampleCount; ++i) {
         in[samples + i] = hapticOutBuffer[i];
     }
     return {STATUS_OK, samples, static_cast<int32_t>(samples + hapticSampleCount)};
@@ -199,7 +199,7 @@
     mParams.mHapticChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
             outputChMask, media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
     LOG_ALWAYS_FATAL_IF(mParams.mHapticChannelCount > 2, "haptic channel count is too large");
-    for (size_t i = 0; i < mParams.mHapticChannelCount; ++i) {
+    for (int i = 0; i < mParams.mHapticChannelCount; ++i) {
         // By default, use the first audio channel to generate haptic channels.
         mParams.mHapticChannelSource[i] = 0;
     }
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index a0a0a4c..26e69e4 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -92,7 +92,7 @@
     HapticGeneratorState mState;
     HapticGeneratorParam mParams GUARDED_BY(mMutex);
     int mSampleRate;
-    int mFrameCount = 0;
+    int64_t mFrameCount = 0;
 
     // A cache for all shared pointers of the HapticGenerator
     struct HapticGeneratorProcessorsRecord mProcessorsRecord;
diff --git a/media/libeffects/loudness/Android.bp b/media/libeffects/loudness/Android.bp
index 7acba11..05bbec3 100644
--- a/media/libeffects/loudness/Android.bp
+++ b/media/libeffects/loudness/Android.bp
@@ -54,9 +54,7 @@
         ":effectCommonFile",
     ],
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
     ],
     header_libs: [
         "libaudioeffects",
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
index 5b9e924..e2e716c 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
@@ -33,22 +33,25 @@
         LOG(DEBUG) << __func__;
     }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
-    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    RetCode releaseContext() override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process)
+            REQUIRES(mImplMutex) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> getContext() override { return mContext; }
     std::string getEffectName() override { return kEffectName; }
 
   private:
-    std::shared_ptr<LoudnessEnhancerContext> mContext;
+    std::shared_ptr<LoudnessEnhancerContext> mContext GUARDED_BY(mImplMutex);
     ndk::ScopedAStatus getParameterLoudnessEnhancer(const LoudnessEnhancer::Tag& tag,
-                                                    Parameter::Specific* specific);
+                                                    Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp
index 578f58a..33f6779 100644
--- a/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp
+++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp
@@ -89,8 +89,7 @@
   } else {
     state_ = alpha_release_ * state_ + (1.0f - alpha_release_) * cv;
   }
-  compressor_gain_ *=
-      math::ExpApproximationViaTaylorExpansionOrder5(state_ - prev_state);
+  compressor_gain_ *= expf(state_ - prev_state);
   x *= compressor_gain_;
   if (x > kFixedPointLimit) {
     return kFixedPointLimit;
@@ -118,8 +117,7 @@
   } else {
     state_ = alpha_release_ * state_ + (1.0f - alpha_release_) * cv;
   }
-  compressor_gain_ *=
-      math::ExpApproximationViaTaylorExpansionOrder5(state_ - prev_state);
+  compressor_gain_ *= expf(state_ - prev_state);
   *x1 *= compressor_gain_;
   if (*x1 > kFixedPointLimit) {
     *x1 = kFixedPointLimit;
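
The change above swaps a 5th-order Taylor approximation of exp() for the libm call in the compressor gain update. A quick standalone comparison shows why a truncated series is risky here: it degrades fast once |state_ - prev_state| exceeds roughly 1, and for strongly negative inputs it can even go negative. The polynomial below is the textbook expansion and only a stand-in for the removed helper, whose exact implementation is not shown in this patch:

#include <cmath>
#include <cstdio>

// Illustrative 5th-order Taylor expansion of e^x around 0.
static float ExpTaylorOrder5(float x) {
    return 1.0f + x * (1.0f + x * (1.0f / 2 + x * (1.0f / 6 + x * (1.0f / 24 + x * (1.0f / 120)))));
}

int main() {
    for (float x : {-3.0f, -1.0f, -0.1f, 0.1f, 1.0f, 3.0f}) {
        std::printf("x=%5.1f  taylor=%10.5f  exp=%10.5f\n", x, ExpTaylorOrder5(x), std::exp(x));
    }
    return 0;
}
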
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index 0db7a73..bb7e4c6 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -20,6 +20,7 @@
 #define LOG_TAG "BundleContext"
 #include <android-base/logging.h>
 #include <audio_utils/power.h>
+#include <media/AidlConversionCppNdk.h>
 #include <Utils.h>
 
 #include "BundleContext.h"
@@ -32,13 +33,34 @@
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioDeviceType;
 
+BundleContext::BundleContext(int statusDepth, const Parameter::Common& common,
+              const lvm::BundleEffectType& type)
+        : EffectContext(statusDepth, common), mType(type) {
+    LOG(DEBUG) << __func__ << type;
+
+    int inputChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            common.input.base.channelMask);
+    mSamplesPerSecond = common.input.base.sampleRate * inputChannelCount;
+}
+
+BundleContext::~BundleContext() {
+    LOG(DEBUG) << __func__;
+    deInit();
+}
+
 RetCode BundleContext::init() {
     std::lock_guard lg(mMutex);
     // init with pre-defined preset NORMAL
     for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-        mBandGainMdB[i] = lvm::kSoftPresets[0 /* normal */][i] * 100;
+        mBandGainmB[i] = lvm::kSoftPresets[0 /* normal */][i] * 100;
     }
 
+    // Initialise control params
+    LVM_ControlParams_t controlParams;
+    RetCode retStatus = initControlParameter(controlParams);
+    RETURN_VALUE_IF(retStatus != RetCode::SUCCESS, RetCode::ERROR_ILLEGAL_PARAMETER,
+                    " UnsupportedParams");
+
     // allocate lvm instance
     LVM_ReturnStatus_en status;
     LVM_InstParams_t params = {.BufferMode = LVM_UNMANAGED_BUFFERS,
@@ -49,8 +71,6 @@
     GOTO_IF_LVM_ERROR(status, deinit, "LVM_GetInstanceHandleFailed");
 
     // set control
-    LVM_ControlParams_t controlParams;
-    initControlParameter(controlParams);
     status = LVM_SetControlParameters(mInstance, &controlParams);
     GOTO_IF_LVM_ERROR(status, deinit, "LVM_SetControlParametersFailed");
 
@@ -213,8 +233,8 @@
         bool viEnabled = params.VirtualizerOperatingMode == LVM_MODE_ON;
 
         if (eqEnabled) {
-            for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-                float bandFactor = mBandGainMdB[i] / 1500.0;
+            for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+                float bandFactor = mBandGainmB[i] / 1500.0;
                 float bandCoefficient = lvm::kBandEnergyCoefficient[i];
                 float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
                 if (bandEnergy > 0) energyContribution += bandEnergy;
@@ -222,9 +242,9 @@
 
             // cross EQ coefficients
             float bandFactorSum = 0;
-            for (int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
-                float bandFactor1 = mBandGainMdB[i] / 1500.0;
-                float bandFactor2 = mBandGainMdB[i + 1] / 1500.0;
+            for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
+                float bandFactor1 = mBandGainmB[i] / 1500.0;
+                float bandFactor2 = mBandGainmB[i + 1] / 1500.0;
 
                 if (bandFactor1 > 0 && bandFactor2 > 0) {
                     float crossEnergy =
@@ -245,8 +265,8 @@
             energyContribution += boostFactor * boostCoefficient * boostCoefficient;
 
             if (eqEnabled) {
-                for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-                    float bandFactor = mBandGainMdB[i] / 1500.0;
+                for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+                    float bandFactor = mBandGainmB[i] / 1500.0;
                     float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
                     float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
                     if (bandEnergy > 0) energyBassBoost += bandEnergy;
@@ -298,7 +318,9 @@
             device != AudioDeviceDescription{AudioDeviceType::OUT_CARKIT,
                                              AudioDeviceDescription::CONNECTION_BT_SCO} &&
             device != AudioDeviceDescription{AudioDeviceType::OUT_SPEAKER,
-                                             AudioDeviceDescription::CONNECTION_BT_A2DP}) {
+                                             AudioDeviceDescription::CONNECTION_BT_A2DP} &&
+            device != AudioDeviceDescription{AudioDeviceType::OUT_SUBMIX,
+                                             AudioDeviceDescription::CONNECTION_VIRTUAL}) {
             return false;
         }
     }
@@ -315,7 +337,9 @@
             device != AudioDeviceDescription{AudioDeviceType::OUT_HEADPHONE,
                                              AudioDeviceDescription::CONNECTION_BT_A2DP} &&
             device != AudioDeviceDescription{AudioDeviceType::OUT_HEADSET,
-                                             AudioDeviceDescription::CONNECTION_USB}) {
+                                             AudioDeviceDescription::CONNECTION_USB} &&
+            device != AudioDeviceDescription{AudioDeviceType::OUT_SUBMIX,
+                                             AudioDeviceDescription::CONNECTION_VIRTUAL}) {
             return false;
         }
     }
@@ -407,7 +431,6 @@
 
 RetCode BundleContext::setVolumeStereo(const Parameter::VolumeStereo& volume) {
     LVM_ControlParams_t params;
-    LVM_ReturnStatus_en status = LVM_SUCCESS;
 
     // Convert volume to dB
     float leftdB = VolToDb(volume.left);
@@ -456,6 +479,7 @@
 RetCode BundleContext::setEqualizerBandLevels(const std::vector<Equalizer::BandLevel>& bandLevels) {
     RETURN_VALUE_IF(bandLevels.size() > lvm::MAX_NUM_BANDS || bandLevels.empty(),
                     RetCode::ERROR_ILLEGAL_PARAMETER, "sizeExceedMax");
+
     RetCode ret = updateControlParameter(bandLevels);
     if (RetCode::SUCCESS == ret) {
         mCurPresetIdx = lvm::PRESET_CUSTOM;
@@ -470,15 +494,13 @@
     std::vector<Equalizer::BandLevel> bandLevels;
     bandLevels.reserve(lvm::MAX_NUM_BANDS);
     for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
-        bandLevels.emplace_back(
-                Equalizer::BandLevel{static_cast<int32_t>(i), mBandGainMdB[i]});
+        bandLevels.emplace_back(Equalizer::BandLevel{static_cast<int32_t>(i), mBandGainmB[i]});
     }
     return bandLevels;
 }
 
 std::vector<int32_t> BundleContext::getEqualizerCenterFreqs() {
     std::vector<int32_t> freqs;
-
     LVM_ControlParams_t params;
     {
         std::lock_guard lg(mMutex);
@@ -498,14 +520,14 @@
     const auto [min, max] =
             std::minmax_element(bandLevels.begin(), bandLevels.end(),
                                 [](const auto& a, const auto& b) { return a.index < b.index; });
-    return min->index >= 0 && max->index < lvm::MAX_NUM_BANDS;
+    return min->index >= 0 && static_cast<size_t>(max->index) < lvm::MAX_NUM_BANDS;
 }
 
 RetCode BundleContext::updateControlParameter(const std::vector<Equalizer::BandLevel>& bandLevels) {
     RETURN_VALUE_IF(!isBandLevelIndexInRange(bandLevels), RetCode::ERROR_ILLEGAL_PARAMETER,
                     "indexOutOfRange");
 
-    std::array<int, lvm::MAX_NUM_BANDS> tempLevel(mBandGainMdB);
+    std::array<int, lvm::MAX_NUM_BANDS> tempLevel(mBandGainmB);
     for (const auto& it : bandLevels) {
         tempLevel[it.index] = it.levelMb;
     }
@@ -526,8 +548,8 @@
         RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
                         RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
     }
-    mBandGainMdB = tempLevel;
-    LOG(DEBUG) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGainMdB)
+    mBandGainmB = tempLevel;
+    LOG(DEBUG) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGainmB)
                << "mdB";
 
     return RetCode::SUCCESS;
@@ -605,11 +627,30 @@
     return RetCode::SUCCESS;
 }
 
-void BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
+RetCode BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
+    int outputChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.output.base.channelMask);
+    auto outputChannelMaskConv = aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+            mCommon.output.base.channelMask, /*isInput*/ false);
+    RETURN_VALUE_IF(!outputChannelMaskConv.ok(), RetCode::ERROR_ILLEGAL_PARAMETER,
+                    " outputChannelMaskNotValid");
+
+    params.NrChannels = outputChannelCount;
+    params.ChMask = outputChannelMaskConv.value();
+    params.SampleRate = lvmFsForSampleRate(mCommon.input.base.sampleRate);
+
+    int inputChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.input.base.channelMask);
+    if (inputChannelCount == 1) {
+        params.SourceFormat = LVM_MONO;
+    } else if (inputChannelCount == 2) {
+        params.SourceFormat = LVM_STEREO;
+    } else if (inputChannelCount > 2 && inputChannelCount <= LVM_MAX_CHANNELS) {
+        params.SourceFormat = LVM_MULTICHANNEL;
+    }
+
     /* General parameters */
     params.OperatingMode = LVM_MODE_ON;
-    params.SampleRate = LVM_FS_44100;
-    params.SourceFormat = LVM_STEREO;
     params.SpeakerType = LVM_HEADPHONES;
 
     /* Concert Sound parameters */
@@ -644,13 +685,7 @@
     params.PSA_Enable = LVM_PSA_OFF;
     params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
 
-    /* TE Control parameters */
-    params.TE_OperatingMode = LVM_TE_OFF;
-    params.TE_EffectLevel = 0;
-
-    params.NrChannels = audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
-    params.ChMask = AUDIO_CHANNEL_OUT_STEREO;
-    params.SourceFormat = LVM_STEREO;
+    return RetCode::SUCCESS;
 }
 
 void BundleContext::initHeadroomParameter(LVM_HeadroomParams_t& params) const {
@@ -819,13 +854,13 @@
             LOG(DEBUG) << "Effect_process() processing last frame";
         }
         mNumberEffectsCalled = 0;
-        LVM_UINT16 frames = samples * sizeof(float) / frameSize;
         float* outTmp = (accumulate ? getWorkBuffer() : out);
         /* Process the samples */
         LVM_ReturnStatus_en lvmStatus;
         {
             std::lock_guard lg(mMutex);
-            lvmStatus = LVM_Process(mInstance, in, outTmp, frames, 0);
+
+            lvmStatus = LVM_Process(mInstance, in, outTmp, inputFrameCount, 0);
             if (lvmStatus != LVM_SUCCESS) {
                 LOG(ERROR) << __func__ << lvmStatus;
                 return {EX_UNSUPPORTED_OPERATION, 0, 0};
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index 62bb6e4..809f402 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -29,14 +29,8 @@
 class BundleContext final : public EffectContext {
   public:
     BundleContext(int statusDepth, const Parameter::Common& common,
-                  const lvm::BundleEffectType& type)
-        : EffectContext(statusDepth, common), mType(type) {
-        LOG(DEBUG) << __func__ << type;
-    }
-    ~BundleContext() override {
-        LOG(DEBUG) << __func__;
-        deInit();
-    }
+                  const lvm::BundleEffectType& type);
+    ~BundleContext();
 
     RetCode init();
     void deInit();
@@ -47,15 +41,6 @@
     RetCode disable();
     RetCode disableOperatingMode();
 
-    void setSampleRate(const int sampleRate) { mSampleRate = sampleRate; }
-    int getSampleRate() const { return mSampleRate; }
-
-    void setChannelMask(const aidl::android::media::audio::common::AudioChannelLayout& chMask) {
-        mChMask = chMask;
-    }
-    aidl::android::media::audio::common::AudioChannelLayout getChannelMask() const {
-        return mChMask;
-    }
     bool isDeviceSupportedBassBoost(
             const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>&
                     devices);
@@ -98,7 +83,7 @@
             const Virtualizer::SpeakerAnglesPayload payload);
 
     RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
-    Parameter::VolumeStereo getVolumeStereo() override { return mVolumeStereo; }
+    Parameter::VolumeStereo getVolumeStereo() override { return {1.0f, 1.0f}; }
 
     IEffect::Status lvmProcess(float* in, float* out, int samples);
 
@@ -111,9 +96,7 @@
     LVM_Handle_t mInstance GUARDED_BY(mMutex);
 
     aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
-    aidl::android::media::audio::common::AudioChannelLayout mChMask;
 
-    int mSampleRate = LVM_FS_44100;
     int mSamplesPerSecond = 0;
     int mSamplesToExitCountEq = 0;
     int mSamplesToExitCountBb = 0;
@@ -135,7 +118,7 @@
     int mBassStrengthSaved = 0;
     // Equalizer
     int mCurPresetIdx = lvm::PRESET_CUSTOM; /* Current preset being used */
-    std::array<int, lvm::MAX_NUM_BANDS> mBandGainMdB; /* band gain in millibels */
+    std::array<int, lvm::MAX_NUM_BANDS> mBandGainmB; /* band gain in millibels */
     // Virtualizer
     int mVirtStrengthSaved = 0; /* Conversion between Get/Set */
     bool mVirtualizerTempDisabled = false;
@@ -145,7 +128,7 @@
     float mVolume = 0;
     bool mMuteEnabled = false; /* Must store as mute = -96dB level */
 
-    void initControlParameter(LVM_ControlParams_t& params) const;
+    RetCode initControlParameter(LVM_ControlParams_t& params) const;
     void initHeadroomParameter(LVM_HeadroomParams_t& params) const;
     RetCode limitLevel();
     static float VolToDb(float vol);
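
BundleContext::setVolumeStereo() converts the incoming linear volumes with VolToDb() before handing them to LVM, while getVolumeStereo() now reports unity volume ({1.0f, 1.0f}). VolToDb()'s body is outside this hunk; the sketch below is only the conventional amplitude-to-dB shape it presumably takes, with the -96 dB floor borrowed from the "mute = -96dB level" comment kept in BundleContext.h (both assumptions):

#include <algorithm>
#include <cmath>

// Conventional linear-amplitude -> dB conversion with a mute floor. Assumption: the real
// BundleContext::VolToDb() follows this general shape; the exact quantization it applies
// is not shown in this patch.
static float VolToDb(float vol) {
    if (vol <= 0.0f) return -96.0f;
    const float dB = 20.0f * std::log10(vol);
    return std::clamp(dB, -96.0f, 0.0f);
}
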
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index 3148d36..257e972 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -425,10 +425,6 @@
     return mContext;
 }
 
-std::shared_ptr<EffectContext> EffectBundleAidl::getContext() {
-    return mContext;
-}
-
 RetCode EffectBundleAidl::releaseContext() {
     if (mContext) {
         GlobalSession::getGlobalSession().releaseSession(mType, mContext->getSessionId());
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
index ec1abe8..429e941 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
@@ -36,41 +36,47 @@
     ~EffectBundleAidl() override;
 
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterCommon(const Parameter& param) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
+    ndk::ScopedAStatus setParameterCommon(const Parameter& param) REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    std::shared_ptr<EffectContext> getContext() override;
-    RetCode releaseContext() override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
+            REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
 
     std::string getEffectName() override { return *mEffectName; }
 
   private:
-    std::shared_ptr<BundleContext> mContext;
+    std::shared_ptr<BundleContext> mContext GUARDED_BY(mImplMutex);
     const Descriptor* mDescriptor;
     const std::string* mEffectName;
     lvm::BundleEffectType mType = lvm::BundleEffectType::EQUALIZER;
 
     IEffect::Status status(binder_status_t status, size_t consumed, size_t produced);
 
-    ndk::ScopedAStatus setParameterBassBoost(const Parameter::Specific& specific);
-    ndk::ScopedAStatus getParameterBassBoost(const BassBoost::Id& id,
-                                             Parameter::Specific* specific);
+    ndk::ScopedAStatus setParameterBassBoost(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
+    ndk::ScopedAStatus getParameterBassBoost(const BassBoost::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 
-    ndk::ScopedAStatus setParameterEqualizer(const Parameter::Specific& specific);
-    ndk::ScopedAStatus getParameterEqualizer(const Equalizer::Id& id,
-                                             Parameter::Specific* specific);
-    ndk::ScopedAStatus setParameterVolume(const Parameter::Specific& specific);
-    ndk::ScopedAStatus getParameterVolume(const Volume::Id& id, Parameter::Specific* specific);
-    ndk::ScopedAStatus setParameterVirtualizer(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterEqualizer(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
+    ndk::ScopedAStatus getParameterEqualizer(const Equalizer::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
+    ndk::ScopedAStatus setParameterVolume(const Parameter::Specific& specific) REQUIRES(mImplMutex);
+    ndk::ScopedAStatus getParameterVolume(const Volume::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
+    ndk::ScopedAStatus setParameterVirtualizer(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterVirtualizer(const Virtualizer::Id& id,
-                                               Parameter::Specific* specific);
+                                               Parameter::Specific* specific) REQUIRES(mImplMutex);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index fa300d2..da5346f 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -110,9 +110,7 @@
     ],
     static_libs: ["libmusicbundle"],
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
     ],
     local_include_dirs: ["Aidl"],
     header_libs: [
@@ -120,10 +118,15 @@
         "libhardware_headers",
     ],
     shared_libs: [
+        "libaudio_aidl_conversion_common_ndk",
+        "libaudioutils",
+        "libbinder",
         "liblog",
+        "libstagefright_foundation",
     ],
     cflags: [
         "-Wthread-safety",
+        "-DBACKEND_NDK",
     ],
     relative_install_path: "soundfx",
     visibility: [
@@ -140,9 +143,7 @@
     ],
     static_libs: ["libreverb"],
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
     ],
     local_include_dirs: ["Reverb/aidl"],
     header_libs: [
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index df64676..1a37622 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -17,7 +17,7 @@
 #ifndef LVM_FLOAT
 typedef float LVM_FLOAT;
 #endif
-#define LOG_TAG "Bundle"
+#define LOG_TAG "EffectBundle"
 #define ARRAY_SIZE(array) (sizeof(array) / sizeof(array)[0])
 //#define LOG_NDEBUG 0
 
@@ -1191,11 +1191,13 @@
 //  0            if the configuration is supported
 //----------------------------------------------------------------------------
 int VirtualizerIsDeviceSupported(audio_devices_t deviceType) {
+    ALOGV("%s: deviceType:%#x", __func__, deviceType);
     switch (deviceType) {
         case AUDIO_DEVICE_OUT_WIRED_HEADSET:
         case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
         case AUDIO_DEVICE_OUT_USB_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
             // case AUDIO_DEVICE_OUT_USB_DEVICE:  // For USB testing of the virtualizer only.
             return 0;
         default:
@@ -3372,10 +3374,10 @@
             if (pContext->EffectType == LVM_BASS_BOOST) {
                 if ((device == AUDIO_DEVICE_OUT_SPEAKER) ||
                     (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) ||
-                    (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)) {
-                    ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_BASS_BOOST %d",
-                          *(int32_t*)pCmdData);
-                    ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_BAS_BOOST");
+                     device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER ||
+                     device == AUDIO_DEVICE_OUT_BLE_SPEAKER) {
+                    ALOGV("%s: EFFECT_CMD_SET_DEVICE device %#x is invalid for LVM_BASS_BOOST",
+                            __func__, device);
 
                     // If a device doesn't support bassboost the effect must be temporarily disabled
                     // the effect must still report its original state as this can only be changed
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
index b49d109..f9afe69 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -358,10 +358,6 @@
     return mContext;
 }
 
-std::shared_ptr<EffectContext> EffectReverb::getContext() {
-    return mContext;
-}
-
 RetCode EffectReverb::releaseContext() {
     if (mContext) {
         mContext.reset();
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
index d7d2bbd..e0771a1 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
@@ -30,35 +30,41 @@
 
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
 
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    std::shared_ptr<EffectContext> getContext() override;
-    RetCode releaseContext() override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
+            REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
 
     std::string getEffectName() override { return *mEffectName; }
 
   private:
-    std::shared_ptr<ReverbContext> mContext;
+    std::shared_ptr<ReverbContext> mContext GUARDED_BY(mImplMutex);
     const Descriptor* mDescriptor;
     const std::string* mEffectName;
     lvm::ReverbEffectType mType;
 
     IEffect::Status status(binder_status_t status, size_t consumed, size_t produced);
 
-    ndk::ScopedAStatus setParameterPresetReverb(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterPresetReverb(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterPresetReverb(const PresetReverb::Id& id,
-                                                Parameter::Specific* specific);
+                                                Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 
-    ndk::ScopedAStatus setParameterEnvironmentalReverb(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterEnvironmentalReverb(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterEnvironmentalReverb(const EnvironmentalReverb::Id& id,
-                                                       Parameter::Specific* specific);
+                                                       Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 79e67f2..468b268 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -329,7 +329,7 @@
  */
 
 int ReverbContext::convertLevel(int level) {
-    for (int i = 0; i < kLevelMapping.size(); i++) {
+    for (std::size_t i = 0; i < kLevelMapping.size(); i++) {
         if (level <= kLevelMapping[i]) {
             return i;
         }
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
index 9bb0b1a..d11a081 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
@@ -81,7 +81,12 @@
     bool getEnvironmentalReverbBypass() const { return mBypass; }
 
     RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
-    Parameter::VolumeStereo getVolumeStereo() override { return mVolumeStereo; }
+    Parameter::VolumeStereo getVolumeStereo() override {
+        if (isAuxiliary()) {
+            return mVolumeStereo;
+        }
+        return {1.0f, 1.0f};
+    }
 
     RetCode setReflectionsDelay(int delay) {
         mReflectionsDelayMs = delay;
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index d018c47..564eb36 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -67,9 +67,7 @@
         ":effectCommonFile",
     ],
     defaults: [
-        "aidlaudioservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
+        "aidlaudioeffectservice_defaults",
     ],
     local_include_dirs: ["aidl"],
     shared_libs: [
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
index e8ae8b3..7552804 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
@@ -412,10 +412,6 @@
     return mContext;
 }
 
-std::shared_ptr<EffectContext> EffectPreProcessing::getContext() {
-    return mContext;
-}
-
 RetCode EffectPreProcessing::releaseContext() {
     if (mContext) {
         PreProcessingSession::getPreProcessingSession().releaseSession(mType,
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
index fad848a..9ce5597 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
@@ -31,41 +31,51 @@
 
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
 
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    std::shared_ptr<EffectContext> getContext() override;
-    RetCode releaseContext() override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
+            REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
 
     std::string getEffectName() override { return *mEffectName; }
 
   private:
-    std::shared_ptr<PreProcessingContext> mContext;
+    std::shared_ptr<PreProcessingContext> mContext GUARDED_BY(mImplMutex);
     const Descriptor* mDescriptor;
     const std::string* mEffectName;
     PreProcessingEffectType mType;
 
-    ndk::ScopedAStatus setParameterAcousticEchoCanceler(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterAcousticEchoCanceler(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterAcousticEchoCanceler(const AcousticEchoCanceler::Id& id,
-                                                        Parameter::Specific* specific);
+                                                        Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 
-    ndk::ScopedAStatus setParameterAutomaticGainControlV1(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterAutomaticGainControlV1(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterAutomaticGainControlV1(const AutomaticGainControlV1::Id& id,
-                                                          Parameter::Specific* specific);
+                                                          Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 
-    ndk::ScopedAStatus setParameterAutomaticGainControlV2(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterAutomaticGainControlV2(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterAutomaticGainControlV2(const AutomaticGainControlV2::Id& id,
-                                                          Parameter::Specific* specific);
+                                                          Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 
-    ndk::ScopedAStatus setParameterNoiseSuppression(const Parameter::Specific& specific);
+    ndk::ScopedAStatus setParameterNoiseSuppression(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex);
     ndk::ScopedAStatus getParameterNoiseSuppression(const NoiseSuppression::Id& id,
-                                                    Parameter::Specific* specific);
+                                                    Parameter::Specific* specific)
+            REQUIRES(mImplMutex);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
index c1e4eda..2c44e5c 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
@@ -141,6 +141,9 @@
 }
 
 RetCode PreProcessingContext::setCommon(const Parameter::Common& common) {
+    if (auto ret = updateIOFrameSize(common); ret != RetCode::SUCCESS) {
+        return ret;
+    }
     mCommon = common;
     updateConfigs(common);
     return RetCode::SUCCESS;
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
index cf782f7..a8b665b 100644
--- a/media/libeffects/visualizer/Android.bp
+++ b/media/libeffects/visualizer/Android.bp
@@ -60,8 +60,6 @@
     ],
     defaults: [
         "aidlaudioeffectservice_defaults",
-        "latest_android_hardware_audio_effect_ndk_shared",
-        "latest_android_media_audio_common_types_ndk_shared",
         "visualizer_defaults",
     ],
     cflags: [
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 53bfb41..0303842 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -73,7 +73,7 @@
                           .proxy = std::nullopt},
                    .flags = {.type = Flags::Type::INSERT,
                              .insert = Flags::Insert::LAST,
-                             .volume = Flags::Volume::CTRL},
+                             .volume = Flags::Volume::NONE},
                    .name = VisualizerImpl::kEffectName,
                    .implementor = "The Android Open Source Project"},
         .capability = VisualizerImpl::kCapability};
diff --git a/media/libeffects/visualizer/aidl/Visualizer.h b/media/libeffects/visualizer/aidl/Visualizer.h
index ec725db..b48c85e 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.h
+++ b/media/libeffects/visualizer/aidl/Visualizer.h
@@ -35,23 +35,25 @@
         LOG(DEBUG) << __func__;
     }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
-    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
-    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
-                                            Parameter::Specific* specific) override;
-    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
-    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
-    RetCode releaseContext() override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
+            REQUIRES(mImplMutex) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
+            REQUIRES(mImplMutex) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process)
+            REQUIRES(mImplMutex) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
+            REQUIRES(mImplMutex) override;
+    RetCode releaseContext() REQUIRES(mImplMutex) override;
 
-    std::shared_ptr<EffectContext> getContext() override { return mContext; }
     std::string getEffectName() override { return kEffectName; }
 
   private:
     static const std::vector<Range::VisualizerRange> kRanges;
-    std::shared_ptr<VisualizerContext> mContext;
+    std::shared_ptr<VisualizerContext> mContext GUARDED_BY(mImplMutex);
     ndk::ScopedAStatus getParameterVisualizer(const Visualizer::Tag& tag,
-                                                    Parameter::Specific* specific);
+                                              Parameter::Specific* specific) REQUIRES(mImplMutex);
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 5d0d08d..5d2bb3a 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -61,6 +61,7 @@
 #endif
     mChannelCount = channelCount;
     mCommon = common;
+    std::fill(mCaptureBuf.begin(), mCaptureBuf.end(), 0x80);
     return RetCode::SUCCESS;
 }
 
@@ -84,7 +85,7 @@
 
 void VisualizerContext::reset() {
     std::lock_guard lg(mMutex);
-    std::fill_n(mCaptureBuf.begin(), kMaxCaptureBufSize, 0x80);
+    std::fill(mCaptureBuf.begin(), mCaptureBuf.end(), 0x80);
 }
 
 RetCode VisualizerContext::setCaptureSamples(int samples) {
@@ -190,13 +191,12 @@
 }
 
 std::vector<uint8_t> VisualizerContext::capture() {
-    std::vector<uint8_t> result;
     std::lock_guard lg(mMutex);
+    uint32_t captureSamples = mCaptureSamples;
+    std::vector<uint8_t> result(captureSamples, 0x80);
     // cts android.media.audio.cts.VisualizerTest expecting silence data when effect not running
     // RETURN_VALUE_IF(mState != State::ACTIVE, result, "illegalState");
     if (mState != State::ACTIVE) {
-        result.resize(mCaptureSamples);
-        memset(result.data(), 0x80, mCaptureSamples);
         return result;
     }
 
@@ -214,7 +214,7 @@
     if (latencyMs < 0) {
         latencyMs = 0;
     }
-    uint32_t deltaSamples = mCaptureSamples + mCommon.input.base.sampleRate * latencyMs / 1000;
+    uint32_t deltaSamples = captureSamples + mCommon.input.base.sampleRate * latencyMs / 1000;
 
     // large sample rate, latency, or capture size, could cause overflow.
     // do not offset more than the size of buffer.
@@ -224,21 +224,21 @@
     }
 
     int32_t capturePoint;
-    //capturePoint = (int32_t)mCaptureIdx - deltaSamples;
     __builtin_sub_overflow((int32_t) mCaptureIdx, deltaSamples, &capturePoint);
     // a negative capturePoint means we wrap the buffer.
     if (capturePoint < 0) {
         uint32_t size = -capturePoint;
-        if (size > mCaptureSamples) {
-            size = mCaptureSamples;
+        if (size > captureSamples) {
+            size = captureSamples;
         }
-        result.insert(result.end(), &mCaptureBuf[kMaxCaptureBufSize + capturePoint],
-                        &mCaptureBuf[kMaxCaptureBufSize + capturePoint + size]);
-        mCaptureSamples -= size;
+        std::copy(std::begin(mCaptureBuf) + kMaxCaptureBufSize - size,
+                  std::begin(mCaptureBuf) + kMaxCaptureBufSize, result.begin());
+        captureSamples -= size;
         capturePoint = 0;
     }
-    result.insert(result.end(), &mCaptureBuf[capturePoint],
-                    &mCaptureBuf[capturePoint + mCaptureSamples]);
+    std::copy(std::begin(mCaptureBuf) + capturePoint,
+              std::begin(mCaptureBuf) + capturePoint + captureSamples,
+              result.begin() + mCaptureSamples - captureSamples);
     mLastCaptureIdx = mCaptureIdx;
     return result;
 }
@@ -256,16 +256,15 @@
         // find the peak and RMS squared for the new buffer
         float rmsSqAcc = 0;
         float maxSample = 0.f;
-        for (size_t inIdx = 0; inIdx < (unsigned)samples; ++inIdx) {
+        for (size_t inIdx = 0; inIdx < (unsigned) samples; ++inIdx) {
             maxSample = fmax(maxSample, fabs(in[inIdx]));
             rmsSqAcc += in[inIdx] * in[inIdx];
         }
         maxSample *= 1 << 15; // scale to int16_t, with exactly 1 << 15 representing positive num.
         rmsSqAcc *= 1 << 30; // scale to int16_t * 2
-        mPastMeasurements[mMeasurementBufferIdx] = {
-                .mPeakU16 = (uint16_t)maxSample,
-                .mRmsSquared = rmsSqAcc / samples,
-                .mIsValid = true };
+        mPastMeasurements[mMeasurementBufferIdx] = {.mIsValid = true,
+                                                    .mPeakU16 = (uint16_t)maxSample,
+                                                    .mRmsSquared = rmsSqAcc / samples};
         if (++mMeasurementBufferIdx >= mMeasurementWindowSizeInBuffers) {
             mMeasurementBufferIdx = 0;
         }
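
For readers following the capture() rewrite above, here is a standalone sketch of the same ring-buffer pattern: snapshot the requested sample count, compute a possibly negative read point with __builtin_sub_overflow, copy the wrapped tail first and then the head into a result that is pre-sized and pre-filled with 0x80 ("silence"). captureFromRing() and kBufSize are invented names, and the sketch assumes samples and writeIdx never exceed the buffer size.

#include <algorithm>
#include <array>
#include <cstdint>
#include <vector>

constexpr uint32_t kBufSize = 65536;  // stand-in for kMaxCaptureBufSize

std::vector<uint8_t> captureFromRing(const std::array<uint8_t, kBufSize>& buf,
                                     uint32_t writeIdx, uint32_t samples, uint32_t delta) {
    std::vector<uint8_t> result(samples, 0x80);  // pre-filled with "silence"
    if (delta > kBufSize) {
        delta = kBufSize;  // never rewind further than the buffer itself
    }

    int32_t readPoint;
    __builtin_sub_overflow(static_cast<int32_t>(writeIdx), delta, &readPoint);

    uint32_t remaining = samples;
    if (readPoint < 0) {  // wrapped: the oldest part sits at the end of the buffer
        uint32_t size = std::min<uint32_t>(static_cast<uint32_t>(-readPoint), remaining);
        std::copy(buf.begin() + (kBufSize - size), buf.begin() + kBufSize, result.begin());
        remaining -= size;
        readPoint = 0;
    }
    std::copy(buf.begin() + readPoint, buf.begin() + readPoint + remaining,
              result.begin() + (samples - remaining));
    return result;
}
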
diff --git a/media/liberror/include/error/expected_utils.h b/media/liberror/include/error/expected_utils.h
index ddc8517..8c1654a 100644
--- a/media/liberror/include/error/expected_utils.h
+++ b/media/liberror/include/error/expected_utils.h
@@ -20,6 +20,10 @@
 #include <android-base/expected.h>
 #include <log/log_main.h>
 
+#pragma push_macro("LOG_TAG")
+#undef LOG_TAG
+#define LOG_TAG "MediaLibError"
+
 /**
  * Useful macros for working with status codes and base::expected.
  *
@@ -50,18 +54,26 @@
  *   human-readable version of the status.
  */
 
-#define VALUE_OR_RETURN(exp)                                                         \
-    ({                                                                               \
-        auto _tmp = (exp);                                                           \
-        if (!_tmp.ok()) return ::android::base::unexpected(std::move(_tmp.error())); \
-        std::move(_tmp.value());                                                     \
+#define VALUE_OR_RETURN(exp)                                                          \
+    ({                                                                                \
+        auto _tmp = (exp);                                                            \
+        if (!_tmp.ok()) {                                                             \
+            ALOGE("Function: %s Line: %d Failed result (%s)", __FUNCTION__, __LINE__, \
+                  errorToString(_tmp.error()).c_str());                               \
+            return ::android::base::unexpected(std::move(_tmp.error()));              \
+        }                                                                             \
+        std::move(_tmp.value());                                                      \
     })
 
-#define VALUE_OR_RETURN_STATUS(exp)                     \
-    ({                                                  \
-        auto _tmp = (exp);                              \
-        if (!_tmp.ok()) return std::move(_tmp.error()); \
-        std::move(_tmp.value());                        \
+#define VALUE_OR_RETURN_STATUS(exp)                                                   \
+    ({                                                                                \
+        auto _tmp = (exp);                                                            \
+        if (!_tmp.ok()) {                                                             \
+            ALOGE("Function: %s Line: %d Failed result (%s)", __FUNCTION__, __LINE__, \
+                  errorToString(_tmp.error()).c_str());                               \
+            return std::move(_tmp.error());                                           \
+        }                                                                             \
+        std::move(_tmp.value());                                                      \
     })
 
 #define VALUE_OR_FATAL(exp)                                                                       \
@@ -72,15 +84,29 @@
         std::move(_tmp.value());                                                                  \
     })
 
-#define RETURN_IF_ERROR(exp) \
-    if (auto _tmp = (exp); !errorIsOk(_tmp)) return ::android::base::unexpected(std::move(_tmp));
+#define RETURN_IF_ERROR(exp)                                                \
+    ({                                                                      \
+        auto _tmp = (exp);                                                  \
+        if (!errorIsOk(_tmp)) {                                             \
+            ALOGE("Function: %s Line: %d Failed", __FUNCTION__, __LINE__);  \
+            return ::android::base::unexpected(std::move(_tmp));            \
+        }                                                                   \
+    })
 
-#define RETURN_STATUS_IF_ERROR(exp) \
-    if (auto _tmp = (exp); !errorIsOk(_tmp)) return _tmp;
+#define RETURN_STATUS_IF_ERROR(exp)                                         \
+    ({                                                                      \
+        auto _tmp = (exp);                                                  \
+        if (!errorIsOk(_tmp)) {                                             \
+            ALOGE("Function: %s Line: %d Failed", __FUNCTION__, __LINE__);  \
+            return _tmp;                                                    \
+        }                                                                   \
+    })
 
 #define FATAL_IF_ERROR(exp)                                                                \
     {                                                                                      \
         auto _tmp = (exp);                                                                 \
         LOG_ALWAYS_FATAL_IF(!errorIsOk(_tmp), "Function: %s Line: %d Failed result: (%s)", \
-                            __FUNCTION__, __LINE__, errorToString(_tmp).c_str());         \
+                            __FUNCTION__, __LINE__, errorToString(_tmp).c_str());          \
     }
+
+#pragma pop_macro("LOG_TAG")
diff --git a/media/libheadtracking/OWNERS b/media/libheadtracking/OWNERS
index e5d0370..ae071cf 100644
--- a/media/libheadtracking/OWNERS
+++ b/media/libheadtracking/OWNERS
@@ -1,2 +1,4 @@
-ytai@google.com
-elaurent@google.com
+# Bug component: 48436
+hunga@google.com
+yaoshunkai@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/libmedia/CharacterEncodingDetector.cpp b/media/libmedia/CharacterEncodingDetector.cpp
index e33cc0f..4436fb9 100644
--- a/media/libmedia/CharacterEncodingDetector.cpp
+++ b/media/libmedia/CharacterEncodingDetector.cpp
@@ -198,7 +198,9 @@
             ALOGV("@@@ checking %s", name);
             const char *s = mValues.getEntry(i);
             int32_t inputLength = strlen(s);
-            const char *enc;
+            // Use the encoding determined from the combination of artist/album/title etc.
+            // as the default if no better match is found.
+            const char *enc = combinedenc;
 
             if (!allprintable && (!strcmp(name, "artist") ||
                     !strcmp(name, "albumartist") ||
@@ -216,13 +218,12 @@
                     const UCharsetMatch** ucma = ucsdet_detectAll(csd, &matches, &status);
                     const UCharsetMatch* bestSingleMatch = getPreferred(s, inputLength,
                             ucma, matches, &goodmatchSingle, &highestSingle);
-                    if (goodmatchSingle || highestSingle > highest)
-                        enc = ucsdet_getName(bestSingleMatch, &status);
-                    else
-                        enc = combinedenc;
-                } else {
-                    // use encoding determined from the combination of artist/album/title etc.
-                    enc = combinedenc;
+                    // getPreferred() can return null, so check before calling
+                    // ucsdet_getName().
+                    if (bestSingleMatch != NULL) {
+                        if (goodmatchSingle || highestSingle > highest)
+                            enc = ucsdet_getName(bestSingleMatch, &status);
+                    }
                 }
             } else {
                 if (isPrintableAscii(s, inputLength)) {
diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp
index a6f0b60..1bcb4b9 100644
--- a/media/libmedia/MediaResource.cpp
+++ b/media/libmedia/MediaResource.cpp
@@ -61,8 +61,9 @@
 }
 
 //static
-MediaResource MediaResource::VideoBatteryResource() {
-    return MediaResource(Type::kBattery, SubType::kVideoCodec, 1);
+MediaResource MediaResource::VideoBatteryResource(bool isHardware) {
+    SubType subType = isHardware ? SubType::kHwVideoCodec : SubType::kSwVideoCodec;
+    return MediaResource(Type::kBattery, subType, 1);
 }
 
 //static
diff --git a/media/libmedia/include/media/MediaResource.h b/media/libmedia/include/media/MediaResource.h
index 3b69d4f..c88fee2 100644
--- a/media/libmedia/include/media/MediaResource.h
+++ b/media/libmedia/include/media/MediaResource.h
@@ -41,7 +41,7 @@
             int64_t instanceCount = 1);
     static MediaResource GraphicMemoryResource(int64_t value);
     static MediaResource CpuBoostResource();
-    static MediaResource VideoBatteryResource();
+    static MediaResource VideoBatteryResource(bool isHardware = true);
     static MediaResource DrmSessionResource(const std::vector<uint8_t> &id, int64_t value);
 };
 
@@ -61,10 +61,13 @@
 inline static const char *asString(MediaResource::SubType i, const char *def = "??") {
     switch (i) {
         case MediaResource::SubType::kUnspecifiedSubType: return "unspecified";
-        case MediaResource::SubType::kAudioCodec:         return "audio-codec";
-        case MediaResource::SubType::kVideoCodec:         return "video-codec";
-        case MediaResource::SubType::kImageCodec:         return "image-codec";
-        default:                                 return def;
+        case MediaResource::SubType::kHwAudioCodec:       return "hw-audio-codec";
+        case MediaResource::SubType::kSwAudioCodec:       return "sw-audio-codec";
+        case MediaResource::SubType::kHwVideoCodec:       return "hw-video-codec";
+        case MediaResource::SubType::kSwVideoCodec:       return "sw-video-codec";
+        case MediaResource::SubType::kHwImageCodec:       return "hw-image-codec";
+        case MediaResource::SubType::kSwImageCodec:       return "sw-image-codec";
+        default:                                          return def;
     }
 }
 
diff --git a/media/libmediametrics/MediaMetrics.cpp b/media/libmediametrics/MediaMetrics.cpp
index 2240223..26fe306 100644
--- a/media/libmediametrics/MediaMetrics.cpp
+++ b/media/libmediametrics/MediaMetrics.cpp
@@ -87,7 +87,7 @@
 }
 
 void mediametrics_setString(mediametrics_handle_t handle, attr_t attr,
-                                 const std::string &string) {
+                            const std::string &string) {
     mediametrics_setCString(handle, attr, string.c_str());
 }
 
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index c3d6c89..b511372 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -174,6 +174,7 @@
         "libnetd_client",
         "libpowermanager",
         "libstagefright_httplive",
+        "libaudiohal@7.0",
     ],
 }
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index a625893..bb49b5a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1977,6 +1977,8 @@
         if (rate > 0) {
             format->setFloat("operating-rate", rate * mPlaybackSettings.mSpeed);
         }
+
+        format->setInt32("android._video-scaling", mVideoScalingMode);
     }
 
     Mutex::Autolock autoLock(mDecoderLock);
diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
index 30f6a91..6da8e31 100644
--- a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
+++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
@@ -148,8 +148,8 @@
 class DrmSessionManagerTest : public ::testing::Test {
 public:
     DrmSessionManagerTest()
-        : mService(::ndk::SharedRefBase::make<ResourceManagerService>
-            (new FakeProcessInfo(), new FakeSystemCallback())),
+        : mService(ResourceManagerService::Create(
+                  new FakeProcessInfo(), new FakeSystemCallback())),
           mDrmSessionManager(new DrmSessionManager(mService)),
           mTestDrm1(::ndk::SharedRefBase::make<FakeDrm>(
                   kTestSessionId1, mDrmSessionManager)),
diff --git a/media/libnbaio/NBAIO.cpp b/media/libnbaio/NBAIO.cpp
index 1cb4410..3abc35e 100644
--- a/media/libnbaio/NBAIO.cpp
+++ b/media/libnbaio/NBAIO.cpp
@@ -55,8 +55,7 @@
     ret.mSampleRate = sampleRate;
     ret.mChannelCount = channelCount;
     ret.mFormat = format;
-    ret.mFrameSize = audio_is_linear_pcm(format) ?
-            channelCount * audio_bytes_per_sample(format) : sizeof(uint8_t);
+    ret.mFrameSize = audio_bytes_per_frame(channelCount, format);
     return ret;
 }
 
diff --git a/media/libnbaio/OWNERS b/media/libnbaio/OWNERS
index eece71f..4bd5e06 100644
--- a/media/libnbaio/OWNERS
+++ b/media/libnbaio/OWNERS
@@ -1,2 +1,4 @@
-gkasten@google.com
+# Bug component: 48436
 hunga@google.com
+mnaganov@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/libshmem/OWNERS b/media/libshmem/OWNERS
index 29fa2f5..63c7dab 100644
--- a/media/libshmem/OWNERS
+++ b/media/libshmem/OWNERS
@@ -1,3 +1,5 @@
-ytai@google.com
+# Bug component: 48436
+atneya@google.com
 mnaganov@google.com
-elaurent@google.com
+yaoshunkai@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/libshmem/ShmemCompat.cpp b/media/libshmem/ShmemCompat.cpp
index 246cb24..4200c2e 100644
--- a/media/libshmem/ShmemCompat.cpp
+++ b/media/libshmem/ShmemCompat.cpp
@@ -84,11 +84,11 @@
             return false;
         }
 
-        const int fd = fcntl(heap->getHeapID(), F_DUPFD_CLOEXEC, 0);
-        if (fd < 0) {
+        base::unique_fd fd(fcntl(heap->getHeapID(), F_DUPFD_CLOEXEC, 0));
+        if (!fd.ok()) {
             return false;
         }
-        result->fd.reset(base::unique_fd(fd));
+        result->fd.reset(std::move(fd));
         result->size = size;
         result->offset = heap->getOffset() + offset;
         result->writeable = (heap->getFlags() & IMemoryHeap::READ_ONLY) == 0;
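
The fix above is the standard RAII treatment for duplicated descriptors: wrap the result of fcntl(F_DUPFD_CLOEXEC) in android::base::unique_fd immediately so every early-return path closes it. A minimal sketch, where dupAsCloexec() is an invented helper rather than anything in ShmemCompat:

#include <fcntl.h>

#include <android-base/unique_fd.h>

// Duplicate rawFd with CLOEXEC set; an invalid unique_fd signals failure.
android::base::unique_fd dupAsCloexec(int rawFd) {
    android::base::unique_fd fd(fcntl(rawFd, F_DUPFD_CLOEXEC, 0));
    if (!fd.ok()) {
        return android::base::unique_fd();  // nothing to leak: fd never held a valid descriptor
    }
    return fd;  // ownership (and the eventual close()) moves to the caller
}
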
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index a91b24a..2145dd9 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -43,6 +43,7 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/RenderedFrameInfo.h>
 #include <media/stagefright/SurfaceUtils.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/MediaBufferHolder.h>
@@ -64,11 +65,14 @@
 #include "include/SharedMemoryBuffer.h"
 #include <media/stagefright/omx/OMXUtils.h>
 
+#include <server_configurable_flags/get_flags.h>
+
 namespace android {
 
 typedef hardware::media::omx::V1_0::IGraphicBufferSource HGraphicBufferSource;
 
 using hardware::media::omx::V1_0::Status;
+using server_configurable_flags::GetServerConfigurableFlag;
 
 enum {
     kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
@@ -81,6 +85,11 @@
 
 }
 
+static bool areRenderMetricsEnabled() {
+    std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
+    return v == "true";
+}
+
 // OMX errors are directly mapped into status_t range if
 // there is no corresponding MediaError status code.
 // Use the statusFromOMXError(int32_t omxError) function.
@@ -563,6 +572,9 @@
 ACodec::ACodec()
     : mSampleRate(0),
       mNodeGeneration(0),
+      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
+      mIsWindowToDisplay(false),
+      mHasPresentFenceTimes(false),
       mUsingNativeWindow(false),
       mNativeWindowUsageBits(0),
       mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
@@ -634,7 +646,8 @@
     if (!mBufferChannel) {
         mBufferChannel = std::make_shared<ACodecBufferChannel>(
                 new AMessage(kWhatInputBufferFilled, this),
-                new AMessage(kWhatOutputBufferDrained, this));
+                new AMessage(kWhatOutputBufferDrained, this),
+                new AMessage(kWhatPollForRenderedBuffers, this));
     }
     return mBufferChannel;
 }
@@ -657,7 +670,7 @@
     msg->post();
 }
 
-status_t ACodec::setSurface(const sp<Surface> &surface) {
+status_t ACodec::setSurface(const sp<Surface> &surface, uint32_t /*generation*/) {
     sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
     msg->setObject("surface", surface);
 
@@ -744,6 +757,7 @@
     // if we have not yet started the codec, we can simply set the native window
     if (mBuffers[kPortIndexInput].size() == 0) {
         mNativeWindow = surface;
+        initializeFrameTracking();
         return OK;
     }
 
@@ -852,6 +866,7 @@
 
     mNativeWindow = nativeWindow;
     mNativeWindowUsageBits = usageBits;
+    initializeFrameTracking();
     return OK;
 }
 
@@ -962,7 +977,6 @@
                 BufferInfo info;
                 info.mStatus = BufferInfo::OWNED_BY_US;
                 info.mFenceFd = -1;
-                info.mRenderInfo = NULL;
                 info.mGraphicBuffer = NULL;
                 info.mNewGraphicBuffer = false;
 
@@ -1230,6 +1244,7 @@
 
     *bufferCount = def.nBufferCountActual;
     *bufferSize =  def.nBufferSize;
+    initializeFrameTracking();
     return err;
 }
 
@@ -1268,7 +1283,6 @@
         info.mStatus = BufferInfo::OWNED_BY_US;
         info.mFenceFd = fenceFd;
         info.mIsReadFence = false;
-        info.mRenderInfo = NULL;
         info.mGraphicBuffer = graphicBuffer;
         info.mNewGraphicBuffer = false;
         info.mDequeuedAt = mDequeueCounter;
@@ -1345,7 +1359,6 @@
         BufferInfo info;
         info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
         info.mFenceFd = -1;
-        info.mRenderInfo = NULL;
         info.mGraphicBuffer = NULL;
         info.mNewGraphicBuffer = false;
         info.mDequeuedAt = mDequeueCounter;
@@ -1441,42 +1454,6 @@
     return err;
 }
 
-void ACodec::updateRenderInfoForDequeuedBuffer(
-        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {
-
-    info->mRenderInfo =
-        mRenderTracker.updateInfoForDequeuedBuffer(
-                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);
-
-    // check for any fences already signaled
-    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
-}
-
-void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
-    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
-        mRenderTracker.dumpRenderQueue();
-    }
-}
-
-void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
-    std::list<FrameRenderTracker::Info> done =
-        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);
-
-    // unlink untracked frames
-    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
-            it != done.cend(); ++it) {
-        ssize_t index = it->getIndex();
-        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
-            mBuffers[kPortIndexOutput][index].mRenderInfo = NULL;
-        } else if (index >= 0) {
-            // THIS SHOULD NEVER HAPPEN
-            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
-        }
-    }
-
-    mCallback->onOutputFramesRendered(done);
-}
-
 void ACodec::onFirstTunnelFrameReady() {
     mCallback->onFirstTunnelFrameReady();
 }
@@ -1531,7 +1508,6 @@
 
                 info->mStatus = BufferInfo::OWNED_BY_US;
                 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
-                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                 return info;
             }
         }
@@ -1576,18 +1552,105 @@
     oldest->mNewGraphicBuffer = true;
     oldest->mStatus = BufferInfo::OWNED_BY_US;
     oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
-    mRenderTracker.untrackFrame(oldest->mRenderInfo);
-    oldest->mRenderInfo = NULL;
 
     ALOGV("replaced oldest buffer #%u with age %u, graphicBuffer %p",
             (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
             mDequeueCounter - oldest->mDequeuedAt,
             oldest->mGraphicBuffer->handle);
-
-    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
     return oldest;
 }
 
+void ACodec::initializeFrameTracking() {
+    mTrackedFrames.clear();
+
+    int isWindowToDisplay = 0;
+    mNativeWindow->query(mNativeWindow.get(), NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER,
+            &isWindowToDisplay);
+    mIsWindowToDisplay = isWindowToDisplay == 1;
+    // No frame tracking is needed if we're not sending frames to the display
+    if (!mIsWindowToDisplay) {
+        // Return early so we don't call into SurfaceFlinger (which requires permissions)
+        return;
+    }
+
+    int hasPresentFenceTimes = 0;
+    mNativeWindow->query(mNativeWindow.get(), NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT,
+            &hasPresentFenceTimes);
+    mHasPresentFenceTimes = hasPresentFenceTimes == 1;
+    if (!mHasPresentFenceTimes) {
+        ALOGI("Using latch times for frame rendered signals - present fences not supported");
+    }
+
+    status_t err = native_window_enable_frame_timestamps(mNativeWindow.get(), true);
+    if (err) {
+        ALOGE("Failed to enable frame timestamps (%d)", err);
+    }
+}
+
+void ACodec::trackReleasedFrame(int64_t frameId, int64_t mediaTimeUs, int64_t desiredRenderTimeNs) {
+    // If the render time is earlier than now, then we're suggesting it should be rendered ASAP,
+    // so track the frame as if the desired render time is now.
+    int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    if (desiredRenderTimeNs < nowNs) {
+        desiredRenderTimeNs = nowNs;
+    }
+
+    // If the render time is more than a second from now, then pretend the frame is supposed to be
+    // rendered immediately, because that's what SurfaceFlinger heuristics will do. This is a tight
+    // coupling, but is really the only way to optimize away unnecessary present fence checks in
+    // processRenderedFrames.
+    if (desiredRenderTimeNs > nowNs + 1*1000*1000*1000LL) {
+        desiredRenderTimeNs = nowNs;
+    }
+
+    // We've just queued a frame to the surface, so keep track of it and later check to see if it is
+    // actually rendered.
+    TrackedFrame frame;
+    frame.id = frameId;
+    frame.mediaTimeUs = mediaTimeUs;
+    frame.desiredRenderTimeNs = desiredRenderTimeNs;
+    mTrackedFrames.push_back(frame);
+}
+
+void ACodec::pollForRenderedFrames() {
+    std::list<RenderedFrameInfo> renderedFrameInfos;
+    // Scan all frames and check to see if the frames that SHOULD have been rendered by now, have,
+    // in fact, been rendered.
+    int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    while (!mTrackedFrames.empty()) {
+        TrackedFrame & frame = mTrackedFrames.front();
+        // Frames that should have been rendered at least 100ms in the past are checked
+        if (frame.desiredRenderTimeNs > nowNs - 100*1000*1000LL) {
+            break;
+        }
+
+        status_t err;
+        nsecs_t latchOrPresentTimeNs = NATIVE_WINDOW_TIMESTAMP_INVALID;
+        err = native_window_get_frame_timestamps(mNativeWindow.get(), frame.id,
+                /* outRequestedPresentTime */ nullptr, /* outAcquireTime */ nullptr,
+                mHasPresentFenceTimes ? nullptr : &latchOrPresentTimeNs, // latch time
+                /* outFirstRefreshStartTime */ nullptr, /* outLastRefreshStartTime */ nullptr,
+                /* outGpuCompositionDoneTime */ nullptr,
+                mHasPresentFenceTimes ? &latchOrPresentTimeNs : nullptr, // display present time
+                /* outDequeueReadyTime */ nullptr, /* outReleaseTime */ nullptr);
+        if (err) {
+            ALOGE("Failed to get frame timestamps for %lld: %d", (long long) frame.id, err);
+        }
+        // If we don't have a render time by now, then consider the frame as dropped
+        if (latchOrPresentTimeNs != NATIVE_WINDOW_TIMESTAMP_PENDING &&
+            latchOrPresentTimeNs != NATIVE_WINDOW_TIMESTAMP_INVALID) {
+            renderedFrameInfos.push_back(RenderedFrameInfo(frame.mediaTimeUs,
+                                                           latchOrPresentTimeNs));
+        }
+
+        mTrackedFrames.pop_front();
+    }
+
+    if (!renderedFrameInfos.empty()) {
+        mCallback->onOutputFramesRendered(renderedFrameInfos);
+    }
+}
+
 status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
     if (portIndex == kPortIndexInput) {
         mBufferChannel->setInputBufferArray({});
@@ -1663,11 +1726,6 @@
         ::close(info->mFenceFd);
     }
 
-    if (portIndex == kPortIndexOutput) {
-        mRenderTracker.untrackFrame(info->mRenderInfo, i);
-        info->mRenderInfo = NULL;
-    }
-
     // remove buffer even if mOMXNode->freeBuffer fails
     mBuffers[portIndex].erase(mBuffers[portIndex].begin() + i);
     return err;
@@ -6032,22 +6090,10 @@
     sp<RefBase> obj;
     CHECK(msg->findObject("messages", &obj));
     sp<MessageList> msgList = static_cast<MessageList *>(obj.get());
-
-    bool receivedRenderedEvents = false;
     for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin();
           it != msgList->getList().cend(); ++it) {
         (*it)->setWhat(ACodec::kWhatOMXMessageItem);
         mCodec->handleMessage(*it);
-        int32_t type;
-        CHECK((*it)->findInt32("type", &type));
-        if (type == omx_message::FRAME_RENDERED) {
-            receivedRenderedEvents = true;
-        }
-    }
-
-    if (receivedRenderedEvents) {
-        // NOTE: all buffers are rendered in this case
-        mCodec->notifyOfRenderedFrames();
     }
     return true;
 }
@@ -6609,15 +6655,6 @@
     info->mDequeuedAt = ++mCodec->mDequeueCounter;
     info->mStatus = BufferInfo::OWNED_BY_US;
 
-    if (info->mRenderInfo != NULL) {
-        // The fence for an emptied buffer must have signaled, but there still could be queued
-        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
-        // as we will soon requeue this buffer to the surface. While in theory we could still keep
-        // track of buffers that are requeued to the surface, it is better to add support to the
-        // buffer-queue to notify us of released buffers and their fences (in the future).
-        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
-    }
-
     // byte buffers cannot take fences, so wait for any fence now
     if (mCodec->mNativeWindow == NULL) {
         (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
@@ -6824,14 +6861,6 @@
             mCodec->mLastHdr10PlusBuffer = hdr10PlusInfo;
         }
 
-        // save buffers sent to the surface so we can get render time when they return
-        int64_t mediaTimeUs = -1;
-        buffer->meta()->findInt64("timeUs", &mediaTimeUs);
-        if (mediaTimeUs >= 0) {
-            mCodec->mRenderTracker.onFrameQueued(
-                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
-        }
-
         int64_t timestampNs = 0;
         if (!msg->findInt64("timestampNs", &timestampNs)) {
             // use media timestamp if client did not request a specific render timestamp
@@ -6845,11 +6874,25 @@
         err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
         ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);
 
+        uint64_t frameId;
+        err = native_window_get_next_frame_id(mCodec->mNativeWindow.get(), &frameId);
+
         info->checkReadFence("onOutputBufferDrained before queueBuffer");
         err = mCodec->mNativeWindow->queueBuffer(
                     mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
-        // TODO(b/266211548): Poll the native window for rendered buffers, since when queueing
-        // buffers, the frame event history delta is retrieved.
+
+        int64_t mediaTimeUs = -1;
+        buffer->meta()->findInt64("timeUs", &mediaTimeUs);
+        if (mCodec->mAreRenderMetricsEnabled && mCodec->mIsWindowToDisplay) {
+            mCodec->trackReleasedFrame(frameId, mediaTimeUs, timestampNs);
+            mCodec->pollForRenderedFrames();
+        } else {
+            // When the frame is queued to an intermediate (non-display) surface,
+            // onFrameRendered is triggered immediately.
+            mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs,
+                                                                         timestampNs)});
+        }
+
         info->mFenceFd = -1;
         if (err == OK) {
             info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
@@ -7076,7 +7119,6 @@
     ++mCodec->mNodeGeneration;
 
     mCodec->mComponentName = componentName;
-    mCodec->mRenderTracker.setComponentName(componentName);
     mCodec->mFlags = 0;
 
     if (componentName.endsWith(".secure")) {
@@ -7713,7 +7755,6 @@
 
 void ACodec::ExecutingState::stateEntered() {
     ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());
-    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
     mCodec->processDeferredMessages();
 }
 
@@ -7824,7 +7865,15 @@
                     mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                 }
             }
-            return true;
+            handled = true;
+            break;
+        }
+
+        case kWhatPollForRenderedBuffers:
+        {
+            mCodec->pollForRenderedFrames();
+            handled = true;
+            break;
         }
 
         default:
@@ -8520,7 +8569,7 @@
 }
 
 bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
-    mCodec->onFrameRendered(mediaTimeUs, systemNano);
+    mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs, systemNano)});
     return true;
 }
 
@@ -8694,7 +8743,7 @@
 
 bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
         int64_t mediaTimeUs, nsecs_t systemNano) {
-    mCodec->onFrameRendered(mediaTimeUs, systemNano);
+    mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs, systemNano)});
     return true;
 }
 
@@ -8725,10 +8774,6 @@
                             OMX_CommandPortEnable, kPortIndexOutput);
                 }
 
-                // Clear the RenderQueue in which queued GraphicBuffers hold the
-                // actual buffer references in order to free them early.
-                mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
-
                 if (err == OK) {
                     err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                     ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
@@ -9112,8 +9157,6 @@
         // the native window for rendering. Let's get those back as well.
         mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();
 
-        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
-
         mCodec->mCallback->onFlushCompleted();
 
         mCodec->mPortEOS[kPortIndexInput] =
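
The new ACodec frame tracking above hinges on one small policy: a frame's desired render time is clamped so that anything already in the past, or more than a second in the future, is treated as "render now", which matches what SurfaceFlinger's scheduling heuristics will do anyway. An isolated sketch of that clamp, with an invented function name:

#include <cstdint>

int64_t clampDesiredRenderTimeNs(int64_t desiredRenderTimeNs, int64_t nowNs) {
    constexpr int64_t kOneSecondNs = 1'000'000'000LL;
    // Past deadlines mean "as soon as possible"; far-future deadlines are also rendered
    // immediately by SurfaceFlinger's heuristics, so track them the same way.
    if (desiredRenderTimeNs < nowNs || desiredRenderTimeNs > nowNs + kOneSecondNs) {
        return nowNs;
    }
    return desiredRenderTimeNs;
}

pollForRenderedFrames() then only inspects tracked frames whose clamped deadline is at least 100 ms in the past, which keeps the per-frame timestamp queries cheap.
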
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index 8f2bed2..ad42813 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -32,6 +32,7 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/ACodec.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/MediaCodecBuffer.h>
 #include <system/window.h>
@@ -87,9 +88,11 @@
 }
 
 ACodecBufferChannel::ACodecBufferChannel(
-        const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained)
+        const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained,
+        const sp<AMessage> &pollForRenderedBuffers)
     : mInputBufferFilled(inputBufferFilled),
       mOutputBufferDrained(outputBufferDrained),
+      mPollForRenderedBuffers(pollForRenderedBuffers),
       mHeapSeqNum(-1) {
 }
 
@@ -488,7 +491,7 @@
 }
 
 void ACodecBufferChannel::pollForRenderedBuffers() {
-    // TODO(b/266211548): Poll the native window for rendered buffers.
+    mPollForRenderedBuffers->post();
 }
 
 status_t ACodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index a26fcbe..0af9d12 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -270,10 +270,10 @@
         "SurfaceUtils.cpp",
         "ThrottledSource.cpp",
         "Utils.cpp",
-        "VideoRenderQualityTracker.cpp",
         "VideoFrameSchedulerBase.cpp",
         "VideoFrameScheduler.cpp",
-    ],
+        "VideoRenderQualityTracker.cpp",
+    ],
 
     shared_libs: [
         "libstagefright_framecapture_utils",
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index b5bd975..57937f9 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -16,7 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "FrameDecoder"
-
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include "include/FrameDecoder.h"
 #include "include/FrameCaptureLayer.h"
 #include "include/HevcUtils.h"
@@ -41,6 +41,7 @@
 #include <media/stagefright/Utils.h>
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 namespace android {
 
@@ -340,6 +341,7 @@
 }
 
 sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
+    ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ExtractFrame");
     status_t err = onExtractRect(rect);
     if (err == OK) {
         err = extractInternal();
@@ -713,6 +715,7 @@
     }
     converter.setSrcColorSpace(standard, range, transfer);
     if (converter.isValid()) {
+        ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ColorConverter");
         converter.convert(
                 (const uint8_t *)videoFrameBuffer->data(),
                 width, height, stride,
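
The tracing added to FrameDecoder follows the usual atrace recipe: define ATRACE_TAG before including <utils/Trace.h>, then place a ScopedTrace at the top of the scope you want to see as a slice in Perfetto/systrace. A tiny sketch, where decodeOneFrame() is an invented example function:

#define ATRACE_TAG ATRACE_TAG_VIDEO

#include <utils/Trace.h>

void decodeOneFrame() {
    // Emits a begin/end pair around this scope, visible as a "decodeOneFrame" slice.
    android::ScopedTrace trace(ATRACE_TAG, "decodeOneFrame");
    // ... decoding work ...
}
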
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 870ebdf..c27cfc5 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -79,6 +79,7 @@
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/RenderedFrameInfo.h>
 #include <media/stagefright/SurfaceUtils.h>
 #include <nativeloader/dlext_namespaces.h>
 #include <private/android_filesystem_config.h>
@@ -210,6 +211,7 @@
 // Render metrics
 static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
 static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
+static const char *kCodecLastRenderTimeUs = "android.media.mediacodec.last-render-time-us";
 static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
 static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
 static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
@@ -243,7 +245,7 @@
         "android.media.mediacodec.judder-score-histogram-buckets";
 // Freeze event
 static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
-static const char *kFreezeEventKeyName = "freeze";
+static const char *kFreezeEventKeyName = "videofreeze";
 static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
 static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
 static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
@@ -255,7 +257,7 @@
         "android.media.mediacodec.freeze.details-distance-ms";
 // Judder event
 static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
-static const char *kJudderEventKeyName = "judder";
+static const char *kJudderEventKeyName = "videojudder";
 static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
 static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
 static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
@@ -822,6 +824,37 @@
     const sp<AMessage> mNotify;
 };
 
+class OnBufferReleasedListener : public ::android::BnProducerListener {
+private:
+    uint32_t mGeneration;
+    std::weak_ptr<BufferChannelBase> mBufferChannel;
+
+    void notifyBufferReleased() {
+        auto p = mBufferChannel.lock();
+        if (p) {
+            p->onBufferReleasedFromOutputSurface(mGeneration);
+        }
+    }
+
+public:
+    explicit OnBufferReleasedListener(
+            uint32_t generation,
+            const std::shared_ptr<BufferChannelBase> &bufferChannel)
+            : mGeneration(generation), mBufferChannel(bufferChannel) {}
+
+    virtual ~OnBufferReleasedListener() = default;
+
+    void onBufferReleased() override {
+        notifyBufferReleased();
+    }
+
+    void onBufferDetached([[maybe_unused]] int slot) override {
+        notifyBufferReleased();
+    }
+
+    bool needsReleaseNotify() override { return true; }
+};
+
 class BufferCallback : public CodecBase::BufferCallback {
 public:
     explicit BufferCallback(const sp<AMessage> &notify);
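
OnBufferReleasedListener above deliberately uses a weak back-reference: the Surface holds the listener, and the listener holds only a std::weak_ptr to the buffer channel, so registering it can never extend or cycle the channel's lifetime. A stripped-down sketch of the pattern with invented names:

#include <cstdint>
#include <memory>

struct Channel {
    void onBufferReleasedFromOutputSurface(uint32_t /*generation*/) { /* requeue work */ }
};

class ReleasedListener {
  public:
    ReleasedListener(uint32_t generation, const std::shared_ptr<Channel>& channel)
          : mGeneration(generation), mChannel(channel) {}

    void onBufferReleased() {
        if (auto channel = mChannel.lock()) {  // quietly drops the event if the channel is gone
            channel->onBufferReleasedFromOutputSurface(mGeneration);
        }
    }

  private:
    const uint32_t mGeneration;    // ties the event to one output-surface generation
    std::weak_ptr<Channel> mChannel;
};
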
@@ -880,7 +913,7 @@
             const sp<AMessage> &outputFormat) override;
     virtual void onInputSurfaceDeclined(status_t err) override;
     virtual void onSignaledInputEOS(status_t err) override;
-    virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
+    virtual void onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) override;
     virtual void onOutputBuffersChanged() override;
     virtual void onFirstTunnelFrameReady() override;
     virtual void onMetricsUpdated(const sp<AMessage> &updatedMetrics) override;
@@ -990,7 +1023,7 @@
     notify->post();
 }
 
-void CodecCallback::onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) {
+void CodecCallback::onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) {
     sp<AMessage> notify(mNotify->dup());
     notify->setInt32("what", kWhatOutputFramesRendered);
     if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
@@ -1017,12 +1050,19 @@
     notify->post();
 }
 
-static MediaResourceSubType toMediaResourceSubType(MediaCodec::Domain domain) {
+static MediaResourceSubType toMediaResourceSubType(bool isHardware, MediaCodec::Domain domain) {
     switch (domain) {
-        case MediaCodec::DOMAIN_VIDEO: return MediaResourceSubType::kVideoCodec;
-        case MediaCodec::DOMAIN_AUDIO: return MediaResourceSubType::kAudioCodec;
-        case MediaCodec::DOMAIN_IMAGE: return MediaResourceSubType::kImageCodec;
-        default:                       return MediaResourceSubType::kUnspecifiedSubType;
+    case MediaCodec::DOMAIN_VIDEO:
+        return isHardware ? MediaResourceSubType::kHwVideoCodec :
+                            MediaResourceSubType::kSwVideoCodec;
+    case MediaCodec::DOMAIN_AUDIO:
+        return isHardware ? MediaResourceSubType::kHwAudioCodec :
+                            MediaResourceSubType::kSwAudioCodec;
+    case MediaCodec::DOMAIN_IMAGE:
+        return isHardware ? MediaResourceSubType::kHwImageCodec :
+                            MediaResourceSubType::kSwImageCodec;
+    default:
+        return MediaResourceSubType::kUnspecifiedSubType;
     }
 }
 
@@ -1159,6 +1199,7 @@
       mTunneledInputHeight(0),
       mTunneled(false),
       mTunnelPeekState(TunnelPeekState::kLegacyMode),
+      mTunnelPeekEnabled(false),
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
@@ -1306,6 +1347,7 @@
         const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
         if (m.frameReleasedCount > 0) {
             mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
+            mediametrics_setInt64(mMetricsHandle, kCodecLastRenderTimeUs, m.lastRenderTimeUs);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
@@ -1703,6 +1745,7 @@
 
     TunnelPeekState previousState = mTunnelPeekState;
     if(tunnelPeek == 0){
+        mTunnelPeekEnabled = false;
         switch (mTunnelPeekState) {
             case TunnelPeekState::kLegacyMode:
                 msg->setInt32("android._tunnel-peek-set-legacy", 0);
@@ -1718,6 +1761,7 @@
                 return;
         }
     } else {
+        mTunnelPeekEnabled = true;
         switch (mTunnelPeekState) {
             case TunnelPeekState::kLegacyMode:
                 msg->setInt32("android._tunnel-peek-set-legacy", 0);
@@ -1791,7 +1835,7 @@
 
     if (mBatteryChecker != nullptr) {
         mBatteryChecker->onCodecActivity([this] () {
-            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
+            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource(mIsHardware));
         });
     }
 
@@ -1863,7 +1907,7 @@
 
     if (mBatteryChecker != nullptr) {
         mBatteryChecker->onCodecActivity([this] () {
-            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
+            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource(mIsHardware));
         });
     }
 
@@ -2118,13 +2162,16 @@
         mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
     }
 
-    std::vector<MediaResourceParcel> resources;
-    resources.push_back(MediaResource::CodecResource(secureCodec, toMediaResourceSubType(mDomain)));
-
     // If the ComponentName is not set yet, use the name passed by the user.
     if (mComponentName.empty()) {
+        mIsHardware = !MediaCodecList::isSoftwareCodec(name);
         mResourceManagerProxy->setCodecName(name.c_str());
     }
+
+    std::vector<MediaResourceParcel> resources;
+    resources.push_back(MediaResource::CodecResource(secureCodec,
+                                                     toMediaResourceSubType(mIsHardware, mDomain)));
+
     for (int i = 0; i <= kMaxRetry; ++i) {
         if (i > 0) {
             // Don't try to reclaim resource for the first time.
@@ -2368,7 +2415,7 @@
     status_t err;
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
-            toMediaResourceSubType(mDomain)));
+            toMediaResourceSubType(mIsHardware, mDomain)));
     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
         // Don't know the buffer size at this point, but it's fine to use 1 because
         // the reclaimResource call doesn't consider the requester's buffer size for now.
@@ -2973,7 +3020,7 @@
     status_t err;
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
-            toMediaResourceSubType(mDomain)));
+            toMediaResourceSubType(mIsHardware, mDomain)));
     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
         // Don't know the buffer size at this point, but it's fine to use 1 because
         // the reclaimResource call doesn't consider the requester's buffer size for now.
@@ -2991,12 +3038,6 @@
                 ALOGE("retrying start: failed to reset codec");
                 break;
             }
-            sp<AMessage> response;
-            err = PostAndAwaitResponse(mConfigureMsg, &response);
-            if (err != OK) {
-                ALOGE("retrying start: failed to configure codec");
-                break;
-            }
             if (callback != nullptr) {
                 err = setCallback(callback);
                 if (err != OK) {
@@ -3005,6 +3046,12 @@
                 }
                 ALOGD("succeed to set callback for reclaim");
             }
+            sp<AMessage> response;
+            err = PostAndAwaitResponse(mConfigureMsg, &response);
+            if (err != OK) {
+                ALOGE("retrying start: failed to configure codec");
+                break;
+            }
         }
 
         // Keep callback message after the first iteration if necessary.
@@ -3729,9 +3776,8 @@
 
 
 inline void MediaCodec::initClientConfigParcel(ClientConfigParcel& clientConfig) {
-    clientConfig.codecType = toMediaResourceSubType(mDomain);
+    clientConfig.codecType = toMediaResourceSubType(mIsHardware, mDomain);
     clientConfig.isEncoder = mFlags & kFlagIsEncoder;
-    clientConfig.isHardware = !MediaCodecList::isSoftwareCodec(mComponentName);
     clientConfig.width = mWidth;
     clientConfig.height = mHeight;
     clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
@@ -3960,6 +4006,7 @@
                     CHECK(msg->findString("componentName", &mComponentName));
 
                     if (mComponentName.c_str()) {
+                        mIsHardware = !MediaCodecList::isSoftwareCodec(mComponentName);
                         mediametrics_setCString(mMetricsHandle, kCodecCodec,
                                                 mComponentName.c_str());
                         // Update the codec name.
@@ -3987,7 +4034,7 @@
                                           MediaCodecList::isSoftwareCodec(mComponentName) ? 0 : 1);
 
                     mResourceManagerProxy->addResource(MediaResource::CodecResource(
-                            mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
+                            mFlags & kFlagIsSecure, toMediaResourceSubType(mIsHardware, mDomain)));
 
                     postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
                     break;
@@ -4671,6 +4718,8 @@
                     PostReplyWithError(replyID, err);
                     break;
                 }
+                uint32_t generation = mSurfaceGeneration;
+                format->setInt32("native-window-generation", generation);
             } else {
                 // we are not using surface so this variable is not used, but initialize sensibly anyway
                 mAllowFrameDroppingBySurface = false;
@@ -4799,7 +4848,8 @@
                         mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
                         err = BAD_VALUE;
                     } else {
-                        err = connectToSurface(surface);
+                        uint32_t generation;
+                        err = connectToSurface(surface, &generation);
                         if (err == ALREADY_EXISTS) {
                             // reconnecting to same surface
                             err = OK;
@@ -4814,12 +4864,13 @@
                                     mSoftRenderer = new SoftwareRenderer(surface);
                                     // TODO: check if this was successful
                                 } else {
-                                    err = mCodec->setSurface(surface);
+                                    err = mCodec->setSurface(surface, generation);
                                 }
                             }
                             if (err == OK) {
                                 (void)disconnectFromSurface();
                                 mSurface = surface;
+                                mSurfaceGeneration = generation;
                             }
                             mReliabilityContextMetrics.setOutputSurfaceCount++;
                         }
@@ -4897,10 +4948,11 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
             TunnelPeekState previousState = mTunnelPeekState;
             if (previousState != TunnelPeekState::kLegacyMode) {
-                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+                mTunnelPeekState = mTunnelPeekEnabled ? TunnelPeekState::kEnabledNoBuffer :
+                    TunnelPeekState::kDisabledNoBuffer;
                 ALOGV("TunnelPeekState: %s -> %s",
                         asString(previousState),
-                        asString(TunnelPeekState::kEnabledNoBuffer));
+                        asString(mTunnelPeekState));
             }
 
             mReplyID = replyID;
@@ -5057,15 +5109,17 @@
                     mReleaseSurface.reset(new ReleaseSurface(usage));
                 }
                 if (mSurface != mReleaseSurface->getSurface()) {
-                    status_t err = connectToSurface(mReleaseSurface->getSurface());
+                    uint32_t generation;
+                    status_t err = connectToSurface(mReleaseSurface->getSurface(), &generation);
                     ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
                     if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
-                        err = mCodec->setSurface(mReleaseSurface->getSurface());
+                        err = mCodec->setSurface(mReleaseSurface->getSurface(), generation);
                         ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
                     }
                     if (err == OK) {
                         (void)disconnectFromSurface();
                         mSurface = mReleaseSurface->getSurface();
+                        mSurfaceGeneration = generation;
                     } else {
                         // We were not able to switch the surface, so force
                         // synchronous release.
@@ -5397,10 +5451,11 @@
             returnBuffersToCodec();
             TunnelPeekState previousState = mTunnelPeekState;
             if (previousState != TunnelPeekState::kLegacyMode) {
-                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+                mTunnelPeekState = mTunnelPeekEnabled ? TunnelPeekState::kEnabledNoBuffer :
+                    TunnelPeekState::kDisabledNoBuffer;
                 ALOGV("TunnelPeekState: %s -> %s",
                         asString(previousState),
-                        asString(TunnelPeekState::kEnabledNoBuffer));
+                        asString(mTunnelPeekState));
             }
             break;
         }
@@ -5506,7 +5561,7 @@
             if (mBatteryChecker != nullptr) {
                 mBatteryChecker->onCheckBatteryTimer(msg, [this] () {
                     mResourceManagerProxy->removeResource(
-                            MediaResource::VideoBatteryResource());
+                            MediaResource::VideoBatteryResource(mIsHardware));
                 });
             }
             break;
@@ -5943,7 +5998,7 @@
         if (isBufferDecodeOnly) {
             buffer->meta()->setInt32("decode-only", true);
         }
-        if (mTunneled && !isBufferDecodeOnly) {
+        if (mTunneled && !isBufferDecodeOnly && !(flags & BUFFER_FLAG_CODECCONFIG)) {
             TunnelPeekState previousState = mTunnelPeekState;
             switch(mTunnelPeekState){
                 case TunnelPeekState::kEnabledNoBuffer:
@@ -6139,12 +6194,10 @@
     return onQueueInputBuffer(msg);
 }
 
-//static
-size_t MediaCodec::CreateFramesRenderedMessage(
-        const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
+template<typename T>
+static size_t CreateFramesRenderedMessageInternal(const std::list<T> &done, sp<AMessage> &msg) {
     size_t index = 0;
-    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
-            it != done.cend(); ++it) {
+    for (typename std::list<T>::const_iterator it = done.cbegin(); it != done.cend(); ++it) {
         if (it->getRenderTimeNs() < 0) {
             continue; // dropped frame from tracking
         }
@@ -6155,6 +6208,18 @@
     return index;
 }
 
+//static
+size_t MediaCodec::CreateFramesRenderedMessage(
+        const std::list<RenderedFrameInfo> &done, sp<AMessage> &msg) {
+    return CreateFramesRenderedMessageInternal(done, msg);
+}
+
+//static
+size_t MediaCodec::CreateFramesRenderedMessage(
+        const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
+    return CreateFramesRenderedMessageInternal(done, msg);
+}
+
 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
     size_t index;
     CHECK(msg->findSize("index", &index));
@@ -6246,7 +6311,9 @@
             // presentation timestamp is used instead, which almost certainly occurs in the past,
             // since it's almost always a zero-based offset from the start of the stream. In these
             // scenarios, we expect the frame to be rendered with no delay.
-            int64_t delayUs = noRenderTime ? 0 : renderTimeNs / 1000 - ALooper::GetNowUs();
+            int64_t nowUs = ALooper::GetNowUs();
+            int64_t renderTimeUs = renderTimeNs / 1000;
+            int64_t delayUs = renderTimeUs < nowUs ? 0 : renderTimeUs - nowUs;
             delayUs += 100 * 1000; /* 100ms in microseconds */
             status_t err =
                     mMsgPollForRenderedBuffers->postUnique(/* token= */ mMsgPollForRenderedBuffers,
@@ -6323,7 +6390,7 @@
     return index;
 }
 
-status_t MediaCodec::connectToSurface(const sp<Surface> &surface) {
+status_t MediaCodec::connectToSurface(const sp<Surface> &surface, uint32_t *generation) {
     status_t err = OK;
     if (surface != NULL) {
         uint64_t oldId, newId;
@@ -6345,21 +6412,26 @@
             // number. Rely on the fact that max supported process id by Linux is 2^22.
             // PID is never 0 so we don't have to worry that we use the default generation of 0.
             // TODO: come up with a unique scheme if other producers also set the generation number.
-            static uint32_t mSurfaceGeneration = 0;
-            uint32_t generation = (getpid() << 10) | (++mSurfaceGeneration & ((1 << 10) - 1));
-            surface->setGenerationNumber(generation);
-            ALOGI("[%s] setting surface generation to %u", mComponentName.c_str(), generation);
+            static uint32_t sSurfaceGeneration = 0;
+            *generation = (getpid() << 10) | (++sSurfaceGeneration & ((1 << 10) - 1));
+            surface->setGenerationNumber(*generation);
+            ALOGI("[%s] setting surface generation to %u", mComponentName.c_str(), *generation);
 
             // HACK: clear any free buffers. Remove when connect will automatically do this.
             // This is needed as the consumer may be holding onto stale frames that it can reattach
             // to this surface after disconnect/connect, and those free frames would inherit the new
             // generation number. Disconnecting after setting a unique generation prevents this.
             nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
-            err = nativeWindowConnect(surface.get(), "connectToSurface(reconnect)");
+            sp<IProducerListener> listener =
+                    new OnBufferReleasedListener(*generation, mBufferChannel);
+            err = surfaceConnectWithListener(
+                    surface, listener, "connectToSurface(reconnect-with-listener)");
         }
 
         if (err != OK) {
-            ALOGE("nativeWindowConnect returned an error: %s (%d)", strerror(-err), err);
+            *generation = 0;
+            ALOGE("nativeWindowConnect/surfaceConnectWithListener returned an error: %s (%d)",
+                    strerror(-err), err);
         } else {
             if (!mAllowFrameDroppingBySurface) {
                 disableLegacyBufferDropPostQ(surface);
@@ -6385,6 +6457,7 @@
         }
         // assume disconnected even on error
         mSurface.clear();
+        mSurfaceGeneration = 0;
         mIsSurfaceToDisplay = false;
     }
     return err;
@@ -6396,9 +6469,11 @@
         (void)disconnectFromSurface();
     }
     if (surface != NULL) {
-        err = connectToSurface(surface);
+        uint32_t generation;
+        err = connectToSurface(surface, &generation);
         if (err == OK) {
             mSurface = surface;
+            mSurfaceGeneration = generation;
         }
     }
     return err;
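
For illustration, a minimal standalone sketch of the surface generation-number scheme used by connectToSurface() above: the PID fills the high bits and a 10-bit rolling counter fills the low bits, so zero can safely mean "no generation". The helper name makeSurfaceGeneration() and the use of std::atomic are assumptions of this sketch, not part of the change, which keeps a plain function-local static counter.

#include <unistd.h>

#include <atomic>
#include <cstdint>
#include <cstdio>

// Hypothetical helper mirroring the (pid << 10) | counter packing above.
static uint32_t makeSurfaceGeneration() {
    static std::atomic<uint32_t> sSurfaceGeneration{0};
    // Linux PIDs fit in 22 bits, so the shifted PID never collides with the
    // 10-bit counter; PID is never 0, so 0 stays reserved for "no generation".
    return (static_cast<uint32_t>(getpid()) << 10) |
            (++sSurfaceGeneration & ((1u << 10) - 1));
}

int main() {
    for (int i = 0; i < 3; ++i) {
        std::printf("surface generation: %u\n", makeSurfaceGeneration());
    }
    return 0;
}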
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 9768f97..aaf7465 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -208,6 +208,9 @@
         ALOGE("WriteSampleData() get an NULL buffer.");
         return -EINVAL;
     }
+    if (!mWriter->isSampleMetadataValid(trackIndex, timeUs)) {
+        return -EINVAL;
+    }
     {
         /* As MediaMuxer's writeSampleData handles inputs from multiple tracks,
          * limited the scope of mMuxerLock to this inner block so that the
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 291b892..604dcb0 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -225,6 +225,13 @@
         return err;
     };
 
+    // We need to set sidebandStream to nullptr before pushing blank buffers
+    err = native_window_set_sideband_stream(nativeWindow, nullptr);
+    if (err != NO_ERROR) {
+        ALOGE("error setting sidebandStream to nullptr: %s (%d)", strerror(-err), -err);
+        return err;
+    }
+
     // We need to reconnect to the ANativeWindow as a CPU client to ensure that
     // no frames get dropped by SurfaceFlinger assuming that these are video
     // frames.
@@ -291,6 +298,11 @@
             ALOGE("error pushing blank frames: lock failed: %s (%d)", strerror(-err), -err);
             break;
         }
+        if (img == nullptr) {
+            (void)buf->unlock(); // Since lock() was successful.
+            ALOGE("error pushing blank frames: lock succeeded: buf mapping is nullptr");
+            break;
+        }
 
         *img = 0;
 
@@ -321,6 +333,16 @@
     return err;
 }
 
+status_t surfaceConnectWithListener(
+        const sp<Surface> &surface, sp<IProducerListener> listener, const char *reason) {
+    ALOGD("connecting to surface %p, reason %s", surface.get(), reason);
+
+    status_t err = surface->connect(NATIVE_WINDOW_API_MEDIA, listener);
+    ALOGE_IF(err != OK, "Failed to connect to surface %p, err %d", surface.get(), err);
+
+    return err;
+}
+
 status_t nativeWindowDisconnect(ANativeWindow *surface, const char *reason) {
     ALOGD("disconnecting from surface %p, reason %s", surface, reason);
 
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index fbd8577..eb9ac0f 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -15,7 +15,11 @@
  */
 
 #define LOG_TAG "VideoRenderQualityTracker"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+
 #include <utils/Log.h>
+#include <utils/Trace.h>
+#include <utils/Mutex.h>
 
 #include <media/stagefright/VideoRenderQualityTracker.h>
 
@@ -24,9 +28,12 @@
 #include <cmath>
 #include <stdio.h>
 #include <sys/time.h>
+#include <sys/wait.h>
 
+#include <android-base/macros.h>
 #include <android-base/parsebool.h>
 #include <android-base/parseint.h>
+#include <android-base/properties.h>
 
 namespace android {
 
@@ -38,6 +45,7 @@
 
 typedef VideoRenderQualityTracker::Configuration::GetServerConfigurableFlagFn
         GetServerConfigurableFlagFn;
+typedef VideoRenderQualityTracker::TraceTriggerFn TraceTriggerFn;
 
 static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
                                       char const *flagNameSuffix, bool *value) {
@@ -119,6 +127,7 @@
     contentFrameRate = FRAME_RATE_UNDETERMINED;
     desiredFrameRate = FRAME_RATE_UNDETERMINED;
     actualFrameRate = FRAME_RATE_UNDETERMINED;
+    maxContentDroppedAfterPauseMs = 0;
     freezeEventCount = 0;
     freezeDurationMsHistogram.clear();
     freezeDistanceMsHistogram.clear();
@@ -137,6 +146,7 @@
     getFlag(maxExpectedContentFrameDurationUs, "max_expected_content_frame_duration_us");
     getFlag(frameRateDetectionToleranceUs, "frame_rate_detection_tolerance_us");
     getFlag(liveContentFrameDropToleranceUs, "live_content_frame_drop_tolerance_us");
+    getFlag(pauseAudioLatencyUs, "pause_audio_latency_us");
     getFlag(freezeDurationMsHistogramBuckets, "freeze_duration_ms_histogram_buckets");
     getFlag(freezeDurationMsHistogramToScore, "freeze_duration_ms_histogram_to_score");
     getFlag(freezeDistanceMsHistogramBuckets, "freeze_distance_ms_histogram_buckets");
@@ -149,6 +159,9 @@
     getFlag(judderEventMax, "judder_event_max");
     getFlag(judderEventDetailsMax, "judder_event_details_max");
     getFlag(judderEventDistanceToleranceMs, "judder_event_distance_tolerance_ms");
+    getFlag(traceTriggerEnabled, "trace_trigger_enabled");
+    getFlag(traceTriggerThrottleMs, "trace_trigger_throttle_ms");
+    getFlag(traceMinFreezeDurationMs, "trace_minimum_freeze_duration_ms");
 #undef getFlag
     return c;
 }
@@ -170,6 +183,9 @@
     // because of frame drops for live content, or because the user is seeking.
     liveContentFrameDropToleranceUs = 200 * 1000;
 
+    // After a pause is initiated, audio should likely stop playback within 200ms.
+    pauseAudioLatencyUs = 200 * 1000;
+
     // Freeze configuration
     freezeDurationMsHistogramBuckets = {1, 20, 40, 60, 80, 100, 120, 150, 175, 225, 300, 400, 500};
     freezeDurationMsHistogramToScore = {1,  1,  1,  1,  1,   1,   1,   1,   1,   1,   1,   1,   1};
@@ -186,15 +202,24 @@
     judderEventMax = 0; // enabled only when debugging
     judderEventDetailsMax = 20;
     judderEventDistanceToleranceMs = 5000; // lump judder occurrences together when 5s or less
+
+    // Perfetto trigger configuration.
+    traceTriggerEnabled = android::base::GetProperty(
+        "ro.build.type", "user") != "user"; // Enabled for non-user builds for debugging.
+    traceTriggerThrottleMs = 5 * 60 * 1000; // 5 mins.
+    traceMinFreezeDurationMs = 400;
 }
 
-VideoRenderQualityTracker::VideoRenderQualityTracker() : mConfiguration(Configuration()) {
+VideoRenderQualityTracker::VideoRenderQualityTracker()
+    : mConfiguration(Configuration()), mTraceTriggerFn(triggerTrace) {
     configureHistograms(mMetrics, mConfiguration);
     clear();
 }
 
-VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration) :
-        mConfiguration(configuration) {
+VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration,
+                                                     const TraceTriggerFn traceTriggerFn)
+    : mConfiguration(configuration),
+      mTraceTriggerFn(traceTriggerFn == nullptr ? triggerTrace : traceTriggerFn) {
     configureHistograms(mMetrics, mConfiguration);
     clear();
 }
@@ -231,6 +256,11 @@
 
     resetIfDiscontinuity(contentTimeUs, -1);
 
+    if (mTraceFrameSkippedToken == -1) {
+        mTraceFrameSkippedToken = contentTimeUs;
+        ATRACE_ASYNC_BEGIN("Video frame(s) skipped", mTraceFrameSkippedToken);
+    }
+
     // Frames skipped at the end of playback shouldn't be counted as skipped frames, since the
     // app could be terminating the playback. The pending count will be added to the metrics if and
     // when the next frame is rendered.
@@ -261,11 +291,17 @@
         return;
     }
 
+    if (mTraceFrameSkippedToken != -1) {
+        ATRACE_ASYNC_END("Video frame(s) skipped", mTraceFrameSkippedToken);
+        mTraceFrameSkippedToken = -1;
+    }
+
     int64_t actualRenderTimeUs = actualRenderTimeNs / 1000;
 
     if (mLastRenderTimeUs != -1) {
         mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
     }
+
     // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
     // frames since the app is not skipping them to terminate playback.
     for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
@@ -357,13 +393,13 @@
     mLastRenderTimeUs = -1;
     mLastFreezeEndTimeUs = -1;
     mLastJudderEndTimeUs = -1;
-    mWasPreviousFrameDropped = false;
+    mDroppedContentDurationUs = 0;
     mFreezeEvent.valid = false;
     mJudderEvent.valid = false;
 
-    // Don't worry about tracking frame rendering times from now up until playback catches up to the
-    // discontinuity. While stuttering or freezing could be found in the next few frames, the impact
-    // to the user is is minimal, so better to just keep things simple and don't bother.
+    // Don't worry about tracking frame rendering times from now up until playback catches up to
+    // the discontinuity. While stuttering or freezing could be found in the next few frames, the
+    // impact to the user is minimal, so better to just keep things simple and not bother.
     mNextExpectedRenderedFrameQueue = {};
     mTunnelFrameQueuedContentTimeUs = -1;
 
@@ -432,7 +468,7 @@
     updateFrameDurations(mDesiredFrameDurationUs, -1);
     updateFrameDurations(mActualFrameDurationUs, -1);
     updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
-    mWasPreviousFrameDropped = false;
+    mDroppedContentDurationUs = 0;
 }
 
 void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTimeUs,
@@ -443,7 +479,9 @@
     updateFrameDurations(mActualFrameDurationUs, -1);
     updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
     updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
-    mWasPreviousFrameDropped = true;
+    if (mContentFrameDurationUs[0] != -1) {
+        mDroppedContentDurationUs += mContentFrameDurationUs[0];
+    }
 }
 
 void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTimeUs,
@@ -451,10 +489,14 @@
                                                                int64_t actualRenderTimeUs,
                                                                FreezeEvent *freezeEventOut,
                                                                JudderEvent *judderEventOut) {
+    const Configuration& c = mConfiguration;
+
     // Capture the timestamp at which the first frame was rendered
     if (mMetrics.firstRenderTimeUs == 0) {
         mMetrics.firstRenderTimeUs = actualRenderTimeUs;
     }
+    // Capture the timestamp at which the last frame was rendered
+    mMetrics.lastRenderTimeUs = actualRenderTimeUs;
 
     mMetrics.frameRenderedCount++;
 
@@ -471,11 +513,36 @@
     updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
     updateFrameRate(mMetrics.actualFrameRate, mActualFrameDurationUs, mConfiguration);
 
-    // If the previous frame was dropped, there was a freeze if we've already rendered a frame
-    if (mWasPreviousFrameDropped && mLastRenderTimeUs != -1) {
-        processFreeze(actualRenderTimeUs, mLastRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent,
-                      mMetrics, mConfiguration);
-        mLastFreezeEndTimeUs = actualRenderTimeUs;
+    // A freeze occurs if frames were dropped NOT after a discontinuity
+    if (mDroppedContentDurationUs != 0 && mLastRenderTimeUs != -1) {
+        // When pausing, audio playback may continue for a brief period of time after video
+        // pauses while the audio buffers drain. When resuming, a small number of video frames
+        // might be dropped to catch up to the audio position. This is acceptable behavior and
+        // should not count as a freeze.
+        bool isLikelyCatchingUpAfterPause = false;
+        // A pause can be detected if a freeze occurs for a longer period of time than the
+        // content duration of the dropped frames. This strategy works because, for freeze
+        // events (no video pause), the content duration of the dropped frames will closely track
+        // the wall clock time (freeze duration). When pausing, however, the wall clock time
+        // (freeze duration) will be longer than the content duration of the dropped frames
+        // required to catch up to the audio position.
+        const int64_t wallClockDurationUs = actualRenderTimeUs - mLastRenderTimeUs;
+        // 200ms is chosen because it is larger than what a hiccup in the display pipeline would
+        // likely be, but shorter than the duration for which a user would typically pause.
+        static const int32_t MAX_PIPELINE_HICCUP_DURATION_US = 200 * 1000;
+        if (wallClockDurationUs > mDroppedContentDurationUs + MAX_PIPELINE_HICCUP_DURATION_US) {
+            // Capture the amount of content that is dropped after pause, so we can push apps to be
+            // better about this behavior.
+            if (mDroppedContentDurationUs / 1000 > mMetrics.maxContentDroppedAfterPauseMs) {
+                mMetrics.maxContentDroppedAfterPauseMs = int32_t(mDroppedContentDurationUs / 1000);
+            }
+            isLikelyCatchingUpAfterPause = mDroppedContentDurationUs <= c.pauseAudioLatencyUs;
+        }
+        if (!isLikelyCatchingUpAfterPause) {
+            processFreeze(actualRenderTimeUs, mLastRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent,
+                        mMetrics, mConfiguration, mTraceTriggerFn);
+            mLastFreezeEndTimeUs = actualRenderTimeUs;
+        }
     }
     maybeCaptureFreezeEvent(actualRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent, mMetrics,
                             mConfiguration, freezeEventOut);
@@ -494,13 +561,13 @@
     maybeCaptureJudderEvent(actualRenderTimeUs, mLastJudderEndTimeUs, mJudderEvent, mMetrics,
                             mConfiguration, judderEventOut);
 
-    mWasPreviousFrameDropped = false;
+    mDroppedContentDurationUs = 0;
 }
 
 void VideoRenderQualityTracker::processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
                                               int64_t lastFreezeEndTimeUs, FreezeEvent &e,
-                                              VideoRenderQualityMetrics &m,
-                                              const Configuration &c) {
+                                              VideoRenderQualityMetrics &m, const Configuration &c,
+                                              const TraceTriggerFn traceTriggerFn) {
     int32_t durationMs = int32_t((actualRenderTimeUs - lastRenderTimeUs) / 1000);
     m.freezeDurationMsHistogram.insert(durationMs);
     int32_t distanceMs = -1;
@@ -536,6 +603,11 @@
             e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
         }
     }
+
+    if (c.traceTriggerEnabled && durationMs >= c.traceMinFreezeDurationMs) {
+        ALOGI("Video froze for %lld ms", (long long) durationMs);
+        triggerTraceWithThrottle(traceTriggerFn, c, actualRenderTimeUs);
+    }
 }
 
 void VideoRenderQualityTracker::maybeCaptureFreezeEvent(int64_t actualRenderTimeUs,
@@ -736,4 +808,54 @@
     return false;
 }
 
+void VideoRenderQualityTracker::triggerTraceWithThrottle(const TraceTriggerFn traceTriggerFn,
+                                                         const Configuration &c,
+                                                         const int64_t triggerTimeUs) {
+    static int64_t lastTriggerUs = -1;
+    static Mutex updateLastTriggerLock;
+
+    {
+        Mutex::Autolock autoLock(updateLastTriggerLock);
+        if (lastTriggerUs != -1) {
+            int32_t sinceLastTriggerMs = int32_t((triggerTimeUs - lastTriggerUs) / 1000);
+            // Throttle the trace trigger calls: repeated fork calls in a short time would impact
+            // device performance and would spam trace reports.
+            if (sinceLastTriggerMs < c.traceTriggerThrottleMs) {
+                ALOGI("Not triggering trace - not enough time since last trigger");
+                return;
+            }
+        }
+        lastTriggerUs = triggerTimeUs;
+    }
+
+    (*traceTriggerFn)();
+}
+
+void VideoRenderQualityTracker::triggerTrace() {
+    // Trigger Perfetto to stop always-on tracing (AOT) and collect the trace into a file for
+    // the video freeze event; the collected trace categories are configured by AOT.
+    static const char* args[] = {"/system/bin/trigger_perfetto",
+                                 "com.android.codec-video-freeze", NULL};
+
+    pid_t pid = fork();
+    if (pid < 0) {
+        ALOGI("Failed to fork for triggering trace");
+    } else if (pid == 0) {
+        // Child process.
+        ALOGI("Trigger trace %s", args[1]);
+        execvp(args[0], const_cast<char**>(args));
+        ALOGW("Failed to trigger trace %s", args[1]);
+        _exit(1);
+    } else {
+        // Parent process.
+        int status;
+        // Wait for the child process (pid) to terminate so that the system can release the
+        // resources associated with it. Otherwise the child process remains a zombie and is
+        // eventually killed by llkd, causing a foreground app crash.
+        if (waitpid(pid, &status, 0) < 0) {
+            ALOGW("Failed to waitpid for triggering trace");
+        }
+    }
+}
+
 } // namespace android
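
As a minimal sketch of the pause-versus-freeze heuristic added to processMetricsForRenderedFrame() above: a rendering gap only counts as a freeze when the wall-clock gap is not explained by the dropped content duration plus a pipeline-hiccup allowance within the audio pause latency. The function name and standalone form are assumptions of this sketch; the 200 ms values mirror the defaults in the change.

#include <cstdint>
#include <cstdio>

// Returns true when a rendering gap should be reported as a freeze rather
// than as a pause/resume catch-up.
static bool isFreeze(int64_t wallClockGapUs, int64_t droppedContentUs,
                     int64_t pauseAudioLatencyUs = 200 * 1000) {
    static const int64_t kMaxPipelineHiccupUs = 200 * 1000;
    if (droppedContentUs == 0) {
        return false;  // nothing was dropped, so there is nothing to report
    }
    const bool likelyCatchingUpAfterPause =
            wallClockGapUs > droppedContentUs + kMaxPipelineHiccupUs &&
            droppedContentUs <= pauseAudioLatencyUs;
    return !likelyCatchingUpAfterPause;
}

int main() {
    // A 2s gap with only 100ms of dropped content: looks like pause/resume.
    std::printf("pause case  -> freeze? %d\n", isFreeze(2000000, 100000));
    // A 600ms gap with 500ms of dropped content: a genuine freeze.
    std::printf("freeze case -> freeze? %d\n", isFreeze(600000, 500000));
    return 0;
}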
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp
index 04737a9..9198b7c 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp
@@ -120,6 +120,11 @@
 
 OMX_ERRORTYPE SoftVP8Encoder::internalGetVp8Params(
         OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
+    if (!isValidOMXParam(vp8Params)) {
+        android_errorWriteLog(0x534e4554, "273936274");
+        return OMX_ErrorBadParameter;
+    }
+
     if (vp8Params->nPortIndex != kOutputPortIndex) {
         return OMX_ErrorUnsupportedIndex;
     }
@@ -133,6 +138,11 @@
 
 OMX_ERRORTYPE SoftVP8Encoder::internalSetVp8Params(
         const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
+    if (!isValidOMXParam(vp8Params)) {
+        android_errorWriteLog(0x534e4554, "273937171");
+        return OMX_ErrorBadParameter;
+    }
+
     if (vp8Params->nPortIndex != kOutputPortIndex) {
         return OMX_ErrorUnsupportedIndex;
     }
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
index 1ea1c85..f8495c2 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
@@ -119,6 +119,11 @@
 
 OMX_ERRORTYPE SoftVP9Encoder::internalGetVp9Params(
         OMX_VIDEO_PARAM_VP9TYPE *vp9Params) {
+    if (!isValidOMXParam(vp9Params)) {
+        android_errorWriteLog(0x534e4554, "273936553");
+        return OMX_ErrorBadParameter;
+    }
+
     if (vp9Params->nPortIndex != kOutputPortIndex) {
         return OMX_ErrorUnsupportedIndex;
     }
@@ -133,6 +138,11 @@
 
 OMX_ERRORTYPE SoftVP9Encoder::internalSetVp9Params(
         const OMX_VIDEO_PARAM_VP9TYPE *vp9Params) {
+    if (!isValidOMXParam(vp9Params)) {
+        android_errorWriteLog(0x534e4554, "273937136");
+        return OMX_ErrorBadParameter;
+    }
+
     if (vp9Params->nPortIndex != kOutputPortIndex) {
         return OMX_ErrorUnsupportedIndex;
     }
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index e9b4341..cbedb72 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -485,6 +485,11 @@
 
 OMX_ERRORTYPE SoftVPXEncoder::internalGetAndroidVpxParams(
         OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams) {
+    if (!isValidOMXParam(vpxAndroidParams)) {
+        android_errorWriteLog(0x534e4554, "273936601");
+        return OMX_ErrorBadParameter;
+    }
+
     if (vpxAndroidParams->nPortIndex != kOutputPortIndex) {
         return OMX_ErrorUnsupportedIndex;
     }
@@ -501,6 +506,10 @@
 
 OMX_ERRORTYPE SoftVPXEncoder::internalSetAndroidVpxParams(
         const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams) {
+    if (!isValidOMXParam(vpxAndroidParams)) {
+        android_errorWriteLog(0x534e4554, "273937551");
+        return OMX_ErrorBadParameter;
+    }
     if (vpxAndroidParams->nPortIndex != kOutputPortIndex) {
         return OMX_ErrorUnsupportedIndex;
     }
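
The isValidOMXParam() checks added to the VP8/VP9/VPX encoders above guard against undersized parameter structs reaching the handlers. As a rough, self-contained illustration of that kind of size check (the struct, template, and function below are mock names for this sketch and do not reproduce the actual OMX helper):

#include <cstdint>
#include <cstdio>

// Mock of an OMX-style parameter struct whose first field declares its size.
struct MockOmxParam {
    uint32_t nSize;
    uint32_t nPortIndex;
};

// Hypothetical check in the spirit of isValidOMXParam(): reject any struct
// whose declared size is smaller than the type the handler expects, so the
// handler never reads past the caller's buffer.
template <typename T>
static bool exampleIsValidParam(const T *param) {
    return param != nullptr && param->nSize >= sizeof(T);
}

int main() {
    MockOmxParam ok{sizeof(MockOmxParam), 1};
    MockOmxParam tooSmall{4, 1};
    std::printf("ok: %d, tooSmall: %d\n",
                exampleIsValidParam(&ok), exampleIsValidParam(&tooSmall));
    return 0;
}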
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index f91a8b2..6c26c28 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -363,6 +363,7 @@
     int32_t _g_u;
     int32_t _g_v;
     int32_t _b_u;
+    int32_t _c16;  // 16 for limited range matrix, 0 for full range
 };
 
 /*
@@ -425,18 +426,18 @@
  *
  * clip range 8-bit: [-277, 535], 10-bit: [-1111, 2155]
  */
-const struct ColorConverter::Coeffs BT601_FULL      = { 256, 359,  88, 183, 454 };
-const struct ColorConverter::Coeffs BT601_LIMITED   = { 298, 409, 100, 208, 516 };
-const struct ColorConverter::Coeffs BT601_LTD_10BIT = { 299, 410, 101, 209, 518 };
+const struct ColorConverter::Coeffs BT601_FULL      = { 256, 359,  88, 183, 454, 0 };
+const struct ColorConverter::Coeffs BT601_LIMITED   = { 298, 409, 100, 208, 516, 16 };
+const struct ColorConverter::Coeffs BT601_LTD_10BIT = { 299, 410, 101, 209, 518, 16 };
 
 /**
  * BT.709:  K_R = 0.2126; K_B = 0.0722
  *
  * clip range 8-bit: [-289, 547], 10-bit: [-1159, 2202]
  */
-const struct ColorConverter::Coeffs BT709_FULL      = { 256, 403,  48, 120, 475 };
-const struct ColorConverter::Coeffs BT709_LIMITED   = { 298, 459,  55, 136, 541 };
-const struct ColorConverter::Coeffs BT709_LTD_10BIT = { 290, 460,  55, 137, 542 };
+const struct ColorConverter::Coeffs BT709_FULL      = { 256, 403,  48, 120, 475, 0 };
+const struct ColorConverter::Coeffs BT709_LIMITED   = { 298, 459,  55, 136, 541, 16 };
+const struct ColorConverter::Coeffs BT709_LTD_10BIT = { 299, 460,  55, 137, 542, 16 };
 
 /**
  * BT.2020:  K_R = 0.2627; K_B = 0.0593
@@ -445,9 +446,9 @@
  *
  * This is the largest clip range.
  */
-const struct ColorConverter::Coeffs BT2020_FULL      = { 256, 377,  42, 146, 482 };
-const struct ColorConverter::Coeffs BT2020_LIMITED   = { 298, 430,  48, 167, 548 };
-const struct ColorConverter::Coeffs BT2020_LTD_10BIT = { 299, 431,  48, 167, 550 };
+const struct ColorConverter::Coeffs BT2020_FULL      = { 256, 377,  42, 146, 482, 0 };
+const struct ColorConverter::Coeffs BT2020_LIMITED   = { 298, 430,  48, 167, 548, 16 };
+const struct ColorConverter::Coeffs BT2020_LTD_10BIT = { 299, 431,  48, 167, 550, 16 };
 
 constexpr int CLIP_RANGE_MIN_8BIT = -294;
 constexpr int CLIP_RANGE_MAX_8BIT = 552;
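
For reference, a standalone sketch of how coefficient tables like the ones above (now carrying the _c16 luma offset directly) are typically applied in 8-bit fixed-point YUV-to-RGB conversion. This shows the generic formula for illustration, not a copy of the per-format loops in this file; the struct and function names are local to the sketch.

#include <algorithm>
#include <cstdio>

// Mirrors the coefficient layout above; names are local to this sketch.
struct Coeffs { int y, r_v, g_u, g_v, b_u, c16; };

static void yuvToRgb(const Coeffs &m, int Y, int U, int V,
                     int *R, int *G, int *B) {
    auto clip = [](int x) { return std::min(255, std::max(0, x)); };
    const int y = m.y * (Y - m.c16);  // _c16 is 16 for limited range, 0 for full
    const int u = U - 128;
    const int v = V - 128;
    *R = clip((y + m.r_v * v + 128) >> 8);
    *G = clip((y - m.g_u * u - m.g_v * v + 128) >> 8);
    *B = clip((y + m.b_u * u + 128) >> 8);
}

int main() {
    const Coeffs bt601Limited = {298, 409, 100, 208, 516, 16};
    int r, g, b;
    yuvToRgb(bt601Limited, /* Y */ 235, /* U */ 128, /* V */ 128, &r, &g, &b);
    std::printf("limited-range white -> R=%d G=%d B=%d\n", r, g, b);
    return 0;
}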
@@ -781,7 +782,7 @@
     signed _neg_g_v = -matrix->_g_v;
     signed _r_v = matrix->_r_v;
     signed _y = matrix->_y;
-    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+    signed _c16 = matrix->_c16;
 
     uint8_t *kAdjustedClip = initClip();
 
@@ -1257,6 +1258,7 @@
     signed _neg_g_v = -matrix->_g_v;
     signed _r_v = matrix->_r_v;
     signed _y = matrix->_y;
+    signed _c16 = matrix->_c16;
 
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
             + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
@@ -1275,13 +1277,12 @@
 
     //TODO: optimize for chroma sampling, reading and writing multiple pixels
     //      within the same loop
-    signed _c16 = 0;
+
     void *kAdjustedClip = nullptr;
     if (mSrcImage->getBitDepth() != ImageBitDepth8) {
         ALOGE("BitDepth != 8 for MediaImage2");
         return ERROR_UNSUPPORTED;
     }
-    _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
     kAdjustedClip = initClip();
 
     auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
@@ -1388,7 +1389,7 @@
     signed _neg_g_v = -matrix->_g_v;
     signed _r_v = matrix->_r_v;
     signed _y = matrix->_y;
-    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+    signed _c16 = matrix->_c16;
 
     uint8_t *kAdjustedClip = initClip();
 
@@ -1463,7 +1464,7 @@
     signed _neg_g_v = -matrix->_g_v;
     signed _r_v = matrix->_r_v;
     signed _y = matrix->_y;
-    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 64 : 0;
+    signed _c64 = matrix->_c16 * 4;
 
     uint16_t *kAdjustedClip10bit = initClip10Bit();
 
@@ -1483,8 +1484,8 @@
     for (size_t y = 0; y < src.cropHeight(); ++y) {
         for (size_t x = 0; x < src.cropWidth(); x += 2) {
             signed y1, y2, u, v;
-            y1 = (src_y[x] >> 6) - _c16;
-            y2 = (src_y[x + 1] >> 6) - _c16;
+            y1 = (src_y[x] >> 6) - _c64;
+            y2 = (src_y[x + 1] >> 6) - _c64;
             u = int(src_uv[x] >> 6) - 512;
             v = int(src_uv[x + 1] >> 6) - 512;
 
diff --git a/media/libstagefright/data/media_codecs_google_c2_audio.xml b/media/libstagefright/data/media_codecs_google_c2_audio.xml
index 509f7a9..0d9e0ec 100644
--- a/media/libstagefright/data/media_codecs_google_c2_audio.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_audio.xml
@@ -66,7 +66,7 @@
         </MediaCodec>
         <MediaCodec name="c2.android.raw.decoder" type="audio/raw">
             <Alias name="OMX.google.raw.decoder" />
-            <Limit name="channel-count" max="8" />
+            <Limit name="channel-count" max="12" />
             <Limit name="sample-rate" ranges="8000-192000" />
             <Limit name="bitrate" range="1-10000000" />
         </MediaCodec>
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 665ceee..24020d1 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -80,7 +80,7 @@
         </MediaCodec>
         <MediaCodec name="c2.android.raw.decoder" type="audio/raw">
             <Alias name="OMX.google.raw.decoder" />
-            <Limit name="channel-count" max="8" />
+            <Limit name="channel-count" max="12" />
             <Limit name="sample-rate" ranges="8000-192000" />
             <Limit name="bitrate" range="1-10000000" />
             <Attribute name="software-codec" />
@@ -218,6 +218,24 @@
             <Feature name="adaptive-playback" />
             <Attribute name="software-codec" />
         </MediaCodec>
+        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
+            <Limit name="alignment" value="1x1" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+                <Limit name="blocks-per-second" range="1-245760" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="1280x1280" />
+                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
+                <Limit name="blocks-per-second" range="1-108000" />
+                <Limit name="bitrate" range="1-5000000" />
+            </Variant>
+            <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
+        </MediaCodec>
         <MediaCodec name="c2.android.mpeg2.decoder" type="video/mpeg2" domain="tv">
             <Alias name="OMX.google.mpeg2.decoder" />
             <!-- profiles and levels:  ProfileMain : LevelHL -->
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
index dd49714..cb2f4ee 100644
--- a/media/libstagefright/httplive/fuzzer/Android.bp
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -48,6 +48,7 @@
         "libstagefright_httplive_headers",
     ],
     shared_libs: [
+        "libbase",
         "libcrypto",
         "libstagefright_foundation",
         "libhidlbase",
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index 903280f..946d533 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -29,6 +29,7 @@
 #include <media/IOMX.h>
 
 namespace android {
+ struct ACodec;
 namespace hardware {
 class HidlMemory;
 };
@@ -63,15 +64,16 @@
     };
 
     ACodecBufferChannel(
-            const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained);
+            const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained,
+            const sp<AMessage> &pollForRenderedBuffers);
     virtual ~ACodecBufferChannel();
 
     // BufferChannelBase interface
     void setCrypto(const sp<ICrypto> &crypto) override;
     void setDescrambler(const sp<IDescrambler> &descrambler) override;
 
-    virtual status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
-    virtual status_t queueSecureInputBuffer(
+    status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    status_t queueSecureInputBuffer(
             const sp<MediaCodecBuffer> &buffer,
             bool secure,
             const uint8_t *key,
@@ -81,10 +83,10 @@
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
             AString *errorDetailMsg) override;
-    virtual status_t attachBuffer(
+    status_t attachBuffer(
             const std::shared_ptr<C2Buffer> &c2Buffer,
             const sp<MediaCodecBuffer> &buffer) override;
-    virtual status_t attachEncryptedBuffer(
+    status_t attachEncryptedBuffer(
             const sp<hardware::HidlMemory> &memory,
             bool secure,
             const uint8_t *key,
@@ -96,12 +98,12 @@
             size_t numSubSamples,
             const sp<MediaCodecBuffer> &buffer,
             AString* errorDetailMsg) override;
-    virtual status_t renderOutputBuffer(
+    status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
-    virtual void pollForRenderedBuffers() override;
-    virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
-    virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
-    virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+    void pollForRenderedBuffers() override;
+    status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+    void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
 
     // Methods below are interface for ACodec to use.
 
@@ -138,6 +140,7 @@
 
     const sp<AMessage> mInputBufferFilled;
     const sp<AMessage> mOutputBufferDrained;
+    const sp<AMessage> mPollForRenderedBuffers;
 
     sp<MemoryDealer> mDealer;
     sp<IMemory> mDecryptDestination;
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index e535d5d..a4d82ab 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -19,7 +19,7 @@
 
 #include <set>
 #include <stdint.h>
-#include <list>
+#include <deque>
 #include <vector>
 #include <android/native_window.h>
 #include <media/hardware/MetadataBufferType.h>
@@ -27,9 +27,9 @@
 #include <media/IOMX.h>
 #include <media/stagefright/AHierarchicalStateMachine.h>
 #include <media/stagefright/CodecBase.h>
-#include <media/stagefright/FrameRenderTracker.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/SkipCutBuffer.h>
+#include <ui/GraphicBuffer.h>
 #include <utils/NativeHandle.h>
 #include <OMX_Audio.h>
 #include <hardware/gralloc.h>
@@ -83,7 +83,7 @@
             const char* mime, bool isEncoder,
             MediaCodecInfo::CapabilitiesWriter* caps);
 
-    virtual status_t setSurface(const sp<Surface> &surface);
+    virtual status_t setSurface(const sp<Surface> &surface, uint32_t /*generation*/);
 
     virtual void signalFlush();
     virtual void signalResume();
@@ -156,6 +156,7 @@
         kWhatForceStateTransition    = 'fstt',
         kWhatCheckIfStuck            = 'Cstk',
         kWhatSubmitExtraOutputMetadataBuffer = 'sbxo',
+        kWhatPollForRenderedBuffers  = 'pfrb',
     };
 
     enum {
@@ -177,6 +178,13 @@
                             | static_cast<uint64_t>(BufferUsage::VIDEO_DECODER),
     };
 
+    struct TrackedFrame {
+        int64_t id;
+        int64_t mediaTimeUs;
+        int64_t desiredRenderTimeNs;
+        nsecs_t renderTimeNs;
+    };
+
     struct BufferInfo {
         enum Status {
             OWNED_BY_US,
@@ -204,7 +212,6 @@
         sp<GraphicBuffer> mGraphicBuffer;
         bool mNewGraphicBuffer;
         int mFenceFd;
-        FrameRenderTracker::Info *mRenderInfo;
 
         // The following field and 4 methods are used for debugging only
         bool mIsReadFence;
@@ -251,6 +258,11 @@
     int32_t mNodeGeneration;
     sp<TAllocator> mAllocator[2];
 
+    std::deque<TrackedFrame> mTrackedFrames; // render information for buffers sent to a window
+    bool mAreRenderMetricsEnabled;
+    bool mIsWindowToDisplay;
+    bool mHasPresentFenceTimes;
+
     bool mUsingNativeWindow;
     sp<ANativeWindow> mNativeWindow;
     int mNativeWindowUsageBits;
@@ -267,7 +279,6 @@
     // format updates. This will equal to mOutputFormat until the first actual frame is received.
     sp<AMessage> mBaseOutputFormat;
 
-    FrameRenderTracker mRenderTracker; // render information for buffers rendered by ACodec
     std::vector<BufferInfo> mBuffers[2];
     bool mPortEOS[2];
     status_t mInputEOSResult;
@@ -349,6 +360,10 @@
     status_t freeOutputBuffersNotOwnedByComponent();
     BufferInfo *dequeueBufferFromNativeWindow();
 
+    void initializeFrameTracking();
+    void trackReleasedFrame(int64_t frameId, int64_t mediaTimeUs, int64_t desiredRenderTimeNs);
+    void pollForRenderedFrames();
+
     inline bool storingMetadataInDecodedBuffers() {
         return (mPortMode[kPortIndexOutput] == IOMX::kPortModeDynamicANWBuffer) && !mIsEncoder;
     }
@@ -571,21 +586,6 @@
     void processDeferredMessages();
 
     void onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
-    // called when we have dequeued a buffer |buf| from the native window to track render info.
-    // |fenceFd| is the dequeue fence, and |info| points to the buffer info where this buffer is
-    // stored.
-    void updateRenderInfoForDequeuedBuffer(
-            ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info);
-
-    // Checks to see if any frames have rendered up until |until|, and to notify client
-    // (MediaCodec) of rendered frames up-until the frame pointed to by |until| or the first
-    // unrendered frame. These frames are removed from the render queue.
-    // If |dropIncomplete| is true, unrendered frames up-until |until| will be dropped from the
-    // queue, allowing all rendered framed up till then to be notified of.
-    // (This will effectively clear the render queue up-until (and including) |until|.)
-    // If |until| is NULL, or is not in the rendered queue, this method will check all frames.
-    void notifyOfRenderedFrames(
-            bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
 
     void onFirstTunnelFrameReady();
 
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 2a5989f..0927653 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -41,7 +41,7 @@
 struct BufferProducerWrapper;
 class MediaCodecBuffer;
 struct PersistentSurface;
-struct RenderedFrameInfo;
+class RenderedFrameInfo;
 class Surface;
 struct ICrypto;
 class IMemory;
@@ -239,7 +239,9 @@
     // require an explicit message handler
     virtual void onMessageReceived(const sp<AMessage> &msg) = 0;
 
-    virtual status_t setSurface(const sp<Surface>& /*surface*/) { return INVALID_OPERATION; }
+    virtual status_t setSurface(const sp<Surface>& /*surface*/, uint32_t /*generation*/) {
+        return INVALID_OPERATION;
+    }
 
     virtual void signalFlush() = 0;
     virtual void signalResume() = 0;
@@ -424,6 +426,15 @@
     virtual void pollForRenderedBuffers() = 0;
 
     /**
+     * Notify that a buffer has been released from the output surface.
+     *
+     * @param     generation    MediaCodec's surface specifier
+     */
+    virtual void onBufferReleasedFromOutputSurface(uint32_t /*generation*/) {
+        // default: no-op
+    };
+
+    /**
      * Discard a buffer to the underlying CodecBase object.
      *
      * TODO: remove once this operation can be handled by just clearing the
diff --git a/media/libstagefright/include/media/stagefright/FrameRenderTracker.h b/media/libstagefright/include/media/stagefright/FrameRenderTracker.h
index c14755a..cab7ecc 100644
--- a/media/libstagefright/include/media/stagefright/FrameRenderTracker.h
+++ b/media/libstagefright/include/media/stagefright/FrameRenderTracker.h
@@ -32,61 +32,59 @@
 
 namespace android {
 
-// Tracks the render information about a frame. Frames go through several states while
-// the render information is tracked:
-//
-// 1. queued frame: mMediaTime and mGraphicBuffer are set for the frame. mFence is the
-// queue fence (read fence). mIndex is negative, and mRenderTimeNs is invalid.
-// Key characteristics: mFence is not NULL and mIndex is negative.
-//
-// 2. dequeued frame: mFence is updated with the dequeue fence (write fence). mIndex is set.
-// Key characteristics: mFence is not NULL and mIndex is non-negative. mRenderTime is still
-// invalid.
-//
-// 3. rendered frame or frame: mFence is cleared, mRenderTimeNs is set.
-// Key characteristics: mFence is NULL.
-//
-struct RenderedFrameInfo {
-    // set by client during onFrameQueued or onFrameRendered
-    int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
-
-    // -1 if frame is not yet rendered
-    nsecs_t getRenderTimeNs() const { return mRenderTimeNs; }
-
-    // set by client during updateRenderInfoForDequeuedBuffer; -1 otherwise
-    ssize_t getIndex() const        { return mIndex; }
-
-    // creates information for a queued frame
-    RenderedFrameInfo(int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer,
-            const sp<Fence> &fence)
-        : mMediaTimeUs(mediaTimeUs),
-          mRenderTimeNs(-1),
-          mIndex(-1),
-          mGraphicBuffer(graphicBuffer),
-          mFence(fence) {
-    }
-
-    // creates information for a frame rendered on a tunneled surface
-    RenderedFrameInfo(int64_t mediaTimeUs, nsecs_t renderTimeNs)
-        : mMediaTimeUs(mediaTimeUs),
-          mRenderTimeNs(renderTimeNs),
-          mIndex(-1),
-          mGraphicBuffer(NULL),
-          mFence(NULL) {
-    }
-
-private:
-    int64_t mMediaTimeUs;
-    nsecs_t mRenderTimeNs;
-    ssize_t mIndex;         // to be used by client
-    sp<GraphicBuffer> mGraphicBuffer;
-    sp<Fence> mFence;
-
-    friend struct FrameRenderTracker;
-};
-
 struct FrameRenderTracker {
-    typedef RenderedFrameInfo Info;
+    // Tracks the render information about a frame. Frames go through several states while
+    // the render information is tracked:
+    //
+    // 1. queued frame: mMediaTime and mGraphicBuffer are set for the frame. mFence is the
+    // queue fence (read fence). mIndex is negative, and mRenderTimeNs is invalid.
+    // Key characteristics: mFence is not NULL and mIndex is negative.
+    //
+    // 2. dequeued frame: mFence is updated with the dequeue fence (write fence). mIndex is set.
+    // Key characteristics: mFence is not NULL and mIndex is non-negative. mRenderTime is still
+    // invalid.
+    //
+    // 3. rendered frame or frame: mFence is cleared, mRenderTimeNs is set.
+    // Key characteristics: mFence is NULL.
+    //
+    struct Info {
+        // set by client during onFrameQueued or onFrameRendered
+        int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
+
+        // -1 if frame is not yet rendered
+        nsecs_t getRenderTimeNs() const { return mRenderTimeNs; }
+
+        // set by client during updateRenderInfoForDequeuedBuffer; -1 otherwise
+        ssize_t getIndex() const        { return mIndex; }
+
+        // creates information for a queued frame
+        Info(int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer,
+                const sp<Fence> &fence)
+          : mMediaTimeUs(mediaTimeUs),
+            mRenderTimeNs(-1),
+            mIndex(-1),
+            mGraphicBuffer(graphicBuffer),
+            mFence(fence) {
+        }
+
+        // creates information for a frame rendered on a tunneled surface
+        Info(int64_t mediaTimeUs, nsecs_t renderTimeNs)
+            : mMediaTimeUs(mediaTimeUs),
+            mRenderTimeNs(renderTimeNs),
+            mIndex(-1),
+            mGraphicBuffer(NULL),
+            mFence(NULL) {
+        }
+
+    private:
+        int64_t mMediaTimeUs;
+        nsecs_t mRenderTimeNs;
+        ssize_t mIndex;         // to be used by client
+        sp<GraphicBuffer> mGraphicBuffer;
+        sp<Fence> mFence;
+
+        friend struct FrameRenderTracker;
+    };
 
     FrameRenderTracker();
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index bc0f6c5..baa5b7e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -64,6 +64,7 @@
 class MediaCodecBuffer;
 class IMemory;
 struct PersistentSurface;
+class RenderedFrameInfo;
 class SoftwareRenderer;
 class Surface;
 namespace hardware {
@@ -281,6 +282,8 @@
     // by adding rendered frame information to a base notification message. Returns the number
     // of frames that were rendered.
     static size_t CreateFramesRenderedMessage(
+            const std::list<RenderedFrameInfo> &done, sp<AMessage> &msg);
+    static size_t CreateFramesRenderedMessage(
             const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg);
 
     static status_t CanFetchLinearBlock(
@@ -446,6 +449,7 @@
     int64_t mPresentationTimeUs = 0;
     status_t mStickyError;
     sp<Surface> mSurface;
+    uint32_t mSurfaceGeneration = 0;
     SoftwareRenderer *mSoftRenderer;
 
     Mutex mMetricsLock;
@@ -556,6 +560,7 @@
     int32_t mTunneledInputHeight;
     bool mTunneled;
     TunnelPeekState mTunnelPeekState;
+    bool mTunnelPeekEnabled;
 
     sp<IDescrambler> mDescrambler;
 
@@ -614,7 +619,7 @@
     status_t queueCSDInputBuffer(size_t bufferIndex);
 
     status_t handleSetSurface(const sp<Surface> &surface);
-    status_t connectToSurface(const sp<Surface> &surface);
+    status_t connectToSurface(const sp<Surface> &surface, uint32_t *generation);
     status_t disconnectFromSurface();
 
     bool hasCryptoOrDescrambler() {
@@ -720,6 +725,7 @@
 
     // A unique ID for the codec - Used by the metrics.
     uint64_t mCodecId = 0;
+    bool     mIsHardware = false;
 
     std::function<sp<CodecBase>(const AString &, const char *)> mGetCodecBase;
     std::function<status_t(const AString &, sp<MediaCodecInfo> *)> mGetCodecInfo;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecMetricsConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecMetricsConstants.h
new file mode 100644
index 0000000..2c40904
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/MediaCodecMetricsConstants.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#ifndef MEDIA_CODEC_METRICS_CONSTANTS_H_
+#define MEDIA_CODEC_METRICS_CONSTANTS_H_
+
+namespace android {
+
+// key for media statistics
+// Other keys are in MediaCodec.cpp
+// NB: These are not yet exposed as public Java API constants.
+inline constexpr char kCodecPixelFormat[] =
+        "android.media.mediacodec.pixel-format";
+
+}
+
+#endif  // MEDIA_CODEC_METRICS_CONSTANTS_H_
\ No newline at end of file
diff --git a/media/libstagefright/include/media/stagefright/MediaHistogram.h b/media/libstagefright/include/media/stagefright/MediaHistogram.h
index 50fa258..46ee288 100644
--- a/media/libstagefright/include/media/stagefright/MediaHistogram.h
+++ b/media/libstagefright/include/media/stagefright/MediaHistogram.h
@@ -29,11 +29,11 @@
 public:
     MediaHistogram();
     void clear();
-    bool setup(int bucketCount, T width, T floor = 0);
+    bool setup(size_t bucketCount, T width, T floor = 0);
     bool setup(const std::vector<T> &bucketLimits);
     void insert(T sample);
-    size_t size();
-    int64_t operator[](int);
+    size_t size() const;
+    int64_t operator[](int) const;
     T getMin() const { return mMin; }
     T getMax() const { return mMax; }
     T getCount() const { return mCount; }
@@ -45,7 +45,7 @@
 private:
     MediaHistogram(const MediaHistogram &); // disallow
 
-    bool allocate(int bucketCount, bool withBucketLimits);
+    void allocate(size_t bucketCount, bool withBucketLimits);
 
     T mFloor, mCeiling, mWidth;
     T mMin, mMax, mSum;
@@ -73,13 +73,12 @@
 }
 
 template<typename T>
-bool MediaHistogram<T>::setup(int bucketCount, T width, T floor) {
+bool MediaHistogram<T>::setup(size_t bucketCount, T width, T floor) {
     if (bucketCount <= 0 || width <= 0) {
         return false;
     }
-    if (!allocate(bucketCount, false)) {
-        return false;
-    }
+    allocate(bucketCount, false);
+
     mWidth = width;
     mFloor = floor;
     mCeiling = floor + bucketCount * width;
@@ -92,14 +91,14 @@
     if (bucketLimits.size() <= 1) {
         return false;
     }
-    int bucketCount = bucketLimits.size() - 1;
-    if (!allocate(bucketCount, true)) {
-        return false;
-    }
+    // The floor is the first bucket limit value, so offset by 1
+    size_t bucketCount = bucketLimits.size() - 1;
+    allocate(bucketCount, true);
 
     mWidth = -1;
     mFloor = bucketLimits[0];
-    for (int i = 0; i < bucketCount; ++i) {
+    for (size_t i = 0; i < bucketCount; ++i) {
+        // bucketLimits[0] is the floor, so the upper limits start at index 1
         mBucketLimits[i] = bucketLimits[i + 1];
     }
     mCeiling = bucketLimits[bucketCount];
@@ -108,7 +107,7 @@
 }
 
 template<typename T>
-bool MediaHistogram<T>::allocate(int bucketCount, bool withBucketLimits) {
+void MediaHistogram<T>::allocate(size_t bucketCount, bool withBucketLimits) {
     assert(bucketCount > 0);
     if (bucketCount != mBuckets.size()) {
         mBuckets = std::vector<T>(bucketCount, 0);
@@ -116,7 +115,6 @@
     if (withBucketLimits && mBucketLimits.size() != bucketCount) {
         mBucketLimits = std::vector<T>(bucketCount, 0);
     }
-    return true;
 }
 
 template<typename T>
@@ -128,8 +126,8 @@
 
     mCount++;
     mSum += sample;
-    if (mMin > sample) mMin = sample;
-    if (mMax < sample) mMax = sample;
+    mMin = std::min(mMin, sample);
+    mMax = std::max(mMax, sample);
 
     if (sample < mFloor) {
         mBelow++;
@@ -138,7 +136,7 @@
     } else if (mWidth == -1) {
         // A binary search might be more efficient for large number of buckets, but it is expected
         // that there will never be a large amount of buckets, so keep the code simple.
-        for (int slot = 0; slot < mBucketLimits.size(); ++slot) {
+        for (size_t slot = 0; slot < mBucketLimits.size(); ++slot) {
             if (sample < mBucketLimits[slot]) {
                 mBuckets[slot]++;
                 break;
@@ -153,12 +151,12 @@
 }
 
 template<typename T>
-size_t MediaHistogram<T>::size() {
+size_t MediaHistogram<T>::size() const {
     return mBuckets.size() + 1;
 }
 
 template<typename T>
-int64_t MediaHistogram<T>::operator[](int i) {
+int64_t MediaHistogram<T>::operator[](int i) const {
     assert(i >= 0);
     assert(i <= mBuckets.size());
     if (i == mBuckets.size()) {
@@ -179,7 +177,7 @@
     } else {
         ss << mFloor << "," << mWidth << "," << mBelow << "{";
     }
-    for (int i = 0; i < mBuckets.size(); i++) {
+    for (size_t i = 0; i < mBuckets.size(); i++) {
         if (i != 0) {
             ss << ",";
         }
@@ -194,12 +192,12 @@
     std::stringstream ss("");
     if (mWidth == -1) {
         ss << mFloor;
-        for (int i = 0; i < mBucketLimits.size(); ++i) {
+        for (size_t i = 0; i < mBucketLimits.size(); ++i) {
             ss << ',' << mBucketLimits[i];
         }
     } else {
         ss << mFloor;
-        for (int i = 1; i <= mBuckets.size(); ++i) {
+        for (size_t i = 1; i <= mBuckets.size(); ++i) {
             ss << ',' << (mFloor + i * mWidth);
         }
     }
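
A small usage sketch of the MediaHistogram template above, assuming the header is available on the include path (for example inside an AOSP build); the bucket layout and sample values are arbitrary. setup(bucketCount, width, floor) builds fixed-width buckets, setup(bucketLimits) builds variable-width buckets whose first limit is the floor, and size()/operator[] expose the per-bucket counts plus one overflow slot.

#include <cstdio>

#include <media/stagefright/MediaHistogram.h>

int main() {
    android::MediaHistogram<int32_t> h;
    // Five 20ms-wide buckets covering 0-100ms; larger samples land in the
    // overflow slot reported at index size() - 1.
    if (!h.setup(/* bucketCount */ 5, /* width */ 20, /* floor */ 0)) {
        return 1;
    }
    for (int32_t freezeDurationMs : {3, 18, 42, 95, 140}) {
        h.insert(freezeDurationMs);
    }
    std::printf("count=%d min=%d max=%d\n",
                (int)h.getCount(), (int)h.getMin(), (int)h.getMax());
    for (int i = 0; i < (int)h.size(); ++i) {
        std::printf("bucket[%d]=%lld\n", i, (long long)h[i]);
    }
    return 0;
}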
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 2b14811..04dcfc0 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -54,6 +54,12 @@
         return true;
     }
 
+    // Returns true if the sample metadata (e.g. the timestamp) for the given track is valid.
+    virtual bool isSampleMetadataValid([[maybe_unused]] size_t trackIndex,
+                                       [[maybe_unused]] int64_t timeUs) {
+        return true;
+    }
+
     virtual status_t addSource(const sp<MediaSource> &source) = 0;
     virtual bool reachedEOS() = 0;
     virtual status_t start(MetaData *params = NULL) = 0;
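
To illustrate the new isSampleMetadataValid() hook that MediaMuxer::writeSampleData() now consults (see the MediaMuxer.cpp hunk earlier), here is a sketch of the kind of per-track timestamp policy a writer could implement. The class name and the monotonic-timestamp rule are assumptions of this sketch, not the behavior of any concrete AOSP writer.

#include <cstddef>
#include <cstdint>
#include <map>

// Hypothetical writer-side check: reject negative or backwards timestamps per
// track so the muxer can fail fast with -EINVAL before queuing the sample.
class ExampleSampleMetadataCheck {
public:
    bool isSampleMetadataValid(size_t trackIndex, int64_t timeUs) {
        if (timeUs < 0) {
            return false;  // negative timestamps are never valid
        }
        auto it = mLastTimeUs.find(trackIndex);
        if (it != mLastTimeUs.end() && timeUs < it->second) {
            return false;  // example policy: no out-of-order samples
        }
        mLastTimeUs[trackIndex] = timeUs;
        return true;
    }

private:
    std::map<size_t, int64_t> mLastTimeUs;  // last timestamp seen per track
};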
diff --git a/media/libstagefright/include/media/stagefright/RenderedFrameInfo.h b/media/libstagefright/include/media/stagefright/RenderedFrameInfo.h
new file mode 100644
index 0000000..4b8a58d
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/RenderedFrameInfo.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RENDERED_FRAME_INFO_H
+#define RENDERED_FRAME_INFO_H
+
+namespace android {
+
+class RenderedFrameInfo {
+public:
+    RenderedFrameInfo(int64_t mediaTimeUs, int64_t renderTimeNs)
+        : mMediaTimeUs(mediaTimeUs), mRenderTimeNs(renderTimeNs) {}
+
+    int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
+    nsecs_t getRenderTimeNs() const { return mRenderTimeNs;}
+
+private:
+    int64_t mMediaTimeUs;
+    nsecs_t mRenderTimeNs;
+};
+
+} // android
+
+#endif // RENDERED_FRAME_INFO_H
\ No newline at end of file
diff --git a/media/libstagefright/include/media/stagefright/SurfaceUtils.h b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
index 35b3fa2..eccb413 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceUtils.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
@@ -27,6 +27,7 @@
 namespace android {
 
 struct HDRStaticInfo;
+class IProducerListener;
 
 /**
  * Configures |nativeWindow| for given |width|x|height|, pixel |format|, |rotation| and |usage|.
@@ -43,6 +44,8 @@
 status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */);
 status_t nativeWindowConnect(ANativeWindow *surface, const char *reason);
 status_t nativeWindowDisconnect(ANativeWindow *surface, const char *reason);
+status_t surfaceConnectWithListener(const sp<Surface> &surface,
+        sp<IProducerListener> listener, const char *reason);
 
 /**
  * Disable buffer dropping behavior of BufferQueue if target sdk of application
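
A sketch of calling the new surfaceConnectWithListener() helper; the wrapper function, listener source, and reason string below are placeholders:

    #include <gui/IProducerListener.h>
    #include <gui/Surface.h>
    #include <media/stagefright/SurfaceUtils.h>

    // Illustrative only: connect the surface and register a producer listener in one call.
    android::status_t connectOutputSurface(
            const android::sp<android::Surface> &surface,
            const android::sp<android::IProducerListener> &listener) {
        return android::surfaceConnectWithListener(surface, listener, "connectOutputSurface");
    }
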
diff --git a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
index 82ba81c..7139deb 100644
--- a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
+++ b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
@@ -38,6 +38,9 @@
     // The render time of the first video frame.
     int64_t firstRenderTimeUs;
 
+    // The render time of the last video frame.
+    int64_t lastRenderTimeUs;
+
     // The number of frames released to be rendered.
     int64_t frameReleasedCount;
 
@@ -60,6 +63,11 @@
     // post-render.
     float actualFrameRate;
 
+    // The amount of content duration skipped by the app after a pause when video was trying to
+    // resume. This sometimes happens when catching up to the audio position, which continued
+    // playing after the video paused.
+    int32_t maxContentDroppedAfterPauseMs;
+
     // A histogram of the durations of freezes due to dropped/skipped frames.
     MediaHistogram<int32_t> freezeDurationMsHistogram;
     // The computed overall freeze score using the above histogram and score conversion table. The
@@ -152,6 +160,11 @@
         // seeking forward.
         int32_t liveContentFrameDropToleranceUs;
 
+        // The amount of time it takes for audio to stop playback after a pause is initiated. Used
+        // to allow some dropped video frames while video catches back up to the audio position
+        // when resuming playback.
+        int32_t pauseAudioLatencyUs;
+
         // Freeze configuration
         //
         // The values used to distribute freeze durations across a histogram.
@@ -197,6 +210,16 @@
         // The maximum distance in time between two judder occurrences such that both will be
         // lumped into the same judder event.
         int32_t judderEventDistanceToleranceMs;
+        //
+        // Whether or not the Perfetto trace trigger is enabled.
+        bool traceTriggerEnabled;
+        //
+        // Throttle period for the Perfetto trace trigger, to avoid triggering multiple traces for
+        // the same event within a short time.
+        int32_t traceTriggerThrottleMs;
+        //
+        // The minimum freeze duration (frame render time) that triggers trace collection.
+        int32_t traceMinFreezeDurationMs;
     };
 
     struct FreezeEvent {
@@ -253,8 +276,11 @@
         Details details;
     };
 
+    typedef void (*TraceTriggerFn)();
+
     VideoRenderQualityTracker();
-    VideoRenderQualityTracker(const Configuration &configuration);
+    VideoRenderQualityTracker(const Configuration &configuration,
+                              const TraceTriggerFn traceTriggerFn = nullptr);
 
     // Called when a tunnel mode frame has been queued.
     void onTunnelFrameQueued(int64_t contentTimeUs);
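
The optional TraceTriggerFn parameter lets callers substitute their own trigger, which is what the unit-test Helper further down in this patch does; a minimal sketch of wiring one in, assuming Configuration is the nested struct shown above:

    #include <media/stagefright/VideoRenderQualityTracker.h>

    // Illustrative only: count trigger invocations instead of starting a real trace.
    static int sTraceCount = 0;
    static void countingTraceTrigger() { ++sTraceCount; }

    void setUpTracker() {
        android::VideoRenderQualityTracker::Configuration config;
        config.enabled = true;
        config.traceTriggerEnabled = true;
        android::VideoRenderQualityTracker tracker(config, countingTraceTrigger);
        // ... feed frame events (e.g. onFrameReleased()) as playback proceeds ...
    }
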
@@ -349,7 +375,8 @@
     // Process a frame freeze.
     static void processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
                               int64_t lastFreezeEndTimeUs, FreezeEvent &e,
-                              VideoRenderQualityMetrics &m, const Configuration &c);
+                              VideoRenderQualityMetrics &m, const Configuration &c,
+                              const TraceTriggerFn traceTriggerFn);
 
     // Retrieve a freeze event if an event just finished.
     static void maybeCaptureFreezeEvent(int64_t actualRenderTimeUs, int64_t lastFreezeEndTimeUs,
@@ -373,6 +400,14 @@
                                         JudderEvent &e, const VideoRenderQualityMetrics & m,
                                         const Configuration &c, JudderEvent *judderEventOut);
 
+    // Trigger trace collection for video freeze.
+    static void triggerTrace();
+
+    // Trigger collection of a Perfetto Always-On-Tracing (AOT) trace file for a video freeze.
+    // triggerTimeUs throttles the trigger to avoid collecting multiple traces in a short time.
+    static void triggerTraceWithThrottle(TraceTriggerFn traceTriggerFn,
+                                         const Configuration &c, const int64_t triggerTimeUs);
+
     // Check to see if a discontinuity has occurred by examining the content time and the
     // app-desired render time. If so, reset some internal state.
     bool resetIfDiscontinuity(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
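
The declaration above does not show the throttle itself; a plausible standalone sketch (not the actual implementation) of how traceTriggerThrottleMs and the trigger time could gate repeated triggers:

    #include <cstdint>

    typedef void (*TraceTriggerFn)();

    // Illustrative only: fire the callback at most once per throttle window.
    void maybeTriggerTrace(TraceTriggerFn traceTriggerFn, int32_t traceTriggerThrottleMs,
                           int64_t triggerTimeUs, int64_t &lastTriggerTimeUs) {
        if (traceTriggerFn == nullptr) {
            return;
        }
        if (triggerTimeUs - lastTriggerTimeUs < int64_t(traceTriggerThrottleMs) * 1000) {
            return;  // still inside the throttle window
        }
        lastTriggerTimeUs = triggerTimeUs;
        traceTriggerFn();
    }
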
@@ -391,6 +426,9 @@
     // Configurable elements of the metrics algorithms.
     const Configuration mConfiguration;
 
+    // The function for triggering trace collection for video freeze.
+    const TraceTriggerFn mTraceTriggerFn;
+
     // Metrics are updated every time a frame event occurs - skipped, dropped, rendered.
     VideoRenderQualityMetrics mMetrics;
 
@@ -409,8 +447,8 @@
     // The render duration of the playback.
     int64_t mRenderDurationMs;
 
-    // True if the previous frame was dropped.
-    bool mWasPreviousFrameDropped;
+    // The duration of the content that was dropped.
+    int64_t mDroppedContentDurationUs;
 
     // The freeze event that's currently being tracked.
     FreezeEvent mFreezeEvent;
@@ -442,6 +480,9 @@
     // Frame durations derived from timestamps captured by the display subsystem, indicating the
     // wall clock time at which the frame is actually rendered.
     FrameDurationUs mActualFrameDurationUs;
+
+    // Token of the async atrace event for video frames dropped/skipped by the app.
+    int64_t mTraceFrameSkippedToken = -1;
 };
 
 }  // namespace android
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 54c5697..79ab009 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -184,6 +184,9 @@
 
 cc_defaults {
     name: "libstagefright_softomx-defaults",
+    // TODO(b/316432618): Software OMX codecs are no longer used; disable building them until
+    // this code is removed completely.
+    enabled: false,
     vendor_available: true,
 
     cflags: [
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 41e9aff..bc57ef7 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -105,6 +105,7 @@
 
     mRTCPAddr = mRTPAddr;
     mRTCPAddr.sin_port = htons(ntohs(mRTPAddr.sin_port) | 1);
+    mVPSBuf = NULL;
     mSPSBuf = NULL;
     mPPSBuf = NULL;
 
diff --git a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
index 7823922..16f8294 100644
--- a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
+++ b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
@@ -36,10 +36,11 @@
 class Helper {
 public:
     Helper(double contentFrameDurationMs, const Configuration &configuration) :
-            mVideoRenderQualityTracker(configuration) {
+            mVideoRenderQualityTracker(configuration, testTraceTrigger) {
         mContentFrameDurationUs = int64_t(contentFrameDurationMs * 1000);
         mMediaTimeUs = 0;
         mClockTimeNs = 0;
+        sTraceTriggeredCount = 0;
     }
 
     void changeContentFrameDuration(double contentFrameDurationMs) {
@@ -100,6 +101,10 @@
         return e;
     }
 
+    int getTraceTriggeredCount() {
+        return sTraceTriggeredCount;
+    }
+
 private:
     VideoRenderQualityTracker mVideoRenderQualityTracker;
     int64_t mContentFrameDurationUs;
@@ -107,8 +112,16 @@
     int64_t mClockTimeNs;
     VideoRenderQualityTracker::FreezeEvent mFreezeEvent;
     VideoRenderQualityTracker::JudderEvent mJudderEvent;
+
+    static int sTraceTriggeredCount;
+
+    static void testTraceTrigger() {
+        sTraceTriggeredCount++;
+    };
 };
 
+int Helper::sTraceTriggeredCount = 0;
+
 class VideoRenderQualityTrackerTest : public ::testing::Test {
 public:
     VideoRenderQualityTrackerTest() {}
@@ -127,6 +140,7 @@
     EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
     EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
     EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.pauseAudioLatencyUs, d.pauseAudioLatencyUs);
     EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
     EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
     EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
@@ -139,6 +153,9 @@
     EXPECT_EQ(c.judderEventMax, d.judderEventMax);
     EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
     EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+    EXPECT_EQ(c.traceTriggerEnabled, d.traceTriggerEnabled);
+    EXPECT_EQ(c.traceTriggerThrottleMs, d.traceTriggerThrottleMs);
+    EXPECT_EQ(c.traceMinFreezeDurationMs, d.traceMinFreezeDurationMs);
 }
 
 TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withEmpty) {
@@ -154,6 +171,7 @@
     EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
     EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
     EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.pauseAudioLatencyUs, d.pauseAudioLatencyUs);
     EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
     EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
     EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
@@ -166,6 +184,9 @@
     EXPECT_EQ(c.judderEventMax, d.judderEventMax);
     EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
     EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+    EXPECT_EQ(c.traceTriggerEnabled, d.traceTriggerEnabled);
+    EXPECT_EQ(c.traceTriggerThrottleMs, d.traceTriggerThrottleMs);
+    EXPECT_EQ(c.traceMinFreezeDurationMs, d.traceMinFreezeDurationMs);
 }
 
 TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withInvalid) {
@@ -181,6 +202,7 @@
     EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
     EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
     EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.pauseAudioLatencyUs, d.pauseAudioLatencyUs);
     EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
     EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
     EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
@@ -193,6 +215,9 @@
     EXPECT_EQ(c.judderEventMax, d.judderEventMax);
     EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
     EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+    EXPECT_EQ(c.traceTriggerEnabled, d.traceTriggerEnabled);
+    EXPECT_EQ(c.traceTriggerThrottleMs, d.traceTriggerThrottleMs);
+    EXPECT_EQ(c.traceMinFreezeDurationMs, d.traceMinFreezeDurationMs);
 }
 
 TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withAlmostValid) {
@@ -208,6 +233,8 @@
                 return "10b0";
             } else if (flag == "render_metrics_live_content_frame_drop_tolerance_us") {
                 return "c100";
+            } else if (flag == "render_metrics_pause_audio_latency_us") {
+                return "1ab0";
             } else if (flag == "render_metrics_freeze_duration_ms_histogram_buckets") {
                 return "1,5300,3b400,123";
             } else if (flag == "render_metrics_freeze_duration_ms_histogram_to_score") {
@@ -232,6 +259,14 @@
                 return "10*10";
             } else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
                 return "140-a";
+            } else if (flag == "render_metrics_trace_trigger_enabled") {
+                return "fals";
+            } else if (flag == "render_metrics_trace_trigger_throttle_ms") {
+                return "12345678901234";
+            } else if (flag == "render_metrics_trace_minimum_freeze_duration_ms") {
+                return "10b0";
+            } else if (flag == "render_metrics_trace_maximum_freeze_duration_ms") {
+                return "100a";
             }
             return "";
         }
@@ -243,6 +278,7 @@
     EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
     EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
     EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.pauseAudioLatencyUs, d.pauseAudioLatencyUs);
     EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
     EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
     EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
@@ -255,13 +291,16 @@
     EXPECT_EQ(c.judderEventMax, d.judderEventMax);
     EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
     EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+    EXPECT_EQ(c.traceTriggerEnabled, d.traceTriggerEnabled);
+    EXPECT_EQ(c.traceTriggerThrottleMs, d.traceTriggerThrottleMs);
+    EXPECT_EQ(c.traceMinFreezeDurationMs, d.traceMinFreezeDurationMs);
 }
 
 TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withValid) {
     Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
         [](const std::string &, const std::string &flag, const std::string &) -> std::string {
             if (flag == "render_metrics_enabled") {
-                return "false";
+                return "true";
             } else if (flag == "render_metrics_are_skipped_frames_dropped") {
                 return "false";
             } else if (flag == "render_metrics_max_expected_content_frame_duration_us") {
@@ -270,6 +309,8 @@
                 return "3000";
             } else if (flag == "render_metrics_live_content_frame_drop_tolerance_us") {
                 return "4000";
+            } else if (flag == "render_metrics_pause_audio_latency_us") {
+                return "300000";
             } else if (flag == "render_metrics_freeze_duration_ms_histogram_buckets") {
                 return "100,200,300,400";
             } else if (flag == "render_metrics_freeze_duration_ms_histogram_to_score") {
@@ -294,6 +335,14 @@
                 return "10000";
             } else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
                 return "11000";
+            } else if (flag == "render_metrics_trace_trigger_enabled") {
+                return "true";
+            } else if (flag == "render_metrics_trace_trigger_throttle_ms") {
+                return "50000";
+            } else if (flag == "render_metrics_trace_minimum_freeze_duration_ms") {
+                return "1000";
+            } else if (flag == "render_metrics_trace_maximum_freeze_duration_ms") {
+                return "5000";
             }
             return "";
         }
@@ -304,7 +353,7 @@
     // default - if we are accidentally configuring to the default then we're not necessarily
     // testing the parsing.
     Configuration d;
-    EXPECT_EQ(c.enabled, false);
+    EXPECT_EQ(c.enabled, true);
     EXPECT_NE(c.enabled, d.enabled);
     EXPECT_EQ(c.areSkippedFramesDropped, false);
     EXPECT_NE(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
@@ -314,6 +363,8 @@
     EXPECT_NE(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
     EXPECT_EQ(c.liveContentFrameDropToleranceUs, 4000);
     EXPECT_NE(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.pauseAudioLatencyUs, 300000);
+    EXPECT_NE(c.pauseAudioLatencyUs, d.pauseAudioLatencyUs);
     {
         std::vector<int32_t> expected({100,200,300,400});
         EXPECT_EQ(c.freezeDurationMsHistogramBuckets, expected);
@@ -353,10 +404,15 @@
     EXPECT_NE(c.judderEventDetailsMax, d.judderEventDetailsMax);
     EXPECT_EQ(c.judderEventDistanceToleranceMs, 11000);
     EXPECT_NE(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+
+    EXPECT_EQ(c.traceTriggerEnabled, true);
+    EXPECT_EQ(c.traceTriggerThrottleMs, 50000);
+    EXPECT_EQ(c.traceMinFreezeDurationMs, 1000);
 }
 
 TEST_F(VideoRenderQualityTrackerTest, countsReleasedFrames) {
     Configuration c;
+    c.enabled = true;
     Helper h(16.66, c);
     h.drop(10);
     h.render({16.66, 16.66, 16.66});
@@ -368,6 +424,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, countsSkippedFrames) {
     Configuration c;
+    c.enabled = true;
     Helper h(16.66, c);
     h.drop(10); // dropped frames are not counted
     h.skip(10); // frames skipped before rendering a frame are not counted
@@ -382,6 +439,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenSkippedFramesAreDropped_countsDroppedFrames) {
     Configuration c;
+    c.enabled = true;
     c.areSkippedFramesDropped = true;
     Helper h(16.66, c);
     h.skip(10); // skipped frames at the beginning of playback are not counted
@@ -398,6 +456,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenNotSkippedFramesAreDropped_countsDroppedFrames) {
     Configuration c;
+    c.enabled = true;
     c.areSkippedFramesDropped = false;
     Helper h(16.66, c);
     h.skip(10); // skipped frames at the beginning of playback are not counted
@@ -414,6 +473,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, countsRenderedFrames) {
     Configuration c;
+    c.enabled = true;
     Helper h(16.66, c);
     h.drop(10); // dropped frames are not counted
     h.render({16.66, 16.66, 16.66});
@@ -425,6 +485,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, detectsFrameRate) {
     Configuration c;
+    c.enabled = true;
     c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
     Helper h(16.66, c);
     h.render({16.6, 16.7, 16.6, 16.7});
@@ -434,6 +495,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, handlesSeeking) {
     Configuration c;
+    c.enabled = true;
     c.maxExpectedContentFrameDurationUs = 30;
     VideoRenderQualityTracker v(c);
     v.onFrameReleased(0, 0);
@@ -472,6 +534,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, withSkipping_handlesSeeking) {
     Configuration c;
+    c.enabled = true;
     c.maxExpectedContentFrameDurationUs = 30;
     VideoRenderQualityTracker v(c);
     v.onFrameReleased(0, 0);
@@ -508,6 +571,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenLowTolerance_doesntDetectFrameRate) {
     Configuration c;
+    c.enabled = true;
     c.frameRateDetectionToleranceUs = 0;
     Helper h(16.66, c);
     h.render({16.6, 16.7, 16.6, 16.7});
@@ -517,6 +581,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenFrameRateDestabilizes_detectsFrameRate) {
     Configuration c;
+    c.enabled = true;
     c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
     Helper h(16.66, c);
     h.render({16.6, 16.7, 16.6, 16.7});
@@ -527,6 +592,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, detects32Pulldown) {
     Configuration c;
+    c.enabled = true;
     c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
     Helper h(41.66, c);
     h.render({49.9, 33.2, 50.0, 33.4, 50.1, 33.2});
@@ -536,6 +602,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenBad32Pulldown_doesntDetect32Pulldown) {
     Configuration c;
+    c.enabled = true;
     c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
     Helper h(41.66, c);
     h.render({50.0, 33.33, 33.33, 50.00, 33.33, 50.00});
@@ -545,6 +612,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenFrameRateChanges_detectsMostRecentFrameRate) {
     Configuration c;
+    c.enabled = true;
     c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
     Helper h(16.66, c);
     h.render({16.6, 16.7, 16.6, 16.7});
@@ -558,6 +626,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenFrameRateIsUnstable_doesntDetectFrameRate) {
     Configuration c;
+    c.enabled = true;
     c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
     Helper h(16.66, c);
     h.render({16.66, 30.0, 16.66, 30.0, 16.66});
@@ -567,6 +636,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, capturesFreezeRate) {
     Configuration c;
+    c.enabled = true;
     Helper h(20, c);
     h.render(3);
     EXPECT_EQ(h.getMetrics().freezeRate, 0);
@@ -579,6 +649,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDurationHistogram) {
     Configuration c;
+    c.enabled = true;
     // +17 because freeze durations include the render time of the previous frame
     c.freezeDurationMsHistogramBuckets = {2 * 17 + 17, 3 * 17 + 17, 6 * 17 + 17};
     Helper h(17, c);
@@ -612,6 +683,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDistanceHistogram) {
     Configuration c;
+    c.enabled = true;
     c.freezeDistanceMsHistogramBuckets = {1 * 17, 5 * 17, 6 * 17};
     Helper h(17, c);
     h.render(1);
@@ -643,6 +715,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, when60hz_hasNoJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(16.66, c); // ~60Hz
     h.render({16.66, 16.66, 16.66, 16.66, 16.66, 16.66, 16.66});
     EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
@@ -651,6 +724,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenSmallVariance60hz_hasNoJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(16.66, c); // ~60Hz
     h.render({14, 18, 14, 18, 14, 18, 14, 18});
     EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
@@ -659,6 +733,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenBadSmallVariance60Hz_hasJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(16.66, c); // ~60Hz
     h.render({14, 18, 14, /* no 18 between 14s */ 14, 18, 14, 18});
     EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
@@ -666,6 +741,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, when30Hz_hasNoJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(33.33, c);
     h.render({33.33, 33.33, 33.33, 33.33, 33.33, 33.33});
     EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
@@ -674,6 +750,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenSmallVariance30Hz_hasNoJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(33.33, c);
     h.render({29.0, 35.0, 29.0, 35.0, 29.0, 35.0});
     EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
@@ -682,6 +759,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenBadSmallVariance30Hz_hasJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(33.33, c);
     h.render({29.0, 35.0, 29.0, /* no 35 between 29s */ 29.0, 35.0, 29.0, 35.0});
     EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
@@ -689,6 +767,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenBad30HzTo60Hz_hasJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(33.33, c);
     h.render({33.33, 33.33, 50.0, /* frame stayed 1 vsync too long */ 16.66, 33.33, 33.33});
     EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 2); // note: 2 counts of judder
@@ -696,6 +775,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, when24HzTo60Hz_hasNoJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(41.66, c);
     h.render({50.0, 33.33, 50.0, 33.33, 50.0, 33.33});
     EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
@@ -704,6 +784,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, when25HzTo60Hz_hasJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(40, c);
     h.render({33.33, 33.33, 50.0});
     h.render({33.33, 33.33, 50.0});
@@ -716,6 +797,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, when50HzTo60Hz_hasJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(20, c);
     h.render({16.66, 16.66, 16.66, 33.33});
     h.render({16.66, 16.66, 16.66, 33.33});
@@ -728,6 +810,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, when30HzTo50Hz_hasJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(33.33, c);
     h.render({40.0, 40.0, 40.0, 60.0});
     h.render({40.0, 40.0, 40.0, 60.0});
@@ -739,6 +822,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenSmallVariancePulldown24HzTo60Hz_hasNoJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(41.66, c);
     h.render({52.0, 31.33, 52.0, 31.33, 52.0, 31.33});
     EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
@@ -746,6 +830,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, whenBad24HzTo60Hz_hasJudder) {
     Configuration c;
+    c.enabled = true;
     Helper h(41.66, c);
     h.render({50.0, 33.33, 50.0, 33.33, /* no 50 between 33s */ 33.33, 50.0, 33.33});
     EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
@@ -753,6 +838,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, capturesJudderScoreHistogram) {
     Configuration c;
+    c.enabled = true;
     c.judderErrorToleranceUs = 2000;
     c.judderScoreHistogramBuckets = {1, 5, 8};
     Helper h(16, c);
@@ -767,6 +853,7 @@
 TEST_F(VideoRenderQualityTrackerTest, ranksJudderScoresInOrder) {
     // Each rendering is ranked from best to worst from a user experience
     Configuration c;
+    c.enabled = true;
     c.judderErrorToleranceUs = 2000;
     c.judderScoreHistogramBuckets = {0, 1000};
     int64_t previousScore = 0;
@@ -846,6 +933,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, capturesFreezeEvents) {
     Configuration c;
+    c.enabled = true;
     c.freezeEventMax = 5;
     c.freezeEventDetailsMax = 4;
     c.freezeEventDistanceToleranceMs = 1000;
@@ -938,6 +1026,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, capturesJudderEvents) {
     Configuration c;
+    c.enabled = true;
     c.judderEventMax = 4;
     c.judderEventDetailsMax = 3;
     c.judderEventDistanceToleranceMs = 100;
@@ -988,6 +1077,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, capturesOverallFreezeScore) {
     Configuration c;
+    c.enabled = true;
     // # drops * 20ms + 20ms because current frame is frozen + 1 for bucket threshold
     c.freezeDurationMsHistogramBuckets = {1 * 20 + 21, 5 * 20 + 21, 10 * 20 + 21};
     c.freezeDurationMsHistogramToScore = {10, 100, 1000};
@@ -1012,6 +1102,7 @@
 
 TEST_F(VideoRenderQualityTrackerTest, capturesOverallJudderScore) {
     Configuration c;
+    c.enabled = true;
     c.judderScoreHistogramBuckets = {0, 6, 10};
     c.judderScoreHistogramToScore = {10, 100, 1000};
     Helper h(20, c);
@@ -1024,4 +1115,107 @@
     EXPECT_EQ(h.getMetrics().judderScore, 10 + 300 + 2000);
 }
 
+TEST_F(VideoRenderQualityTrackerTest,
+       freezesForTraceDuration_withThrottle_throttlesTraceTrigger) {
+    Configuration c;
+    c.enabled = true;
+    c.traceTriggerEnabled = true; // The trigger is enabled, so traces should be triggered.
+    // The value of traceTriggerThrottleMs must be larger than traceMinFreezeDurationMs. Otherwise,
+    // the throttle does not work.
+    c.traceTriggerThrottleMs = 200;
+    c.traceMinFreezeDurationMs = 4 * 20; // 4 frames.
+
+    Helper h(20, c);
+    // Freeze triggers separated by 100ms, which is less than the throttle threshold.
+    h.render(1); // Video start.
+    h.drop(3);   // Freeze.
+    h.render(1); // Trace triggered.
+    h.render(1); // Throttle time:  20/200ms
+    h.drop(3);   // Throttle time:  80/200ms
+    h.render(1); // Throttle time: 100/200ms (Trace not triggered)
+    EXPECT_EQ(h.getTraceTriggeredCount(), 1);
+    // The next freeze trigger is separated by 200ms, which reaches the throttle threshold.
+    h.render(1); // Throttle time: 120/200ms
+    h.drop(3);   // Throttle time: 180/200ms
+    h.render(1); // Throttle time: 200/200ms (Trace triggered)
+    EXPECT_EQ(h.getTraceTriggeredCount(), 2);
+    // The next freeze trigger is separated by 100ms, which is less than the throttle threshold.
+    h.render(1); // Throttle time:  20/200ms
+    h.drop(3);   // Throttle time:  80/200ms
+    h.render(1); // Throttle time: 100/200ms (Trace not triggered)
+    EXPECT_EQ(h.getTraceTriggeredCount(), 2);
+    // Freeze duration is below traceMinFreezeDurationMs, so no trace once the throttle ends.
+    h.render(1); // Throttle time: 120/200ms
+    h.render(1); // Throttle time: 140/200ms
+    h.drop(2);   // Throttle time: 180/200ms
+    h.render(1); // Throttle time: 200/200ms (Trace not triggered, freeze duration = 60ms)
+    EXPECT_EQ(h.getTraceTriggeredCount(), 2);
+}
+
+TEST_F(VideoRenderQualityTrackerTest,
+       freezeForTraceDuration_withTraceDisabled_doesNotTriggerTrace) {
+    Configuration c;
+    c.enabled = true;
+    c.traceTriggerEnabled = false; // The trigger is disabled, so no traces should be triggered.
+    c.traceTriggerThrottleMs = 0; // Disable throttle in the test case.
+    c.traceMinFreezeDurationMs = 4 * 20; // 4 frames.
+
+    Helper h(20, c);
+    h.render(1);
+    h.drop(3);
+    h.render(1); // Freeze duration is 80 ms.
+    h.drop(4);
+    h.render(1); // Freeze duration is 100 ms.
+
+    EXPECT_EQ(h.getTraceTriggeredCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, doesNotCountCatchUpAfterPauseAsFreeze) {
+    Configuration c;
+    c.enabled = true;
+    c.pauseAudioLatencyUs = 200 * 1000; // allows for up to 10 frames to be dropped to catch up
+                                        // to the audio position
+    Helper h(20, c);
+    // A few frames followed by a long pause
+    h.render({20, 20, 1000});
+    h.drop(10); // simulate catching up to audio
+    h.render({20, 20, 1000});
+    h.drop(11); // simulate catching up to audio but then also dropping frames
+    h.render({20});
+
+    // Only 1 freeze is counted because the first freeze (200ms) is equal to or below the pause
+    // latency allowance, and the algorithm assumes a legitimate case of the video trying to catch
+    // up to the audio position, which continued to play for a short period of time (less than
+    // 200ms) after the pause was initiated.
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getCount(), 1);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesMaximumContentDroppedAfterPause) {
+    Configuration c;
+    c.enabled = true;
+    c.pauseAudioLatencyUs = 200 * 1000; // allows for up to 10 frames to be dropped to catch up
+                                        // to the audio position
+    Helper h(20, c);
+
+    // Freezes that are below the pause latency are captured
+    h.render({20, 20, 1000});
+    h.drop(6);
+    h.render({20, 20, 1000});
+    h.drop(8);
+    h.render({20, 20, 1000});
+    h.drop(7);
+    h.render({20});
+    EXPECT_EQ(h.getMetrics().maxContentDroppedAfterPauseMs, 8 * 20);
+
+    // Freezes that are above the pause latency are also captured
+    h.render({20, 20, 1000});
+    h.drop(10);
+    h.render({20, 20, 1000});
+    h.drop(12);
+    h.render({20, 20, 1000});
+    h.drop(11);
+    h.render({20});
+    EXPECT_EQ(h.getMetrics().maxContentDroppedAfterPauseMs, 12 * 20);
+}
+
 } // android
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index 71ddbe5..ed01e36 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -89,7 +89,8 @@
     MOCK_METHOD(void, initiateStart, (), (override));
     MOCK_METHOD(void, initiateShutdown, (bool keepComponentAllocated), (override));
     MOCK_METHOD(void, onMessageReceived, (const sp<AMessage> &msg), (override));
-    MOCK_METHOD(status_t, setSurface, (const sp<Surface> &surface), (override));
+    MOCK_METHOD(
+            status_t, setSurface, (const sp<Surface> &surface, uint32_t generation), (override));
     MOCK_METHOD(void, signalFlush, (), (override));
     MOCK_METHOD(void, signalResume, (), (override));
     MOCK_METHOD(void, signalRequestIDRFrame, (), (override));
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 3823c36..ca862b0 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -67,6 +67,25 @@
     return true;
 }
 
+bool WebmWriter::isSampleMetadataValid(size_t trackIndex, int64_t timeUs) {
+    int64_t prevTimeUs = 0;
+    if (mLastTimestampUsByTrackIndex.find(trackIndex) != mLastTimestampUsByTrackIndex.end()) {
+        prevTimeUs = mLastTimestampUsByTrackIndex[trackIndex];
+    }
+    // WebM has monotonically increasing timestamps
+    if (timeUs < 0 || timeUs < prevTimeUs) {
+        return false;
+    }
+    int64_t lastDurationUs = timeUs - prevTimeUs;
+    // Ensure that the timeUs value does not overflow when lastDurationUs is added to it
+    // in the WebmFrameMediaSourceThread.
+    if (timeUs > (INT64_MAX / 1000) - lastDurationUs) {
+        return false;
+    }
+    mLastTimestampUsByTrackIndex[trackIndex] = timeUs;
+    return true;
+}
+
 WebmWriter::WebmWriter(int fd)
     : mFd(dup(fd)),
       mInitCheck(mFd < 0 ? NO_INIT : OK),
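
The validity check added above can be exercised in isolation; a self-contained version with the same monotonicity and overflow bounds, useful for unit-testing the rule (the function name is illustrative):

    #include <cstdint>
    #include <limits>

    // Illustrative only: mirrors the checks in WebmWriter::isSampleMetadataValid().
    bool timestampIsUsable(int64_t timeUs, int64_t prevTimeUs) {
        if (timeUs < 0 || timeUs < prevTimeUs) {
            return false;  // WebM requires monotonically increasing timestamps
        }
        const int64_t lastDurationUs = timeUs - prevTimeUs;
        // Reject values that could overflow once lastDurationUs is added downstream.
        return timeUs <= (std::numeric_limits<int64_t>::max() / 1000) - lastDurationUs;
    }
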
diff --git a/media/libstagefright/webm/include/webm/WebmWriter.h b/media/libstagefright/webm/include/webm/WebmWriter.h
index e339add..4c51f0e 100644
--- a/media/libstagefright/webm/include/webm/WebmWriter.h
+++ b/media/libstagefright/webm/include/webm/WebmWriter.h
@@ -40,6 +40,9 @@
     // which is compatible with WebmWriter.
     // Note that this overloads that method in the base class.
     static bool isFdOpenModeValid(int fd);
+    // Returns true if the given timestamp is valid and compatible with the WebmWriter.
+    // Note that this overloads that method in the base class.
+    bool isSampleMetadataValid(size_t trackIndex, int64_t timeUs);
     explicit WebmWriter(int fd);
     ~WebmWriter() { reset(); }
 
@@ -67,6 +70,7 @@
     uint64_t mInfoSize;
     uint64_t mTracksOffset;
     uint64_t mCuesOffset;
+    std::map<size_t, int64_t> mLastTimestampUsByTrackIndex;
 
     bool mPaused;
     bool mStarted;
diff --git a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
index fee5c94..7235ba9 100644
--- a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
+++ b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
@@ -209,6 +209,9 @@
     }
     vector<FrameData> bufferInfo = mBufferSource->getFrameList(trackIndex);
     for (int idx = startFrameIndex; idx < endFrameIndex; ++idx) {
+        if (!mWriter->isSampleMetadataValid(trackIndex, bufferInfo[idx].timeUs)) {
+            continue;
+        }
         sp<ABuffer> buffer = new ABuffer((void *)bufferInfo[idx].buf, bufferInfo[idx].size);
         MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
 
diff --git a/media/mediaserver/manifest_media_c2_software.xml b/media/mediaserver/manifest_media_c2_software.xml
index a5b4896..d7fb1a0 100644
--- a/media/mediaserver/manifest_media_c2_software.xml
+++ b/media/mediaserver/manifest_media_c2_software.xml
@@ -8,4 +8,9 @@
             <instance>software</instance>
         </interface>
     </hal>
+    <hal format="aidl">
+        <name>android.hardware.media.c2</name>
+        <version>1</version>
+        <fqname>IComponentStore/software</fqname>
+    </hal>
 </manifest>
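
With the AIDL IComponentStore declared above, clients can look up the software store by instance name; a minimal sketch using the NDK AIDL backend, assuming the generated android.hardware.media.c2 headers are available (error handling elided):

    #include <memory>

    #include <aidl/android/hardware/media/c2/IComponentStore.h>
    #include <android/binder_auto_utils.h>
    #include <android/binder_manager.h>

    using ::aidl::android::hardware::media::c2::IComponentStore;

    std::shared_ptr<IComponentStore> getSoftwareComponentStore() {
        // Instance name is derived from the manifest entry: "<name>.IComponentStore/<instance>".
        const char *kInstance = "android.hardware.media.c2.IComponentStore/software";
        ndk::SpAIBinder binder(AServiceManager_waitForService(kInstance));
        return IComponentStore::fromBinder(binder);  // null if the service is unavailable
    }
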
diff --git a/media/module/TEST_MAPPING b/media/module/TEST_MAPPING
new file mode 100644
index 0000000..1b572da
--- /dev/null
+++ b/media/module/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+  "presubmit": [
+    {
+      "name": "hal_implementation_test"
+    }
+  ]
+}
diff --git a/media/module/codecs/m4v_h263/dec/src/vop.cpp b/media/module/codecs/m4v_h263/dec/src/vop.cpp
index abc0861..2c937c3 100644
--- a/media/module/codecs/m4v_h263/dec/src/vop.cpp
+++ b/media/module/codecs/m4v_h263/dec/src/vop.cpp
@@ -136,6 +136,7 @@
                 case 0x05:
                 case 0x06:
                 case 0x08:
+                case 0x09:
                 case 0x10:
                 case 0x11:
                 case 0x12:
diff --git a/media/module/codecserviceregistrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
index f3a1723..becb98a 100644
--- a/media/module/codecserviceregistrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -6,34 +6,8 @@
     //   SPDX-license-identifier-Apache-2.0
 }
 
-cc_library {
-    name: "libmedia_codecserviceregistrant",
-    vendor_available: true,
-    min_sdk_version: "29",
-    apex_available: [
-        "//apex_available:platform",
-        "com.android.media.swcodec",
-    ],
-
-    srcs: [
-        "CodecServiceRegistrant.cpp",
-    ],
-
-    header_libs: [
-        "libmedia_headers",
-    ],
-
-    defaults: [
-        "libcodec2-hidl-defaults",
-    ],
-    shared_libs: [
-        "libbase",
-        "libcodec2_hidl@1.0",
-        "libcodec2_vndk",
-        "libhidlbase",
-        "libutils",
-    ],
-
+cc_defaults {
+    name: "libcodec2-runtime-libs",
     // Codecs
     runtime_libs: [
         "libcodec2_soft_avcdec",
@@ -61,6 +35,7 @@
         "libcodec2_soft_vp9dec",
         // "libcodec2_soft_av1dec_aom",  // replaced by the gav1 implementation
         "libcodec2_soft_av1dec_gav1",
+        "libcodec2_soft_av1dec_dav1d",
         "libcodec2_soft_av1enc",
         "libcodec2_soft_vp8enc",
         "libcodec2_soft_vp9enc",
@@ -70,3 +45,35 @@
         "libcodec2_soft_gsmdec",
     ],
 }
+
+cc_library {
+    name: "libmedia_codecserviceregistrant",
+    vendor_available: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+    srcs: [
+        "CodecServiceRegistrant.cpp",
+    ],
+
+    header_libs: [
+        "libmedia_headers",
+    ],
+
+    defaults: [
+        "libcodec2-aidl-defaults",
+        "libcodec2-hidl-defaults",
+        "libcodec2-runtime-libs",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcodec2_hidl@1.0",
+        "libcodec2_vndk",
+        "libhidlbase",
+        "libutils",
+    ],
+
+}
diff --git a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
index 1de9efe..f95fc4d 100644
--- a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
+++ b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
@@ -20,15 +20,25 @@
 #include <android/api-level.h>
 #include <android-base/logging.h>
 #include <android-base/properties.h>
+#include <android-base/stringprintf.h>
 
 #include <C2Component.h>
 #include <C2PlatformSupport.h>
+
 #include <codec2/hidl/1.0/ComponentStore.h>
 #include <codec2/hidl/1.1/ComponentStore.h>
 #include <codec2/hidl/1.2/ComponentStore.h>
 #include <codec2/hidl/1.2/Configurable.h>
 #include <codec2/hidl/1.2/types.h>
 #include <hidl/HidlSupport.h>
+#include <hidl/HidlTransportSupport.h>
+
+#include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <codec2/aidl/ComponentStore.h>
+#include <codec2/aidl/ParamTypes.h>
+
 #include <media/CodecServiceRegistrant.h>
 
 namespace /* unnamed */ {
@@ -36,59 +46,147 @@
 using ::android::hardware::hidl_vec;
 using ::android::hardware::hidl_string;
 using ::android::hardware::Return;
-using ::android::hardware::Void;
 using ::android::sp;
-using namespace ::android::hardware::media::c2::V1_2;
-using namespace ::android::hardware::media::c2::V1_2::utils;
+using ::ndk::ScopedAStatus;
+namespace c2_hidl = ::android::hardware::media::c2::V1_2;
+namespace c2_aidl = ::aidl::android::hardware::media::c2;
 
 constexpr c2_status_t C2_TRANSACTION_FAILED = C2_CORRUPTED;
 
 // Converter from IComponentStore to C2ComponentStore.
 class H2C2ComponentStore : public C2ComponentStore {
 protected:
-    using IComponentStore =
+    using HidlComponentStore =
         ::android::hardware::media::c2::V1_0::IComponentStore;
-    using IConfigurable =
+    using HidlConfigurable =
         ::android::hardware::media::c2::V1_0::IConfigurable;
-    sp<IComponentStore> mStore;
-    sp<IConfigurable> mConfigurable;
+    sp<HidlComponentStore> mHidlStore;
+    sp<HidlConfigurable> mHidlConfigurable;
+
+    using AidlComponentStore =
+        ::aidl::android::hardware::media::c2::IComponentStore;
+    using AidlConfigurable =
+        ::aidl::android::hardware::media::c2::IConfigurable;
+    std::shared_ptr<AidlComponentStore> mAidlStore;
+    std::shared_ptr<AidlConfigurable> mAidlConfigurable;
 public:
-    explicit H2C2ComponentStore(sp<IComponentStore> const& store)
-          : mStore{store},
-            mConfigurable{[store]() -> sp<IConfigurable>{
+    explicit H2C2ComponentStore(nullptr_t) {
+    }
+
+    explicit H2C2ComponentStore(sp<HidlComponentStore> const& store)
+          : mHidlStore{store},
+            mHidlConfigurable{[store]() -> sp<HidlConfigurable>{
                 if (!store) {
                     return nullptr;
                 }
-                Return<sp<IConfigurable>> transResult =
+                Return<sp<HidlConfigurable>> transResult =
                     store->getConfigurable();
                 return transResult.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult) :
+                        static_cast<sp<HidlConfigurable>>(transResult) :
                         nullptr;
             }()} {
-        if (!mConfigurable) {
+        if (!mHidlConfigurable) {
+            LOG(ERROR) << "Preferred store is corrupted.";
+        }
+    }
+
+    explicit H2C2ComponentStore(std::shared_ptr<AidlComponentStore> const& store)
+          : mAidlStore{store},
+            mAidlConfigurable{[store]() -> std::shared_ptr<AidlConfigurable>{
+                if (!store) {
+                    return nullptr;
+                }
+                std::shared_ptr<AidlConfigurable> configurable;
+                ScopedAStatus status = store->getConfigurable(&configurable);
+                if (!status.isOk()) {
+                    return nullptr;
+                }
+                return configurable;
+            }()} {
+        if (!mAidlConfigurable) {
             LOG(ERROR) << "Preferred store is corrupted.";
         }
     }
 
     virtual ~H2C2ComponentStore() override = default;
 
-    virtual c2_status_t config_sm(
+    c2_status_t config_sm(
             std::vector<C2Param*> const &params,
             std::vector<std::unique_ptr<C2SettingResult>>* const failures
             ) override {
-        Params hidlParams;
-        if (!createParamsBlob(&hidlParams, params)) {
+        if (mAidlStore) {
+            return config_sm_aidl(params, failures);
+        } else if (mHidlStore) {
+            return config_sm_hidl(params, failures);
+        } else {
+            return C2_OMITTED;
+        }
+    }
+
+    c2_status_t config_sm_aidl(
+            std::vector<C2Param*> const &params,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures
+            ) {
+        c2_aidl::Params aidlParams;
+        if (!c2_aidl::utils::CreateParamsBlob(&aidlParams, params)) {
+            LOG(ERROR) << "config -- bad input.";
+            return C2_TRANSACTION_FAILED;
+        }
+        c2_status_t status = C2_OK;
+        c2_aidl::IConfigurable::ConfigResult configResult;
+        ScopedAStatus transResult = mAidlConfigurable->config(
+                aidlParams, true, &configResult);
+        if (!transResult.isOk()) {
+            if (transResult.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+                status = c2_status_t(transResult.getServiceSpecificError());
+                if (status != C2_BAD_INDEX) {
+                    LOG(DEBUG) << "config -- call failed: "
+                               << status << ".";
+                }
+            } else {
+                LOG(ERROR) << "config -- transaction failed.";
+                return C2_TRANSACTION_FAILED;
+            }
+        }
+        status = static_cast<c2_status_t>(configResult.status.status);
+        if (status != C2_OK && status != C2_BAD_INDEX) {
+            LOG(DEBUG) << "config -- call failed: "
+                       << status << ".";
+        }
+        size_t i = failures->size();
+        failures->resize(i + configResult.failures.size());
+        for (const c2_aidl::SettingResult& sf : configResult.failures) {
+            if (!c2_aidl::utils::FromAidl(&(*failures)[i++], sf)) {
+                LOG(ERROR) << "config -- "
+                           << "invalid SettingResult returned.";
+                status = C2_CORRUPTED;
+            }
+        }
+        if (!c2_aidl::utils::UpdateParamsFromBlob(params, configResult.params)) {
+            LOG(ERROR) << "config -- "
+                       << "failed to parse returned params.";
+            status = C2_CORRUPTED;
+        }
+        return status;
+    };
+
+    c2_status_t config_sm_hidl(
+            std::vector<C2Param*> const &params,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures
+            ) {
+        c2_hidl::Params hidlParams;
+        if (!c2_hidl::utils::createParamsBlob(&hidlParams, params)) {
             LOG(ERROR) << "config -- bad input.";
             return C2_TRANSACTION_FAILED;
         }
         c2_status_t status{};
-        Return<void> transResult = mConfigurable->config(
+        Return<void> transResult = mHidlConfigurable->config(
                 hidlParams,
                 true,
                 [&status, &params, failures](
-                        Status s,
-                        const hidl_vec<SettingResult> f,
-                        const Params& o) {
+                        c2_hidl::Status s,
+                        const hidl_vec<c2_hidl::SettingResult> f,
+                        const c2_hidl::Params& o) {
                     status = static_cast<c2_status_t>(s);
                     if (status != C2_OK && status != C2_BAD_INDEX) {
                         LOG(DEBUG) << "config -- call failed: "
@@ -96,14 +194,14 @@
                     }
                     size_t i = failures->size();
                     failures->resize(i + f.size());
-                    for (const SettingResult& sf : f) {
-                        if (!objcpy(&(*failures)[i++], sf)) {
+                    for (const c2_hidl::SettingResult& sf : f) {
+                        if (!c2_hidl::utils::objcpy(&(*failures)[i++], sf)) {
                             LOG(ERROR) << "config -- "
                                        << "invalid SettingResult returned.";
                             return;
                         }
                     }
-                    if (!updateParamsFromBlob(params, o)) {
+                    if (!c2_hidl::utils::updateParamsFromBlob(params, o)) {
                         LOG(ERROR) << "config -- "
                                    << "failed to parse returned params.";
                         status = C2_CORRUPTED;
@@ -116,33 +214,146 @@
         return status;
     };
 
-    virtual c2_status_t copyBuffer(
+    c2_status_t copyBuffer(
             std::shared_ptr<C2GraphicBuffer>,
             std::shared_ptr<C2GraphicBuffer>) override {
         LOG(ERROR) << "copyBuffer -- not supported.";
         return C2_OMITTED;
     }
 
-    virtual c2_status_t createComponent(
+    c2_status_t createComponent(
             C2String, std::shared_ptr<C2Component> *const component) override {
         component->reset();
         LOG(ERROR) << "createComponent -- not supported.";
         return C2_OMITTED;
     }
 
-    virtual c2_status_t createInterface(
-            C2String, std::shared_ptr<C2ComponentInterface> *const interface) {
+    c2_status_t createInterface(
+            C2String, std::shared_ptr<C2ComponentInterface> *const interface) override {
         interface->reset();
         LOG(ERROR) << "createInterface -- not supported.";
         return C2_OMITTED;
     }
 
-    virtual c2_status_t query_sm(
+    c2_status_t query_sm(
             const std::vector<C2Param *> &stackParams,
             const std::vector<C2Param::Index> &heapParamIndices,
-            std::vector<std::unique_ptr<C2Param>> *const heapParams) const
-            override {
-        hidl_vec<ParamIndex> indices(
+            std::vector<std::unique_ptr<C2Param>> *const heapParams) const override {
+        if (mAidlStore) {
+            return query_sm_aidl(stackParams, heapParamIndices, heapParams);
+        } else if (mHidlStore) {
+            return query_sm_hidl(stackParams, heapParamIndices, heapParams);
+        } else {
+            return C2_OMITTED;
+        }
+    }
+
+    static c2_status_t UpdateQueryResult(
+            const std::vector<C2Param *> &paramPointers,
+            size_t numStackIndices,
+            const std::vector<C2Param *> &stackParams,
+            std::vector<std::unique_ptr<C2Param>> *const heapParams) {
+        c2_status_t status = C2_OK;
+        size_t i = 0;
+        for (auto it = paramPointers.begin(); it != paramPointers.end(); ) {
+            C2Param* paramPointer = *it;
+            if (numStackIndices > 0) {
+                --numStackIndices;
+                if (!paramPointer) {
+                    LOG(WARNING) << "query -- null stack param.";
+                    ++it;
+                    continue;
+                }
+                for (; i < stackParams.size() && !stackParams[i]; ) {
+                    ++i;
+                }
+                if (i >= stackParams.size()) {
+                    LOG(ERROR) << "query -- unexpected error.";
+                    status = C2_CORRUPTED;
+                    break;
+                }
+                if (stackParams[i]->index() != paramPointer->index()) {
+                    LOG(WARNING) << "query -- param skipped: "
+                                    "index = "
+                                 << stackParams[i]->index() << ".";
+                    stackParams[i++]->invalidate();
+                    continue;
+                }
+                if (!stackParams[i++]->updateFrom(*paramPointer)) {
+                    LOG(WARNING) << "query -- param update failed: "
+                                    "index = "
+                                 << paramPointer->index() << ".";
+                }
+            } else {
+                if (!paramPointer) {
+                    LOG(WARNING) << "query -- null heap param.";
+                    ++it;
+                    continue;
+                }
+                if (!heapParams) {
+                    LOG(WARNING) << "query -- "
+                                    "unexpected extra stack param.";
+                } else {
+                    heapParams->emplace_back(
+                            C2Param::Copy(*paramPointer));
+                }
+            }
+            ++it;
+        }
+        return status;
+    }
+
+    c2_status_t query_sm_aidl(
+            const std::vector<C2Param *> &stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            std::vector<std::unique_ptr<C2Param>> *const heapParams) const {
+        std::vector<int32_t> indices(stackParams.size() + heapParamIndices.size());
+        size_t numIndices = 0;
+        for (C2Param* const& stackParam : stackParams) {
+            if (!stackParam) {
+                LOG(WARNING) << "query -- null stack param encountered.";
+                continue;
+            }
+            indices[numIndices++] = stackParam->index();
+        }
+        size_t numStackIndices = numIndices;
+        for (const C2Param::Index& index : heapParamIndices) {
+            indices[numIndices++] = static_cast<uint32_t>(index);
+        }
+        indices.resize(numIndices);
+        if (heapParams) {
+            heapParams->reserve(heapParams->size() + numIndices);
+        }
+        c2_status_t status = C2_OK;
+        c2_aidl::IConfigurable::QueryResult aidlResult;
+        ScopedAStatus transResult = mAidlConfigurable->query(indices, true, &aidlResult);
+        if (!transResult.isOk()) {
+            if (transResult.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+                status = c2_status_t(transResult.getServiceSpecificError());
+                LOG(DEBUG) << "query -- call failed: " << status << ".";
+                return status;
+            } else {
+                LOG(ERROR) << "query -- transaction failed.";
+                return C2_TRANSACTION_FAILED;
+            }
+        }
+        status = static_cast<c2_status_t>(aidlResult.status.status);
+        if (status != C2_OK) {
+            LOG(DEBUG) << "query -- call failed: " << status << ".";
+        }
+        std::vector<C2Param*> paramPointers;
+        if (!c2_aidl::utils::ParseParamsBlob(&paramPointers, aidlResult.params)) {
+            LOG(ERROR) << "query -- error while parsing params.";
+            return C2_CORRUPTED;
+        }
+        return UpdateQueryResult(paramPointers, numStackIndices, stackParams, heapParams);
+    }
+
+    c2_status_t query_sm_hidl(
+            const std::vector<C2Param *> &stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            std::vector<std::unique_ptr<C2Param>> *const heapParams) const {
+        hidl_vec<c2_hidl::ParamIndex> indices(
                 stackParams.size() + heapParamIndices.size());
         size_t numIndices = 0;
         for (C2Param* const& stackParam : stackParams) {
@@ -150,23 +361,23 @@
                 LOG(WARNING) << "query -- null stack param encountered.";
                 continue;
             }
-            indices[numIndices++] = static_cast<ParamIndex>(stackParam->index());
+            indices[numIndices++] = static_cast<c2_hidl::ParamIndex>(stackParam->index());
         }
         size_t numStackIndices = numIndices;
         for (const C2Param::Index& index : heapParamIndices) {
             indices[numIndices++] =
-                    static_cast<ParamIndex>(static_cast<uint32_t>(index));
+                    static_cast<c2_hidl::ParamIndex>(static_cast<uint32_t>(index));
         }
         indices.resize(numIndices);
         if (heapParams) {
             heapParams->reserve(heapParams->size() + numIndices);
         }
         c2_status_t status;
-        Return<void> transResult = mConfigurable->query(
+        Return<void> transResult = mHidlConfigurable->query(
                 indices,
                 true,
                 [&status, &numStackIndices, &stackParams, heapParams](
-                        Status s, const Params& p) {
+                        c2_hidl::Status s, const c2_hidl::Params& p) {
                     status = static_cast<c2_status_t>(s);
                     if (status != C2_OK && status != C2_BAD_INDEX) {
                         LOG(DEBUG) << "query -- call failed: "
@@ -174,58 +385,13 @@
                         return;
                     }
                     std::vector<C2Param*> paramPointers;
-                    if (!parseParamsBlob(&paramPointers, p)) {
+                    if (!c2_hidl::utils::parseParamsBlob(&paramPointers, p)) {
                         LOG(ERROR) << "query -- error while parsing params.";
                         status = C2_CORRUPTED;
                         return;
                     }
-                    size_t i = 0;
-                    for (auto it = paramPointers.begin();
-                            it != paramPointers.end(); ) {
-                        C2Param* paramPointer = *it;
-                        if (numStackIndices > 0) {
-                            --numStackIndices;
-                            if (!paramPointer) {
-                                LOG(WARNING) << "query -- null stack param.";
-                                ++it;
-                                continue;
-                            }
-                            for (; i < stackParams.size() && !stackParams[i]; ) {
-                                ++i;
-                            }
-                            if (i >= stackParams.size()) {
-                                LOG(ERROR) << "query -- unexpected error.";
-                                status = C2_CORRUPTED;
-                                return;
-                            }
-                            if (stackParams[i]->index() != paramPointer->index()) {
-                                LOG(WARNING) << "query -- param skipped: "
-                                                "index = "
-                                             << stackParams[i]->index() << ".";
-                                stackParams[i++]->invalidate();
-                                continue;
-                            }
-                            if (!stackParams[i++]->updateFrom(*paramPointer)) {
-                                LOG(WARNING) << "query -- param update failed: "
-                                                "index = "
-                                             << paramPointer->index() << ".";
-                            }
-                        } else {
-                            if (!paramPointer) {
-                                LOG(WARNING) << "query -- null heap param.";
-                                ++it;
-                                continue;
-                            }
-                            if (!heapParams) {
-                                LOG(WARNING) << "query -- "
-                                                "unexpected extra stack param.";
-                            } else {
-                                heapParams->emplace_back(
-                                        C2Param::Copy(*paramPointer));
-                            }
-                        }
-                        ++it;
-                    }
+                    status = UpdateQueryResult(
+                            paramPointers, numStackIndices, stackParams, heapParams);
                 });
         if (!transResult.isOk()) {
             LOG(ERROR) << "query -- transaction failed.";
@@ -234,15 +400,58 @@
         return status;
     }
 
-    virtual c2_status_t querySupportedParams_nb(
+    c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> *const params) const override {
+        if (mAidlStore) {
+            return querySupportedParams_nb_aidl(params);
+        } else if (mHidlStore) {
+            return querySupportedParams_nb_hidl(params);
+        } else {
+            return C2_OMITTED;
+        }
+    }
+
+    c2_status_t querySupportedParams_nb_aidl(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> *const params) const {
+        c2_status_t status = C2_OK;
+        std::vector<c2_aidl::ParamDescriptor> aidlParams;
+        ScopedAStatus transResult = mAidlConfigurable->querySupportedParams(
+                std::numeric_limits<uint32_t>::min(),
+                std::numeric_limits<uint32_t>::max(),
+                &aidlParams);
+        if (!transResult.isOk()) {
+            if (transResult.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+                status = c2_status_t(transResult.getServiceSpecificError());
+                LOG(DEBUG) << "querySupportedParams -- call failed: "
+                           << status << ".";
+                return status;
+            } else {
+                LOG(ERROR) << "querySupportedParams -- transaction failed.";
+                return C2_TRANSACTION_FAILED;
+            }
+        }
+
+        size_t i = params->size();
+        params->resize(i + aidlParams.size());
+        for (const c2_aidl::ParamDescriptor& sp : aidlParams) {
+            if (!c2_aidl::utils::FromAidl(&(*params)[i++], sp)) {
+                LOG(ERROR) << "querySupportedParams -- "
+                           << "invalid returned ParamDescriptor.";
+                break;
+            }
+        }
+        return status;
+    }
+
+    c2_status_t querySupportedParams_nb_hidl(
             std::vector<std::shared_ptr<C2ParamDescriptor>> *const params) const {
         c2_status_t status;
-        Return<void> transResult = mConfigurable->querySupportedParams(
+        Return<void> transResult = mHidlConfigurable->querySupportedParams(
                 std::numeric_limits<uint32_t>::min(),
                 std::numeric_limits<uint32_t>::max(),
                 [&status, params](
-                        Status s,
-                        const hidl_vec<ParamDescriptor>& p) {
+                        c2_hidl::Status s,
+                        const hidl_vec<c2_hidl::ParamDescriptor>& p) {
                     status = static_cast<c2_status_t>(s);
                     if (status != C2_OK) {
                         LOG(DEBUG) << "querySupportedParams -- call failed: "
@@ -251,8 +460,8 @@
                     }
                     size_t i = params->size();
                     params->resize(i + p.size());
-                    for (const ParamDescriptor& sp : p) {
-                        if (!objcpy(&(*params)[i++], sp)) {
+                    for (const c2_hidl::ParamDescriptor& sp : p) {
+                        if (!c2_hidl::utils::objcpy(&(*params)[i++], sp)) {
                             LOG(ERROR) << "querySupportedParams -- "
                                        << "invalid returned ParamDescriptor.";
                             return;
@@ -266,23 +475,80 @@
         return status;
     }
 
-    virtual c2_status_t querySupportedValues_sm(
+    c2_status_t querySupportedValues_sm(
+            std::vector<C2FieldSupportedValuesQuery> &fields) const override {
+        if (mAidlStore) {
+            return querySupportedValues_sm_aidl(fields);
+        } else if (mHidlStore) {
+            return querySupportedValues_sm_hidl(fields);
+        } else {
+            return C2_OMITTED;
+        }
+    }
+
+    c2_status_t querySupportedValues_sm_aidl(
             std::vector<C2FieldSupportedValuesQuery> &fields) const {
-        hidl_vec<FieldSupportedValuesQuery> inFields(fields.size());
+        std::vector<c2_aidl::FieldSupportedValuesQuery> aidlFields(fields.size());
         for (size_t i = 0; i < fields.size(); ++i) {
-            if (!objcpy(&inFields[i], fields[i])) {
+            if (!c2_aidl::utils::ToAidl(&aidlFields[i], fields[i])) {
+                LOG(ERROR) << "querySupportedValues -- bad input";
+                return C2_TRANSACTION_FAILED;
+            }
+        }
+
+        c2_status_t status = C2_OK;
+        c2_aidl::IConfigurable::QuerySupportedValuesResult queryResult;
+        ScopedAStatus transResult = mAidlConfigurable->querySupportedValues(
+                aidlFields, true, &queryResult);
+        if (!transResult.isOk()) {
+            if (transResult.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+                status = c2_status_t(transResult.getServiceSpecificError());
+                LOG(DEBUG) << "querySupportedValues -- call failed: "
+                           << status << ".";
+                return status;
+            } else {
+                LOG(ERROR) << "querySupportedValues -- transaction failed.";
+                return C2_TRANSACTION_FAILED;
+            }
+        }
+        status = static_cast<c2_status_t>(queryResult.status.status);
+        if (status != C2_OK) {
+            LOG(DEBUG) << "querySupportedValues -- call failed: "
+                       << status << ".";
+        }
+        if (queryResult.values.size() != fields.size()) {
+            LOG(ERROR) << "querySupportedValues -- "
+                          "input and output lists "
+                          "have different sizes.";
+            return C2_CORRUPTED;
+        }
+        for (size_t i = 0; i < fields.size(); ++i) {
+            if (!c2_aidl::utils::FromAidl(&fields[i], aidlFields[i], queryResult.values[i])) {
+                LOG(ERROR) << "querySupportedValues -- "
+                              "invalid returned value.";
+                return C2_CORRUPTED;
+            }
+        }
+        return status;
+    }
+
+    c2_status_t querySupportedValues_sm_hidl(
+            std::vector<C2FieldSupportedValuesQuery> &fields) const {
+        hidl_vec<c2_hidl::FieldSupportedValuesQuery> inFields(fields.size());
+        for (size_t i = 0; i < fields.size(); ++i) {
+            if (!c2_hidl::utils::objcpy(&inFields[i], fields[i])) {
                 LOG(ERROR) << "querySupportedValues -- bad input";
                 return C2_TRANSACTION_FAILED;
             }
         }
 
         c2_status_t status;
-        Return<void> transResult = mConfigurable->querySupportedValues(
+        Return<void> transResult = mHidlConfigurable->querySupportedValues(
                 inFields,
                 true,
                 [&status, &inFields, &fields](
-                        Status s,
-                        const hidl_vec<FieldSupportedValuesQueryResult>& r) {
+                        c2_hidl::Status s,
+                        const hidl_vec<c2_hidl::FieldSupportedValuesQueryResult>& r) {
                     status = static_cast<c2_status_t>(s);
                     if (status != C2_OK) {
                         LOG(DEBUG) << "querySupportedValues -- call failed: "
@@ -297,7 +563,7 @@
                         return;
                     }
                     for (size_t i = 0; i < fields.size(); ++i) {
-                        if (!objcpy(&fields[i], inFields[i], r[i])) {
+                        if (!c2_hidl::utils::objcpy(&fields[i], inFields[i], r[i])) {
                             LOG(ERROR) << "querySupportedValues -- "
                                           "invalid returned value.";
                             status = C2_CORRUPTED;
@@ -312,31 +578,83 @@
         return status;
     }
 
-    virtual C2String getName() const {
-        C2String outName;
-        Return<void> transResult = mConfigurable->getName(
-                [&outName](const hidl_string& name) {
-                    outName = name.c_str();
-                });
-        if (!transResult.isOk()) {
-            LOG(ERROR) << "getName -- transaction failed.";
+    C2String getName() const override {
+        C2String outName = "(unknown)";
+        if (mAidlStore) {
+            ScopedAStatus transResult = mAidlConfigurable->getName(&outName);
+            if (!transResult.isOk()) {
+                LOG(ERROR) << "getName -- transaction failed.";
+            }
+        } else if (mHidlStore) {
+            Return<void> transResult = mHidlConfigurable->getName(
+                    [&outName](const hidl_string& name) {
+                        outName = name.c_str();
+                    });
+            if (!transResult.isOk()) {
+                LOG(ERROR) << "getName -- transaction failed.";
+            }
         }
         return outName;
     }
 
-    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const
-            override {
+    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const override {
         struct SimpleParamReflector : public C2ParamReflector {
-            virtual std::unique_ptr<C2StructDescriptor> describe(
+            std::unique_ptr<C2StructDescriptor> describe(
+                    C2Param::CoreIndex coreIndex) const override {
+                if (mAidlBase) {
+                    return describe_aidl(coreIndex);
+                } else if (mHidlBase) {
+                    return describe_hidl(coreIndex);
+                } else {
+                    return nullptr;
+                }
+            }
+
+            std::unique_ptr<C2StructDescriptor> describe_aidl(
                     C2Param::CoreIndex coreIndex) const {
-                hidl_vec<ParamIndex> indices(1);
-                indices[0] = static_cast<ParamIndex>(coreIndex.coreIndex());
+                std::vector<int32_t> indices(1);
+                indices[0] = coreIndex.coreIndex();
                 std::unique_ptr<C2StructDescriptor> descriptor;
-                Return<void> transResult = mBase->getStructDescriptors(
+                std::vector<c2_aidl::StructDescriptor> aidlDescs;
+                ScopedAStatus transResult = mAidlBase->getStructDescriptors(
+                        indices, &aidlDescs);
+                if (!transResult.isOk()) {
+                    c2_status_t status = C2_TRANSACTION_FAILED;
+                    if (transResult.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+                        status = c2_status_t(transResult.getServiceSpecificError());
+                    }
+                    LOG(DEBUG) << "SimpleParamReflector -- "
+                                  "getStructDescriptors() failed: "
+                               << status << ".";
+                    return nullptr;
+                }
+                if (aidlDescs.size() != 1) {
+                    LOG(DEBUG) << "SimpleParamReflector -- "
+                                  "getStructDescriptors() "
+                                  "returned vector of size "
+                               << aidlDescs.size() << ". "
+                                  "It should be 1.";
+                    return nullptr;
+                }
+                if (!c2_aidl::utils::FromAidl(&descriptor, aidlDescs[0])) {
+                    LOG(DEBUG) << "SimpleParamReflector -- "
+                                  "getStructDescriptors() returned "
+                                  "corrupted data.";
+                    return nullptr;
+                }
+                return descriptor;
+            }
+
+            std::unique_ptr<C2StructDescriptor> describe_hidl(
+                    C2Param::CoreIndex coreIndex) const {
+                hidl_vec<c2_hidl::ParamIndex> indices(1);
+                indices[0] = static_cast<c2_hidl::ParamIndex>(coreIndex.coreIndex());
+                std::unique_ptr<C2StructDescriptor> descriptor;
+                Return<void> transResult = mHidlBase->getStructDescriptors(
                         indices,
                         [&descriptor](
-                                Status s,
-                                const hidl_vec<StructDescriptor>& sd) {
+                                c2_hidl::Status s,
+                                const hidl_vec<c2_hidl::StructDescriptor>& sd) {
                             c2_status_t status = static_cast<c2_status_t>(s);
                             if (status != C2_OK) {
                                 LOG(DEBUG) << "SimpleParamReflector -- "
@@ -354,7 +672,7 @@
                                 descriptor.reset();
                                 return;
                             }
-                            if (!objcpy(&descriptor, sd[0])) {
+                            if (!c2_hidl::utils::objcpy(&descriptor, sd[0])) {
                                 LOG(DEBUG) << "SimpleParamReflector -- "
                                               "getStructDescriptors() returned "
                                               "corrupted data.";
@@ -365,13 +683,23 @@
                 return descriptor;
             }
 
-            explicit SimpleParamReflector(sp<IComponentStore> base)
-                : mBase(base) { }
+            explicit SimpleParamReflector(const sp<HidlComponentStore> &base)
+                : mHidlBase(base) { }
 
-            sp<IComponentStore> mBase;
+            explicit SimpleParamReflector(const std::shared_ptr<AidlComponentStore> &base)
+                : mAidlBase(base) { }
+
+            std::shared_ptr<AidlComponentStore> mAidlBase;
+            sp<HidlComponentStore> mHidlBase;
         };
 
-        return std::make_shared<SimpleParamReflector>(mStore);
+        if (mAidlStore) {
+            return std::make_shared<SimpleParamReflector>(mAidlStore);
+        } else if (mHidlStore) {
+            return std::make_shared<SimpleParamReflector>(mHidlStore);
+        } else {
+            return nullptr;
+        }
     }
 
     virtual std::vector<std::shared_ptr<const C2Component::Traits>>
@@ -406,6 +734,12 @@
 } // unnamed namespace
 
 extern "C" void RegisterCodecServices() {
+    const bool aidlSelected = c2_aidl::utils::IsSelected();
+    constexpr int kThreadCount = 64;
+    ABinderProcess_setThreadPoolMaxThreadCount(kThreadCount);
+    ABinderProcess_startThreadPool();
+    ::android::hardware::configureRpcThreadpool(kThreadCount, false);
+
     LOG(INFO) << "Creating software Codec2 service...";
     std::shared_ptr<C2ComponentStore> store =
         android::GetCodec2PlatformComponentStore();
@@ -417,28 +751,27 @@
     using namespace ::android::hardware::media::c2;
 
     int platformVersion = android_get_device_api_level();
+    // STOPSHIP: Remove code name checking once platform version bumps up to 35.
+    std::string codeName =
+        android::base::GetProperty("ro.build.version.codename", "");
+    if (codeName == "VanillaIceCream") {
+        platformVersion = __ANDROID_API_V__;
+    }
 
-    if (platformVersion >= __ANDROID_API_S__) {
-        android::sp<V1_2::IComponentStore> storeV1_2 =
-            new V1_2::utils::ComponentStore(store);
-        if (storeV1_2->registerAsService("software") != android::OK) {
-            LOG(ERROR) << "Cannot register software Codec2 v1.2 service.";
-            return;
-        }
+    android::sp<V1_0::IComponentStore> hidlStore;
+    std::shared_ptr<c2_aidl::IComponentStore> aidlStore;
+    const char *hidlVer = "(unknown)";
+    if (aidlSelected) {
+        aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(store);
+    } else if (platformVersion >= __ANDROID_API_S__) {
+        hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(store);
+        hidlVer = "1.2";
     } else if (platformVersion == __ANDROID_API_R__) {
-        android::sp<V1_1::IComponentStore> storeV1_1 =
-            new V1_1::utils::ComponentStore(store);
-        if (storeV1_1->registerAsService("software") != android::OK) {
-            LOG(ERROR) << "Cannot register software Codec2 v1.1 service.";
-            return;
-        }
+        hidlStore = ::android::sp<V1_1::utils::ComponentStore>::make(store);
+        hidlVer = "1.1";
     } else if (platformVersion == __ANDROID_API_Q__) {
-        android::sp<V1_0::IComponentStore> storeV1_0 =
-            new V1_0::utils::ComponentStore(store);
-        if (storeV1_0->registerAsService("software") != android::OK) {
-            LOG(ERROR) << "Cannot register software Codec2 v1.0 service.";
-            return;
-        }
+        hidlStore = ::android::sp<V1_0::utils::ComponentStore>::make(store);
+        hidlVer = "1.0";
     } else {  // platformVersion < __ANDROID_API_Q__
         LOG(ERROR) << "The platform version " << platformVersion <<
                       " is not supported.";
@@ -448,19 +781,75 @@
         using IComponentStore =
             ::android::hardware::media::c2::V1_0::IComponentStore;
         std::string const preferredStoreName = "default";
-        sp<IComponentStore> preferredStore =
-            IComponentStore::getService(preferredStoreName.c_str());
-        if (preferredStore) {
-            ::android::SetPreferredCodec2ComponentStore(
-                    std::make_shared<H2C2ComponentStore>(preferredStore));
-            LOG(INFO) <<
-                    "Preferred Codec2 store is set to \"" <<
-                    preferredStoreName << "\".";
+        if (aidlSelected) {
+            std::shared_ptr<c2_aidl::IComponentStore> preferredStore;
+            if (__builtin_available(android __ANDROID_API_S__, *)) {
+                std::string instanceName = ::android::base::StringPrintf(
+                        "%s/%s", c2_aidl::IComponentStore::descriptor, preferredStoreName.c_str());
+                if (AServiceManager_isDeclared(instanceName.c_str())) {
+                    preferredStore = c2_aidl::IComponentStore::fromBinder(::ndk::SpAIBinder(
+                            AServiceManager_waitForService(instanceName.c_str())));
+                }
+            }
+            if (preferredStore) {
+                ::android::SetPreferredCodec2ComponentStore(
+                        std::make_shared<H2C2ComponentStore>(preferredStore));
+                LOG(INFO) <<
+                        "Preferred Codec2 AIDL store is set to \"" <<
+                        preferredStoreName << "\".";
+            } else {
+                LOG(INFO) <<
+                        "Preferred Codec2 AIDL store is defaulted to \"software\".";
+            }
         } else {
-            LOG(INFO) <<
-                    "Preferred Codec2 store is defaulted to \"software\".";
+            sp<IComponentStore> preferredStore =
+                IComponentStore::getService(preferredStoreName.c_str());
+            if (preferredStore) {
+                ::android::SetPreferredCodec2ComponentStore(
+                        std::make_shared<H2C2ComponentStore>(preferredStore));
+                LOG(INFO) <<
+                        "Preferred Codec2 HIDL store is set to \"" <<
+                        preferredStoreName << "\".";
+            } else {
+                LOG(INFO) <<
+                        "Preferred Codec2 HIDL store is defaulted to \"software\".";
+            }
         }
     }
-    LOG(INFO) << "Software Codec2 service created and registered.";
+
+    bool registered = false;
+    if (platformVersion >= __ANDROID_API_V__) {
+        if (!aidlStore) {
+            aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(
+                    std::make_shared<H2C2ComponentStore>(nullptr));
+        }
+        const std::string serviceName =
+            std::string(c2_aidl::IComponentStore::descriptor) + "/software";
+        binder_exception_t ex = AServiceManager_addService(
+                aidlStore->asBinder().get(), serviceName.c_str());
+        if (ex == EX_NONE) {
+            registered = true;
+        } else {
+            LOG(ERROR) << "Cannot register software Codec2 AIDL service.";
+        }
+    }
+
+    if (!hidlStore) {
+        hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(
+                std::make_shared<H2C2ComponentStore>(nullptr));
+        hidlVer = "1.2";
+    }
+    if (hidlStore->registerAsService("software") == android::OK) {
+        registered = true;
+    } else {
+        LOG(ERROR) << "Cannot register software Codec2 v" << hidlVer << " service.";
+    }
+
+    if (registered) {
+        LOG(INFO) << "Software Codec2 service created and registered.";
+    }
+
+    ABinderProcess_joinThreadPool();
+    ::android::hardware::joinRpcThreadpool();
 }
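
For orientation, here is a minimal, self-contained sketch of the dispatch pattern used throughout the H2C2ComponentStore changes above: each overridden entry point checks which transport is bound and forwards to a matching _aidl or _hidl helper. The Frontend, AidlBackend, and HidlBackend types below are illustrative stand-ins, not code from this patch.

    #include <memory>
    #include <string>

    struct AidlBackend { std::string name() const { return "aidl-store"; } };
    struct HidlBackend { std::string name() const { return "hidl-store"; } };

    class Frontend {
    public:
        explicit Frontend(std::shared_ptr<AidlBackend> aidl) : mAidl(std::move(aidl)) {}
        explicit Frontend(std::shared_ptr<HidlBackend> hidl) : mHidl(std::move(hidl)) {}

        // Every public entry point picks whichever backend is bound; at most
        // one of mAidl / mHidl is non-null for a given instance.
        std::string getName() const {
            if (mAidl) return getName_aidl();
            if (mHidl) return getName_hidl();
            return "(unknown)";
        }

    private:
        std::string getName_aidl() const { return mAidl->name(); }
        std::string getName_hidl() const { return mHidl->name(); }

        std::shared_ptr<AidlBackend> mAidl;
        std::shared_ptr<HidlBackend> mHidl;
    };

    int main() {
        Frontend f{std::make_shared<AidlBackend>()};
        return f.getName() == "aidl-store" ? 0 : 1;
    }
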
 
diff --git a/media/module/codecserviceregistrant/fuzzer/Android.bp b/media/module/codecserviceregistrant/fuzzer/Android.bp
index 1cb8c2b..f4e8751 100644
--- a/media/module/codecserviceregistrant/fuzzer/Android.bp
+++ b/media/module/codecserviceregistrant/fuzzer/Android.bp
@@ -34,6 +34,7 @@
         "libmedia_headers",
     ],
     defaults: [
+        "libcodec2-aidl-defaults",
         "libcodec2-hidl-defaults",
     ],
     fuzz_config: {
diff --git a/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
index fba4230..4868e0c 100644
--- a/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
+++ b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
@@ -86,8 +86,8 @@
   }
 
   string const preferredStoreName = string(kServiceName);
-  sp<IComponentStore> preferredStore =
-      IComponentStore::getService(preferredStoreName.c_str());
+  sp<V1_0::IComponentStore> preferredStore =
+      V1_0::IComponentStore::getService(preferredStoreName.c_str());
   mH2C2 = new H2C2ComponentStore(preferredStore);
 }
 
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index 91ca7b1..d096d63 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -69,9 +69,9 @@
 
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback+bugs@google.com",
         ],
-        componentid: 155276,
+        componentid: 817235,
         hotlists: [
             "4593311",
         ],
@@ -189,6 +189,8 @@
     ],
 
     dictionary: "mkv_extractor_fuzzer.dict",
+
+    corpus: ["corpus/*"],
 }
 
 cc_fuzz {
diff --git a/media/module/extractors/fuzzers/corpus/103c24dec0f5da3638a771e451cecfe38339b29c b/media/module/extractors/fuzzers/corpus/103c24dec0f5da3638a771e451cecfe38339b29c
new file mode 100755
index 0000000..8b31683
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/103c24dec0f5da3638a771e451cecfe38339b29c
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/2e532f8eb60e1c757f4399377e8f563a3cf3abf2e b/media/module/extractors/fuzzers/corpus/2e532f8eb60e1c757f4399377e8f563a3cf3abf2e
new file mode 100755
index 0000000..f4d475d
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/2e532f8eb60e1c757f4399377e8f563a3cf3abf2e
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/45e6a4014883a7e1f1200e2a53eabb4f0109aec3 b/media/module/extractors/fuzzers/corpus/45e6a4014883a7e1f1200e2a53eabb4f0109aec3
new file mode 100755
index 0000000..8438e66
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/45e6a4014883a7e1f1200e2a53eabb4f0109aec3
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/465b39984b71d8b4c2b80072993fb7ec73b4af69 b/media/module/extractors/fuzzers/corpus/465b39984b71d8b4c2b80072993fb7ec73b4af69
new file mode 100755
index 0000000..2f622cd
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/465b39984b71d8b4c2b80072993fb7ec73b4af69
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/4ef9546eab199719aadead2d26d3c1d72f42e600 b/media/module/extractors/fuzzers/corpus/4ef9546eab199719aadead2d26d3c1d72f42e600
new file mode 100755
index 0000000..f053c01
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/4ef9546eab199719aadead2d26d3c1d72f42e600
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/6b35d5a3af88baf240293ff1b8adf0b774055e65 b/media/module/extractors/fuzzers/corpus/6b35d5a3af88baf240293ff1b8adf0b774055e65
new file mode 100755
index 0000000..872451c
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/6b35d5a3af88baf240293ff1b8adf0b774055e65
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/7e798d68a1bf154e079227ee43e69ea27844b7e8 b/media/module/extractors/fuzzers/corpus/7e798d68a1bf154e079227ee43e69ea27844b7e8
new file mode 100755
index 0000000..2f7e3ea
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/7e798d68a1bf154e079227ee43e69ea27844b7e8
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/d2fd225343c99872f5a825f7f06b8c1dac0e8687 b/media/module/extractors/fuzzers/corpus/d2fd225343c99872f5a825f7f06b8c1dac0e8687
new file mode 100755
index 0000000..10b5f9a
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/d2fd225343c99872f5a825f7f06b8c1dac0e8687
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/d5602e69abf068ed8f1277e412149b8664d06620 b/media/module/extractors/fuzzers/corpus/d5602e69abf068ed8f1277e412149b8664d06620
new file mode 100755
index 0000000..969b7a2
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/d5602e69abf068ed8f1277e412149b8664d06620
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/e73fa90346e7287b7e923b0ebf07ce8988d94498 b/media/module/extractors/fuzzers/corpus/e73fa90346e7287b7e923b0ebf07ce8988d94498
new file mode 100755
index 0000000..bd146b1
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/e73fa90346e7287b7e923b0ebf07ce8988d94498
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus/f1ee160337d3a467402a2217897477f0fab15da b/media/module/extractors/fuzzers/corpus/f1ee160337d3a467402a2217897477f0fab15da
new file mode 100755
index 0000000..7cf844e
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus/f1ee160337d3a467402a2217897477f0fab15da
Binary files differ
diff --git a/media/module/extractors/mp4/ItemTable.cpp b/media/module/extractors/mp4/ItemTable.cpp
index 7fe5ba7..06da18d 100644
--- a/media/module/extractors/mp4/ItemTable.cpp
+++ b/media/module/extractors/mp4/ItemTable.cpp
@@ -1452,9 +1452,9 @@
                     info.isExif(), (long long)offset, (long long)size);
             if ((info.isExif() && size > 4) || (info.isXmp() && size > 0)) {
                 ExternalMetaItem metaItem = {
-                        .isExif = info.isExif(),
                         .offset = offset,
                         .size = size,
+                        .isExif = info.isExif(),
                 };
                 mItemIdToMetaMap.add(info.itemId, metaItem);
             }
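
The field reordering above is driven by a C++20 rule: designated initializers must list members in declaration order, which presumably places offset and size before isExif in ExternalMetaItem. A minimal illustration with a stand-in struct:

    #include <cstdint>

    struct MetaItem {   // stand-in; mirrors the assumed field order of ExternalMetaItem
        uint64_t offset;
        uint64_t size;
        bool isExif;
    };

    int main() {
        // OK: designators follow declaration order.
        MetaItem ok = { .offset = 0, .size = 16, .isExif = true };
        // { .isExif = true, .offset = 0, .size = 16 } would be ill-formed in
        // C++20 because the designators are out of declaration order.
        return ok.size == 16 ? 0 : 1;
    }
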
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index a9ca078..4b0dfe8 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -522,7 +522,7 @@
         return AMEDIA_ERROR_UNKNOWN;
     }
 
-    [=] {
+    [this, &track] {
         int64_t duration;
         int32_t samplerate;
         // Only for audio track.
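
The capture-list change above reflects that implicit capture of `this` through [=] is deprecated in C++20; writing [this, &track] makes the lambda's dependencies explicit. A small stand-alone illustration (the Widget type is hypothetical):

    #include <cstdio>

    struct Widget {   // hypothetical type, for illustration only
        int value = 7;
        int addTo(int& counter) const {
            // Explicit captures: the lambda uses the enclosing object and a
            // reference to `counter`; nothing is captured by accident.
            auto body = [this, &counter] {
                counter += value;
                return counter;
            };
            return body();
        }
    };

    int main() {
        Widget w;
        int counter = 0;
        std::printf("%d\n", w.addTo(counter));  // prints 7
        return 0;
    }
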
diff --git a/media/module/extractors/wav/WAVExtractor.cpp b/media/module/extractors/wav/WAVExtractor.cpp
index 9c3bac6..d278103 100644
--- a/media/module/extractors/wav/WAVExtractor.cpp
+++ b/media/module/extractors/wav/WAVExtractor.cpp
@@ -219,7 +219,7 @@
 
             mNumChannels = U16_LE_AT(&formatSpec[2]);
 
-            if (mNumChannels < 1 || mNumChannels > FCC_8) {
+            if (mNumChannels < 1 || mNumChannels > FCC_12) {
                 ALOGE("Unsupported number of channels (%d)", mNumChannels);
                 return AMEDIA_ERROR_UNSUPPORTED;
             }
diff --git a/media/module/foundation/MetaDataBase.cpp b/media/module/foundation/MetaDataBase.cpp
index da383fa..60478c9 100644
--- a/media/module/foundation/MetaDataBase.cpp
+++ b/media/module/foundation/MetaDataBase.cpp
@@ -23,6 +23,8 @@
 #include <stdlib.h>
 #include <string.h>
 
+#include <mutex>
+
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/hexdump.h>
@@ -78,6 +80,7 @@
 
 
 struct MetaDataBase::MetaDataInternal {
+    std::mutex mLock;
     KeyedVector<uint32_t, MetaDataBase::typed_data> mItems;
 };
 
@@ -102,10 +105,12 @@
 }
 
 void MetaDataBase::clear() {
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     mInternalData->mItems.clear();
 }
 
 bool MetaDataBase::remove(uint32_t key) {
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     ssize_t i = mInternalData->mItems.indexOfKey(key);
 
     if (i < 0) {
@@ -252,6 +257,7 @@
         uint32_t key, uint32_t type, const void *data, size_t size) {
     bool overwrote_existing = true;
 
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     ssize_t i = mInternalData->mItems.indexOfKey(key);
     if (i < 0) {
         typed_data item;
@@ -269,6 +275,7 @@
 
 bool MetaDataBase::findData(uint32_t key, uint32_t *type,
                         const void **data, size_t *size) const {
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     ssize_t i = mInternalData->mItems.indexOfKey(key);
 
     if (i < 0) {
@@ -283,6 +290,7 @@
 }
 
 bool MetaDataBase::hasData(uint32_t key) const {
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     ssize_t i = mInternalData->mItems.indexOfKey(key);
 
     if (i < 0) {
@@ -429,6 +437,7 @@
 
 String8 MetaDataBase::toString() const {
     String8 s;
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     for (int i = mInternalData->mItems.size(); --i >= 0;) {
         int32_t key = mInternalData->mItems.keyAt(i);
         char cc[5];
@@ -443,6 +452,7 @@
 }
 
 void MetaDataBase::dumpToLog() const {
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     for (int i = mInternalData->mItems.size(); --i >= 0;) {
         int32_t key = mInternalData->mItems.keyAt(i);
         char cc[5];
@@ -455,6 +465,7 @@
 #if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 status_t MetaDataBase::writeToParcel(Parcel &parcel) {
     status_t ret;
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     size_t numItems = mInternalData->mItems.size();
     ret = parcel.writeUint32(uint32_t(numItems));
     if (ret) {
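
The additions above all follow one pattern: every accessor of the shared item map holds the same mutex for its full duration, including the const readers. A minimal sketch of that pattern with a stand-in class (not the MetaDataBase API):

    #include <cstdint>
    #include <map>
    #include <mutex>
    #include <optional>

    class SmallStore {   // stand-in, not the MetaDataBase API
    public:
        void set(uint32_t key, int value) {
            std::lock_guard<std::mutex> guard(mLock);
            mItems[key] = value;
        }
        std::optional<int> get(uint32_t key) const {
            std::lock_guard<std::mutex> guard(mLock);
            auto it = mItems.find(key);
            if (it == mItems.end()) return std::nullopt;
            return it->second;
        }
    private:
        // mutable so const readers (like findData()/hasData() above) can lock.
        mutable std::mutex mLock;
        std::map<uint32_t, int> mItems;
    };

    int main() {
        SmallStore s;
        s.set(1u, 42);
        return s.get(1u).value_or(0) == 42 ? 0 : 1;
    }
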
diff --git a/media/module/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/module/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
index c3a0ced..f8906dc 100644
--- a/media/module/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
+++ b/media/module/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
@@ -15,6 +15,7 @@
 -->
 <configuration description="Unit test configuration for {MODULE}">
     <option name="test-suite-tag" value="TranscoderTests" />
+    <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer" />
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="false" />
         <option name="push-file" key="TranscodingTestAssets" value="/data/local/tmp/TranscodingTestAssets" />
diff --git a/media/module/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/module/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
index 88c3fd3..fed8fc9 100644
--- a/media/module/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
+++ b/media/module/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
@@ -20,6 +20,7 @@
 #define LOG_TAG "VideoTrackTranscoderTests"
 
 #include <android-base/logging.h>
+#include <android/binder_process.h>
 #include <fcntl.h>
 #include <gtest/gtest.h>
 #include <media/MediaSampleReaderNDK.h>
@@ -221,5 +222,6 @@
 
 int main(int argc, char** argv) {
     ::testing::InitGoogleTest(&argc, argv);
+    ABinderProcess_startThreadPool();
     return RUN_ALL_TESTS();
 }
diff --git a/media/module/libmediatranscoding/transcoder/tests/fuzzer/Android.bp b/media/module/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
index 69b2827..b6eca2a 100644
--- a/media/module/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
+++ b/media/module/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
@@ -48,8 +48,16 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-editing@google.com",
         ],
-        componentid: 155276,
+        componentid: 761430,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmediatranscoder",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/module/mpeg2ts/ATSParser.cpp b/media/module/mpeg2ts/ATSParser.cpp
index 6aeea3b..86187bd 100644
--- a/media/module/mpeg2ts/ATSParser.cpp
+++ b/media/module/mpeg2ts/ATSParser.cpp
@@ -23,8 +23,8 @@
 #include "ESQueue.h"
 
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
-#include <android/hidl/allocator/1.0/IAllocator.h>
 #include <android/hidl/memory/1.0/IMemory.h>
+#include <cutils/ashmem.h>
 #include <cutils/native_handle.h>
 #include <hidlmemory/mapping.h>
 #include <media/cas/DescramblerAPI.h>
@@ -46,12 +46,12 @@
 #include <inttypes.h>
 
 namespace android {
+using hardware::hidl_handle;
 using hardware::hidl_string;
 using hardware::hidl_vec;
 using hardware::hidl_memory;
 using namespace hardware::cas::V1_0;
 using namespace hardware::cas::native::V1_0;
-typedef hidl::allocator::V1_0::IAllocator TAllocator;
 typedef hidl::memory::V1_0::IMemory TMemory;
 
 // I want the expression "y" evaluated even if verbose logging is off.
@@ -210,7 +210,6 @@
     bool mSampleEncrypted;
     sp<AMessage> mSampleAesKeyItem;
     sp<TMemory> mHidlMemory;
-    sp<TAllocator> mHidlAllocator;
     hardware::cas::native::V1_0::SharedBuffer mDescramblerSrcBuffer;
     sp<ABuffer> mDescrambledBuffer;
     List<SubSampleInfo> mSubSamples;
@@ -1006,34 +1005,29 @@
     sp<ABuffer> newBuffer, newScrambledBuffer;
     sp<TMemory> newMem;
     if (mScrambled) {
-        if (mHidlAllocator == nullptr) {
-            mHidlAllocator = TAllocator::getService("ashmem");
-            if (mHidlAllocator == nullptr) {
-                ALOGE("[stream %d] can't get hidl allocator", mElementaryPID);
-                return false;
+        int fd = ashmem_create_region("mediaATS", neededSize);
+        if (fd < 0) {
+            ALOGE("[stream %d] ashmem_create_region failed for size %zu. FD returned: %d",
+                    mElementaryPID, neededSize, fd);
+            return false;
+        }
+
+        native_handle_t* handle = native_handle_create(1 /*numFds*/, 0/*numInts*/);
+        if (handle == nullptr) {
+            ALOGE("[stream %d] failed to create a native_handle_t", mElementaryPID);
+            if (close(fd)) {
+                ALOGE("[stream %d] failed to close ashmem fd. errno: %s", mElementaryPID,
+                      strerror(errno));
             }
-        }
 
-        hidl_memory hidlMemToken;
-        bool success;
-        auto transStatus = mHidlAllocator->allocate(
-                neededSize,
-                [&success, &hidlMemToken](
-                        bool s,
-                        hidl_memory const& m) {
-                    success = s;
-                    hidlMemToken = m;
-                });
-
-        if (!transStatus.isOk()) {
-            ALOGE("[stream %d] hidl allocator failed at the transport: %s",
-                    mElementaryPID, transStatus.description().c_str());
             return false;
         }
-        if (!success) {
-            ALOGE("[stream %d] hidl allocator failed", mElementaryPID);
-            return false;
-        }
+
+        handle->data[0] = fd;
+        hidl_handle memHandle;
+        memHandle.setTo(handle, true /*shouldOwn*/);
+        hidl_memory hidlMemToken("ashmem", memHandle, neededSize);
+
         newMem = mapMemory(hidlMemToken);
         if (newMem == nullptr || newMem->getPointer() == nullptr) {
             ALOGE("[stream %d] hidl failed to map memory", mElementaryPID);
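
The replacement above drops the android.hidl.allocator dependency and builds the hidl_memory from an ashmem fd directly. Below is a compact sketch of that flow, factored into a helper; it reuses only the calls that appear in the patch, the helper name is illustrative, and it is Android-only (libcutils, libhidlbase, libhidlmemory).

    #include <android/hidl/memory/1.0/IMemory.h>
    #include <cstddef>
    #include <cutils/ashmem.h>
    #include <cutils/native_handle.h>
    #include <hidl/HidlSupport.h>
    #include <hidlmemory/mapping.h>
    #include <unistd.h>

    using ::android::sp;
    using ::android::hardware::hidl_handle;
    using ::android::hardware::hidl_memory;
    using TMemory = ::android::hidl::memory::V1_0::IMemory;

    // Creates an ashmem region of `size` bytes and maps it as a hidl IMemory.
    // Returns nullptr on failure; the handle (and fd) are owned by memHandle
    // once setTo() is called with shouldOwn = true.
    static sp<TMemory> allocateAshmemHidlMemory(size_t size) {
        int fd = ashmem_create_region("sketch", size);
        if (fd < 0) {
            return nullptr;
        }
        native_handle_t* handle = native_handle_create(1 /*numFds*/, 0 /*numInts*/);
        if (handle == nullptr) {
            close(fd);
            return nullptr;
        }
        handle->data[0] = fd;
        hidl_handle memHandle;
        memHandle.setTo(handle, true /*shouldOwn*/);
        hidl_memory token("ashmem", memHandle, size);
        return ::android::hardware::mapMemory(token);
    }
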
diff --git a/media/module/mpeg2ts/Android.bp b/media/module/mpeg2ts/Android.bp
index bf762c6..c710ffb 100644
--- a/media/module/mpeg2ts/Android.bp
+++ b/media/module/mpeg2ts/Android.bp
@@ -44,7 +44,6 @@
         "libhidlmemory",
         "android.hardware.cas.native@1.0",
         "android.hidl.memory@1.0",
-        "android.hidl.allocator@1.0",
     ],
 
     header_libs: [
diff --git a/media/module/mpeg2ts/test/Android.bp b/media/module/mpeg2ts/test/Android.bp
index 34a8d3e..cccefac 100644
--- a/media/module/mpeg2ts/test/Android.bp
+++ b/media/module/mpeg2ts/test/Android.bp
@@ -37,9 +37,8 @@
     shared_libs: [
         "android.hardware.cas@1.0",
         "android.hardware.cas.native@1.0",
-        "android.hidl.token@1.0-utils",
-        "android.hidl.allocator@1.0",
         "libcrypto",
+        "libcutils",
         "libhidlbase",
         "libhidlmemory",
         "liblog",
diff --git a/media/module/service.mediatranscoding/tests/Android.bp b/media/module/service.mediatranscoding/tests/Android.bp
index 97fbd4c..9fb6d0d 100644
--- a/media/module/service.mediatranscoding/tests/Android.bp
+++ b/media/module/service.mediatranscoding/tests/Android.bp
@@ -14,6 +14,7 @@
 cc_defaults {
     name: "mediatranscodingservice_test_defaults",
 
+    cpp_std: "gnu++17",
     cflags: [
         "-Wall",
         "-Werror",
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index 2ffd775..ef8c9aa 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -297,6 +297,10 @@
 }
 
 void MtpFfsHandle::close() {
+    auto timeout = std::chrono::seconds(2);
+    std::unique_lock lk(m);
+    cv.wait_for(lk, timeout, [this]{ return child_threads == 0; });
+
     io_destroy(mCtx);
     closeEndpoints();
     closeConfig();
@@ -669,6 +673,11 @@
     char *temp = new char[me.length];
     memcpy(temp, me.data, me.length);
     me.data = temp;
+
+    std::unique_lock lk(m);
+    child_threads++;
+    lk.unlock();
+
     std::thread t([this, me]() { return this->doSendEvent(me); });
     t.detach();
     return 0;
@@ -680,6 +689,11 @@
     if (static_cast<unsigned>(ret) != length)
         PLOG(ERROR) << "Mtp error sending event thread!";
     delete[] reinterpret_cast<char*>(me.data);
+
+    std::unique_lock lk(m);
+    child_threads--;
+    lk.unlock();
+    cv.notify_one();
 }
 
 } // namespace android
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index e552e03..51cdef0 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -60,6 +60,10 @@
     bool mCanceled;
     bool mBatchCancel;
 
+    std::mutex m;
+    std::condition_variable cv;
+    std::atomic<int> child_threads{0};
+
     android::base::unique_fd mControl;
     // "in" from the host's perspective => sink for mtp server
     android::base::unique_fd mBulkIn;
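
The new members above support a shutdown handshake: each detached event thread bumps child_threads, decrements and notifies when done, and close() waits up to two seconds for the count to reach zero. A self-contained sketch of the same pattern (names are illustrative):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>
    #include <thread>

    class EventSender {
    public:
        void sendAsync() {
            {
                std::lock_guard<std::mutex> lk(m);
                ++child_threads;   // count the worker before it is detached
            }
            std::thread([this] { doSend(); }).detach();
        }

        void close() {
            // Give in-flight workers up to two seconds, as in MtpFfsHandle::close().
            std::unique_lock<std::mutex> lk(m);
            cv.wait_for(lk, std::chrono::seconds(2), [this] { return child_threads == 0; });
        }

    private:
        void doSend() {
            // ... do the actual work, then report completion ...
            std::lock_guard<std::mutex> lk(m);
            --child_threads;
            cv.notify_one();
        }

        std::mutex m;
        std::condition_variable cv;
        int child_threads = 0;
    };

    int main() {
        EventSender s;
        s.sendAsync();
        s.close();
        return 0;
    }
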
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
index f069a83..5faaac2 100644
--- a/media/mtp/MtpPacket.cpp
+++ b/media/mtp/MtpPacket.cpp
@@ -92,24 +92,46 @@
 }
 
 uint16_t MtpPacket::getUInt16(int offset) const {
-    return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+    if ((unsigned long)(offset+2) <= mBufferSize) {
+        return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+    }
+    else {
+        ALOGE("offset for buffer read is greater than buffer size!");
+        abort();
+    }
 }
 
 uint32_t MtpPacket::getUInt32(int offset) const {
-    return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
-           ((uint32_t)mBuffer[offset + 1] << 8)  | (uint32_t)mBuffer[offset];
+    if ((unsigned long)(offset+4) <= mBufferSize) {
+        return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
+               ((uint32_t)mBuffer[offset + 1] << 8)  | (uint32_t)mBuffer[offset];
+    }
+    else {
+        ALOGE("offset for buffer read is greater than buffer size!");
+        abort();
+    }
 }
 
 void MtpPacket::putUInt16(int offset, uint16_t value) {
-    mBuffer[offset++] = (uint8_t)(value & 0xFF);
-    mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+    if ((unsigned long)(offset+2) <= mBufferSize) {
+        mBuffer[offset++] = (uint8_t)(value & 0xFF);
+        mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+    }
+    else {
+        ALOGE("offset for buffer write is greater than buffer size!");
+    }
 }
 
 void MtpPacket::putUInt32(int offset, uint32_t value) {
-    mBuffer[offset++] = (uint8_t)(value & 0xFF);
-    mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
-    mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
-    mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+    if ((unsigned long)(offset+4) <= mBufferSize) {
+        mBuffer[offset++] = (uint8_t)(value & 0xFF);
+        mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+        mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
+        mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+    }
+    else {
+        ALOGE("offset for buffer write is greater than buffer size!");
+    }
 }
 
 uint16_t MtpPacket::getContainerCode() const {
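
The checks above ensure every read or write of the packet buffer fits within mBufferSize before touching memory. A stand-alone sketch of a bounds-checked little-endian read; unlike the patch, it returns an empty optional instead of aborting:

    #include <cstddef>
    #include <cstdint>
    #include <optional>
    #include <vector>

    static std::optional<uint16_t> getUInt16LE(const std::vector<uint8_t>& buf, size_t offset) {
        if (offset >= buf.size() || buf.size() - offset < 2) {
            return std::nullopt;  // the two bytes would run past the end of the packet
        }
        return static_cast<uint16_t>((buf[offset + 1] << 8) | buf[offset]);
    }

    int main() {
        std::vector<uint8_t> packet = {0x34, 0x12, 0x78};
        auto ok = getUInt16LE(packet, 0);   // 0x1234
        auto bad = getUInt16LE(packet, 2);  // rejected: only one byte remains
        return (ok.value_or(0) == 0x1234 && !bad.has_value()) ? 0 : 1;
    }
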
diff --git a/media/mtp/tests/MtpFuzzer/Android.bp b/media/mtp/tests/MtpFuzzer/Android.bp
index 9e41680..acae06a 100644
--- a/media/mtp/tests/MtpFuzzer/Android.bp
+++ b/media/mtp/tests/MtpFuzzer/Android.bp
@@ -38,11 +38,19 @@
     ],
     fuzz_config: {
 
-        cc: ["jameswei@google.com"],
-        componentid: 1344,
+        cc: ["android-usb@google.com"],
+        componentid: 1407286,
         acknowledgement: [
             "Grant Hernandez of Google",
         ],
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of the libmtp library",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 cc_fuzz {
diff --git a/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h b/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h
index 87fea9f..9be53a2 100644
--- a/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h
+++ b/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h
@@ -28,6 +28,7 @@
 constexpr size_t kMinSize = 0;
 constexpr size_t kMaxSize = 1000;
 constexpr size_t kMaxLength = 1000;
+constexpr size_t kMaxPathLength = 64;
 
 class MtpPacketFuzzerUtils {
   protected:
@@ -43,7 +44,7 @@
     };
 
     void fillFilePath(FuzzedDataProvider* fdp) {
-       mPath= fdp->ConsumeRandomLengthString(kMaxLength);
+       mPath= fdp->ConsumeRandomLengthString(kMaxPathLength);
     };
 
     void fillUsbDevFsUrb(FuzzedDataProvider* fdp) {
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 8b9dde3..9ec7700 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -208,19 +208,21 @@
 }
 
 cc_test {
-    name: "AImageReaderWindowHandleTest",
+    name: "AImageReaderWindowTest",
     test_suites: ["device-tests"],
-    srcs: ["tests/AImageReaderWindowHandleTest.cpp"],
+    srcs: ["tests/AImageReaderWindowTest.cpp"],
     shared_libs: [
         "libbinder",
         "libmediandk",
         "libmediautils",
         "libnativewindow",
         "libgui",
+        "libhidlbase",
         "libutils",
         "libui",
         "libcutils",
         "android.hardware.graphics.bufferqueue@1.0",
+        "android.hidl.token@1.0",
     ],
 
     header_libs: [
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index caee37d..7b19ac0 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -570,6 +570,9 @@
     }
 }
 
+// The LL-NDK API is now deprecated. New devices will no longer have the token
+// manager service installed, so createHalToken will return false and this
+// will return AMEDIA_ERROR_UNKNOWN on those devices.
 media_status_t AImageReader::getWindowNativeHandle(native_handle **handle) {
     if (mWindowHandle != nullptr) {
         *handle = mWindowHandle;
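
As the comment above suggests, callers should move from the native-handle path to fetching the ANativeWindow directly. A minimal sketch using the public NDK calls (error handling trimmed; build against libmediandk):

    #include <media/NdkImage.h>
    #include <media/NdkImageReader.h>

    bool getReaderWindow() {
        AImageReader* reader = nullptr;
        if (AImageReader_new(640, 480, AIMAGE_FORMAT_RGBA_8888,
                             /*maxImages=*/2, &reader) != AMEDIA_OK) {
            return false;
        }
        ANativeWindow* window = nullptr;  // owned by the reader; do not release separately
        media_status_t status = AImageReader_getWindow(reader, &window);
        bool ok = (status == AMEDIA_OK && window != nullptr);
        AImageReader_delete(reader);
        return ok;
    }
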
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 2fb5728..b230df5 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -672,7 +672,7 @@
         if (out_size != NULL) {
             *out_size = abuf->capacity();
         }
-        return abuf->base();
+        return abuf->data();
     }
 
     android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
@@ -689,7 +689,7 @@
         if (out_size != NULL) {
             *out_size = abufs[idx]->capacity();
         }
-        return abufs[idx]->base();
+        return abufs[idx]->data();
     }
     ALOGE("couldn't get input buffers");
     return NULL;
@@ -707,7 +707,7 @@
         if (out_size != NULL) {
             *out_size = abuf->capacity();
         }
-        return abuf->base();
+        return abuf->data();
     }
 
     android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
@@ -720,7 +720,7 @@
         if (out_size != NULL) {
             *out_size = abufs[idx]->capacity();
         }
-        return abufs[idx]->base();
+        return abufs[idx]->data();
     }
     ALOGE("couldn't get output buffers");
     return NULL;
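
The switch from base() to data() above matters when a buffer carries a range offset, assuming data() accounts for that offset while base() points at the start of the raw allocation. An illustrative stand-in (not the ABuffer implementation):

    #include <cstdint>
    #include <vector>

    class RangeBuffer {   // stand-in type, for illustration only
    public:
        RangeBuffer(size_t capacity, size_t offset)
            : mStorage(capacity), mOffset(offset) {}

        uint8_t* base() { return mStorage.data(); }            // start of the allocation
        uint8_t* data() { return mStorage.data() + mOffset; }  // start of the valid range

    private:
        std::vector<uint8_t> mStorage;
        size_t mOffset;
    };

    int main() {
        RangeBuffer buf(/*capacity=*/64, /*offset=*/16);
        // Writing through base() when the valid range starts at 16 would put
        // payload bytes where the consumer never looks; data() is the right view.
        return (buf.data() - buf.base()) == 16 ? 0 : 1;
    }
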
diff --git a/media/ndk/TEST_MAPPING b/media/ndk/TEST_MAPPING
index e420812..1a15728 100644
--- a/media/ndk/TEST_MAPPING
+++ b/media/ndk/TEST_MAPPING
@@ -1,7 +1,7 @@
 // mappings for frameworks/av/media/ndk
 {
   "presubmit": [
-    { "name": "AImageReaderWindowHandleTest" },
+    { "name": "AImageReaderWindowTest" },
     { "name": "libmediandk_test" }
   ]
 }
diff --git a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
index 2b22f0f..fcb0520 100644
--- a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
@@ -16,6 +16,7 @@
 
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/NdkMediaCrypto.h>
+#include <functional>
 
 constexpr size_t kMaxString = 256;
 constexpr size_t kMinBytes = 0;
diff --git a/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp b/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp
index 6c11798..6450742 100644
--- a/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp
@@ -18,6 +18,7 @@
 #include <fuzzer/FuzzedDataProvider.h>
 #include <gui/BufferQueue.h>
 #include <media/NdkImageReader.h>
+#include <functional>
 
 constexpr int32_t kMaxSize = INT_MAX;
 constexpr int32_t kMinSize = 1;
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index b6dcaae..4fc9918 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -534,15 +534,23 @@
  * Get the native_handle_t corresponding to the ANativeWindow owned by the
  * AImageReader provided.
  *
+ * This is deprecated on devices with vendor API level greater than 34 and
+ * will return AMEDIA_ERROR_UNKNOWN on those devices.
+ * The native_handle_t is no longer used with AIDL interfaces and
+ * ANativeWindow is used directly instead.
+ * Use AImageReader_getWindow to get the ANativeWindow and use that object.
+ *
  * @param reader The image reader of interest.
  * @param handle The output native_handle_t. This native handle is owned by
  *               this image reader.
  *
  * @return AMEDIA_OK if the method call succeeds.
  *         AMEDIA_ERROR_INVALID_PARAMETER if reader or handle are NULL.
- *         AMEDIA_ERROR_UNKNOWN if some other error is encountered.
+ *         AMEDIA_ERROR_UNKNOWN if some other error is encountered or
+ *         the device no longer has the android.hidl.token service
+ *         needed to satisfy the request, as this API is deprecated.
  */
-media_status_t AImageReader_getWindowNativeHandle(
+[[deprecated]] media_status_t AImageReader_getWindowNativeHandle(
     AImageReader *reader, /* out */native_handle_t **handle);
 #endif
 
diff --git a/media/ndk/include/media/NdkMediaDataSource.h b/media/ndk/include/media/NdkMediaDataSource.h
index 4158a97..197e202 100644
--- a/media/ndk/include/media/NdkMediaDataSource.h
+++ b/media/ndk/include/media/NdkMediaDataSource.h
@@ -14,6 +14,14 @@
  * limitations under the License.
  */
 
+/**
+ * @addtogroup Media
+ * @{
+ */
+
+/**
+ * @file NdkMediaDataSource.h
+ */
 
 /*
  * This file defines an NDK API.
diff --git a/media/ndk/include/media/NdkMediaExtractor.h b/media/ndk/include/media/NdkMediaExtractor.h
index e429820..af30b8b 100644
--- a/media/ndk/include/media/NdkMediaExtractor.h
+++ b/media/ndk/include/media/NdkMediaExtractor.h
@@ -221,9 +221,9 @@
 * available (end of stream). This API can be used in conjunction with
  * AMediaExtractor_readSampleData:
  *
- * ssize_t sampleSize = AMediaExtractor_getSampleSize(ex);
+ * <pre>ssize_t sampleSize = AMediaExtractor_getSampleSize(ex);
  * uint8_t *buf = new uint8_t[sampleSize];
- * AMediaExtractor_readSampleData(ex, buf, sampleSize);
+ * AMediaExtractor_readSampleData(ex, buf, sampleSize);</pre>
  *
  * Available since API level 28.
  */
@@ -245,13 +245,13 @@
 int64_t AMediaExtractor_getCachedDuration(AMediaExtractor *) __INTRODUCED_IN(28);
 
 /**
- * Read the current sample's metadata format into |fmt|. Examples of sample metadata are
+ * Read the current sample's metadata format into `fmt`. Examples of sample metadata are
  * SEI (supplemental enhancement information) and MPEG user data, both of which can embed
  * closed-caption data.
  *
  * Returns AMEDIA_OK on success or AMEDIA_ERROR_* to indicate failure reason.
- * Existing key-value pairs in |fmt| would be removed if this API returns AMEDIA_OK.
- * The contents of |fmt| is undefined if this API returns AMEDIA_ERROR_*.
+ * Existing key-value pairs in `fmt` would be removed if this API returns AMEDIA_OK.
+ * The contents of `fmt` is undefined if this API returns AMEDIA_ERROR_*.
  *
  * Available since API level 28.
  */
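
A short usage sketch tying together the documented calls above: size the destination from AMediaExtractor_getSampleSize(), read with AMediaExtractor_readSampleData(), and pull per-sample metadata with AMediaExtractor_getSampleFormat(). It assumes an extractor that already has a data source and a selected track (libmediandk, API 28+):

    #include <media/NdkMediaExtractor.h>
    #include <media/NdkMediaFormat.h>
    #include <vector>

    void drainSamples(AMediaExtractor* ex) {
        AMediaFormat* sampleMeta = AMediaFormat_new();
        for (;;) {
            ssize_t sampleSize = AMediaExtractor_getSampleSize(ex);
            if (sampleSize < 0) {
                break;  // no more samples (end of stream)
            }
            std::vector<uint8_t> buf(static_cast<size_t>(sampleSize));
            AMediaExtractor_readSampleData(ex, buf.data(), buf.size());
            if (AMediaExtractor_getSampleFormat(ex, sampleMeta) == AMEDIA_OK) {
                // sampleMeta now holds this sample's metadata
                // (e.g. SEI or MPEG user data carrying closed captions).
            }
            if (!AMediaExtractor_advance(ex)) {
                break;
            }
        }
        AMediaFormat_delete(sampleMeta);
    }
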
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 4f045fd..262c169 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -185,11 +185,11 @@
     AMediaCodecCryptoInfo_setPattern; # introduced=24
     AMediaCodec_configure;
     AMediaCodec_createCodecByName;
-    AMediaCodec_createCodecByNameForClient; # systemapi # introduced=31
+    AMediaCodec_createCodecByNameForClient; # systemapi introduced=31
     AMediaCodec_createDecoderByType;
-    AMediaCodec_createDecoderByTypeForClient; # systemapi # introduced=31
+    AMediaCodec_createDecoderByTypeForClient; # systemapi introduced=31
     AMediaCodec_createEncoderByType;
-    AMediaCodec_createEncoderByTypeForClient; # systemapi # introduced=31
+    AMediaCodec_createEncoderByTypeForClient; # systemapi introduced=31
     AMediaCodec_delete;
     AMediaCodec_dequeueInputBuffer;
     AMediaCodec_dequeueOutputBuffer;
diff --git a/media/ndk/tests/AImageReaderWindowHandleTest.cpp b/media/ndk/tests/AImageReaderWindowHandleTest.cpp
deleted file mode 100644
index 27864c2..0000000
--- a/media/ndk/tests/AImageReaderWindowHandleTest.cpp
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Copyright 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <gtest/gtest.h>
-#include <media/NdkImageReader.h>
-#include <media/NdkImage.h>
-#include <mediautils/AImageReaderUtils.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
-#include <NdkImagePriv.h>
-#include <NdkImageReaderPriv.h>
-#include <vndk/hardware_buffer.h>
-#include <memory>
-
-namespace android {
-
-using HGraphicBufferProducer = hardware::graphics::bufferqueue::V1_0::
-        IGraphicBufferProducer;
-using hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
-using aimg::AImageReader_getHGBPFromHandle;
-
-typedef IGraphicBufferProducer::QueueBufferInput QueueBufferInput;
-typedef IGraphicBufferProducer::QueueBufferOutput QueueBufferOutput;
-
-static constexpr uint64_t kImageBufferUsage =
-    AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
-static constexpr int kImageWidth = 640;
-static constexpr int kImageHeight = 480;
-static constexpr int kImageFormat = AIMAGE_FORMAT_RGBA_8888;
-static constexpr int kMaxImages = 1;
-
-static constexpr int64_t kQueueBufferInputTimeStamp = 1384888611;
-static constexpr bool kQueueBufferInputIsAutoTimeStamp = false;
-static constexpr android_dataspace kQueueBufferInputDataspace = HAL_DATASPACE_UNKNOWN;
-static const Rect kQueueBufferInputRect = Rect(kImageWidth, kImageHeight);
-static constexpr int kQueueBufferInputScalingMode = 0;
-static constexpr int kQueueBufferInputTransform = 0;
-static const sp<Fence> kQueueBufferInputFence = Fence::NO_FENCE;
-
-static constexpr int kOnImageAvailableWaitUs = 100 * 1000;
-
-class AImageReaderWindowHandleTest : public ::testing::Test {
-   public:
-    void SetUp() override {
-        AImageReader_newWithUsage(kImageWidth, kImageHeight, kImageFormat,
-                                  kImageBufferUsage , kMaxImages, &imageReader_);
-        media_status_t ret = AMEDIA_ERROR_UNKNOWN;
-        ASSERT_NE(imageReader_, nullptr);
-        ret = AImageReader_setImageListener(imageReader_,
-                                            &imageReaderAvailableCb_);
-        ASSERT_EQ(ret, AMEDIA_OK);
-        ret = AImageReader_setBufferRemovedListener(imageReader_,
-                                                    &imageReaderDetachedCb_);
-        ASSERT_EQ(ret, AMEDIA_OK);
-    }
-    void TearDown() override {
-        if (imageReader_) {
-            AImageReader_delete(imageReader_);
-        }
-    }
-
-    void HandleImageAvailable() {
-        AImage *outImage = nullptr;
-        media_status_t ret = AMEDIA_OK;
-        auto imageDeleter = [](AImage *img) { AImage_delete(img); };
-        std::unique_ptr<AImage, decltype(imageDeleter)> img(nullptr, imageDeleter);
-
-        // Test that the image can be acquired.
-        ret = AImageReader_acquireNextImage(imageReader_, &outImage);
-        ASSERT_EQ(ret, AMEDIA_OK);
-        img.reset(outImage);
-        ASSERT_NE(img, nullptr);
-
-        // Test that we can get a handle to the image's hardware buffer and a
-        // native handle to it.
-        AHardwareBuffer *hardwareBuffer = nullptr;
-        ret = AImage_getHardwareBuffer(img.get(), &hardwareBuffer);
-        ASSERT_EQ(ret, AMEDIA_OK);
-        ASSERT_NE(hardwareBuffer, nullptr);
-        const native_handle_t *nh = AHardwareBuffer_getNativeHandle(hardwareBuffer);
-        ASSERT_NE(nh, nullptr);
-        std::unique_lock<std::mutex> lock(imageAvailableMutex_);
-        imageAvailable_ = true;
-        imageCondVar_.notify_one();
-    }
-
-    static void onImageAvailable(void *context, AImageReader *reader) {
-        (void)reader;
-        AImageReaderWindowHandleTest *thisContext =
-            reinterpret_cast<AImageReaderWindowHandleTest *>(context);
-        thisContext->HandleImageAvailable();
-    }
-
-    static void onBufferRemoved(void *, AImageReader *, AHardwareBuffer *) {
-    }
-
-    AImageReader *imageReader_ = nullptr;
-    AImageReader_ImageListener imageReaderAvailableCb_{this, onImageAvailable};
-    AImageReader_BufferRemovedListener imageReaderDetachedCb_{this, onBufferRemoved};
-    std::mutex imageAvailableMutex_;
-    std::condition_variable imageCondVar_;
-    bool imageAvailable_ = false;
-};
-
-static void fillRGBA8Buffer(uint8_t* buf, int w, int h, int stride) {
-    const size_t PIXEL_SIZE = 4;
-    for (int x = 0; x < w; x++) {
-        for (int y = 0; y < h; y++) {
-            off_t offset = (y * stride + x) * PIXEL_SIZE;
-            for (int c = 0; c < 4; c++) {
-                int parityX = (x / (1 << (c+2))) & 1;
-                int parityY = (y / (1 << (c+2))) & 1;
-                buf[offset + c] = (parityX ^ parityY) ? 231 : 35;
-            }
-        }
-    }
-}
-
-TEST_F(AImageReaderWindowHandleTest, CreateWindowNativeHandle) {
-    // Check that we can create a native_handle_t corresponding to the
-    // AImageReader.
-    native_handle_t *nh = nullptr;
-    AImageReader_getWindowNativeHandle(imageReader_, &nh);
-    ASSERT_NE(nh, nullptr);
-
-    // Check that there are only ints in the handle.
-    ASSERT_EQ(nh->numFds, 0);
-    ASSERT_NE(nh->numInts, 0);
-
-    // Check that the HGBP can be retrieved from the handle.
-    sp<HGraphicBufferProducer> hgbp =  AImageReader_getHGBPFromHandle(nh);
-    ASSERT_NE(hgbp, nullptr);
-    sp<IGraphicBufferProducer> igbp = new H2BGraphicBufferProducer(hgbp);
-    int dequeuedSlot = -1;
-    sp<Fence> dequeuedFence;
-    IGraphicBufferProducer::QueueBufferOutput output;
-    ASSERT_EQ(OK, igbp->connect(nullptr, NATIVE_WINDOW_API_CPU, false, &output));
-
-    // Test that we can dequeue a buffer.
-    ASSERT_EQ(OK,
-              ~IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION &
-                      (igbp->dequeueBuffer(&dequeuedSlot, &dequeuedFence,
-                                           kImageWidth, kImageHeight,
-                                           kImageFormat, kImageBufferUsage,
-                                           nullptr, nullptr)));
-    EXPECT_LE(0, dequeuedSlot);
-    EXPECT_GT(BufferQueue::NUM_BUFFER_SLOTS, dequeuedSlot);
-
-    sp<GraphicBuffer> dequeuedBuffer;
-    igbp->requestBuffer(dequeuedSlot, &dequeuedBuffer);
-    uint8_t* img = nullptr;
-    ASSERT_EQ(NO_ERROR, dequeuedBuffer->lock(kImageBufferUsage, (void**)(&img)));
-
-    // Write in some dummy image data.
-    fillRGBA8Buffer(img, dequeuedBuffer->getWidth(), dequeuedBuffer->getHeight(),
-                    dequeuedBuffer->getStride());
-    ASSERT_EQ(NO_ERROR, dequeuedBuffer->unlock());
-    QueueBufferInput queueBufferInput(kQueueBufferInputTimeStamp,
-                                      kQueueBufferInputIsAutoTimeStamp,
-                                      kQueueBufferInputDataspace,
-                                      kQueueBufferInputRect,
-                                      kQueueBufferInputScalingMode,
-                                      kQueueBufferInputTransform,
-                                      kQueueBufferInputFence);
-    QueueBufferOutput queueBufferOutput;
-    ASSERT_EQ(OK, igbp->queueBuffer(dequeuedSlot, queueBufferInput,
-                                    &queueBufferOutput));
-    // wait until the onImageAvailable callback is called, or timeout completes.
-    std::unique_lock<std::mutex> lock(imageAvailableMutex_);
-    imageCondVar_.wait_for(lock, std::chrono::microseconds(kOnImageAvailableWaitUs),
-                           [this]{ return this->imageAvailable_;});
-    EXPECT_TRUE(imageAvailable_) <<  "Timed out waiting for image data to be handled!\n";
-}
-
-class AImageReaderPrivateFormatTest : public ::testing::Test {
-  public:
-    void SetUp() override {
-        auto status = AImageReader_new(kImageWidth, kImageHeight, AIMAGE_FORMAT_RAW_DEPTH,
-                                       kMaxImages, &imgReader);
-        EXPECT_TRUE(status == AMEDIA_OK);
-    }
-
-    void TearDown() override {
-        if (imgReader) {
-            AImageReader_delete(imgReader);
-        }
-    }
-    AImageReader *imgReader = nullptr;
-};
-
-TEST_F(AImageReaderPrivateFormatTest, CreateTest) {
-    EXPECT_TRUE(imgReader != nullptr);
-}
-
-
-}  // namespace android
diff --git a/media/ndk/tests/AImageReaderWindowTest.cpp b/media/ndk/tests/AImageReaderWindowTest.cpp
new file mode 100644
index 0000000..650b990
--- /dev/null
+++ b/media/ndk/tests/AImageReaderWindowTest.cpp
@@ -0,0 +1,240 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hidl/token/1.0/ITokenManager.h>
+#include <android/hidl/manager/1.2/IServiceManager.h>
+#include <gtest/gtest.h>
+#include <hidl/ServiceManagement.h>
+#include <media/NdkImageReader.h>
+#include <media/NdkImage.h>
+#include <mediautils/AImageReaderUtils.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <NdkImagePriv.h>
+#include <NdkImageReaderPriv.h>
+#include <vndk/hardware_buffer.h>
+#include <memory>
+
+namespace android {
+
+using HGraphicBufferProducer = hardware::graphics::bufferqueue::V1_0::
+        IGraphicBufferProducer;
+using hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
+using hidl::manager::V1_2::IServiceManager;
+using hidl::token::V1_0::ITokenManager;
+using aimg::AImageReader_getHGBPFromHandle;
+
+typedef IGraphicBufferProducer::QueueBufferInput QueueBufferInput;
+typedef IGraphicBufferProducer::QueueBufferOutput QueueBufferOutput;
+
+static constexpr uint64_t kImageBufferUsage =
+    AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
+static constexpr int kImageWidth = 640;
+static constexpr int kImageHeight = 480;
+static constexpr int kImageFormat = AIMAGE_FORMAT_RGBA_8888;
+static constexpr int kMaxImages = 1;
+
+static constexpr int64_t kQueueBufferInputTimeStamp = 1384888611;
+static constexpr bool kQueueBufferInputIsAutoTimeStamp = false;
+static constexpr android_dataspace kQueueBufferInputDataspace = HAL_DATASPACE_UNKNOWN;
+static const Rect kQueueBufferInputRect = Rect(kImageWidth, kImageHeight);
+static constexpr int kQueueBufferInputScalingMode = 0;
+static constexpr int kQueueBufferInputTransform = 0;
+static const sp<Fence> kQueueBufferInputFence = Fence::NO_FENCE;
+
+static constexpr int kOnImageAvailableWaitUs = 100 * 1000;
+
+class AImageReaderWindowTest : public ::testing::Test {
+   public:
+    void SetUp() override {
+        AImageReader_newWithUsage(kImageWidth, kImageHeight, kImageFormat,
+                                  kImageBufferUsage , kMaxImages, &imageReader_);
+        media_status_t ret = AMEDIA_ERROR_UNKNOWN;
+        ASSERT_NE(imageReader_, nullptr);
+        ret = AImageReader_setImageListener(imageReader_,
+                                            &imageReaderAvailableCb_);
+        ASSERT_EQ(ret, AMEDIA_OK);
+        ret = AImageReader_setBufferRemovedListener(imageReader_,
+                                                    &imageReaderDetachedCb_);
+        ASSERT_EQ(ret, AMEDIA_OK);
+    }
+    void TearDown() override {
+        if (imageReader_) {
+            AImageReader_delete(imageReader_);
+        }
+    }
+
+    void HandleImageAvailable() {
+        AImage *outImage = nullptr;
+        media_status_t ret = AMEDIA_OK;
+        auto imageDeleter = [](AImage *img) { AImage_delete(img); };
+        std::unique_ptr<AImage, decltype(imageDeleter)> img(nullptr, imageDeleter);
+
+        // Test that the image can be acquired.
+        ret = AImageReader_acquireNextImage(imageReader_, &outImage);
+        ASSERT_EQ(ret, AMEDIA_OK);
+        img.reset(outImage);
+        ASSERT_NE(img, nullptr);
+
+        // Test that we can get a handle to the image's hardware buffer and a
+        // native handle to it.
+        AHardwareBuffer *hardwareBuffer = nullptr;
+        ret = AImage_getHardwareBuffer(img.get(), &hardwareBuffer);
+        ASSERT_EQ(ret, AMEDIA_OK);
+        ASSERT_NE(hardwareBuffer, nullptr);
+        const native_handle_t *nh = AHardwareBuffer_getNativeHandle(hardwareBuffer);
+        ASSERT_NE(nh, nullptr);
+        std::unique_lock<std::mutex> lock(imageAvailableMutex_);
+        imageAvailable_ = true;
+        imageCondVar_.notify_one();
+    }
+
+    static void onImageAvailable(void *context, AImageReader *reader) {
+        (void)reader;
+        AImageReaderWindowTest *thisContext =
+            reinterpret_cast<AImageReaderWindowTest *>(context);
+        thisContext->HandleImageAvailable();
+    }
+
+    static void onBufferRemoved(void *, AImageReader *, AHardwareBuffer *) {
+    }
+
+    static void fillRGBA8Buffer(uint8_t* buf, int w, int h, int stride) {
+        const size_t PIXEL_SIZE = 4;
+        for (int x = 0; x < w; x++) {
+            for (int y = 0; y < h; y++) {
+                off_t offset = (y * stride + x) * PIXEL_SIZE;
+                for (int c = 0; c < 4; c++) {
+                    int parityX = (x / (1 << (c+2))) & 1;
+                    int parityY = (y / (1 << (c+2))) & 1;
+                    buf[offset + c] = (parityX ^ parityY) ? 231 : 35;
+                }
+            }
+        }
+    }
+
+    void validateIGBP(sp<IGraphicBufferProducer>& igbp) {
+        int dequeuedSlot = -1;
+        sp<Fence> dequeuedFence;
+        IGraphicBufferProducer::QueueBufferOutput output;
+        ASSERT_EQ(OK, igbp->connect(nullptr, NATIVE_WINDOW_API_CPU, false, &output));
+
+        // Test that we can dequeue a buffer.
+        ASSERT_EQ(OK,
+                  ~IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION &
+                          (igbp->dequeueBuffer(&dequeuedSlot, &dequeuedFence,
+                                               kImageWidth, kImageHeight,
+                                               kImageFormat, kImageBufferUsage,
+                                               nullptr, nullptr)));
+        EXPECT_LE(0, dequeuedSlot);
+        EXPECT_GT(BufferQueue::NUM_BUFFER_SLOTS, dequeuedSlot);
+
+        sp<GraphicBuffer> dequeuedBuffer;
+        igbp->requestBuffer(dequeuedSlot, &dequeuedBuffer);
+        uint8_t* img = nullptr;
+        ASSERT_EQ(NO_ERROR, dequeuedBuffer->lock(kImageBufferUsage, (void**)(&img)));
+
+        // Write in some placeholder image data.
+        fillRGBA8Buffer(img, dequeuedBuffer->getWidth(), dequeuedBuffer->getHeight(),
+                        dequeuedBuffer->getStride());
+        ASSERT_EQ(NO_ERROR, dequeuedBuffer->unlock());
+        QueueBufferInput queueBufferInput(kQueueBufferInputTimeStamp,
+                                          kQueueBufferInputIsAutoTimeStamp,
+                                          kQueueBufferInputDataspace,
+                                          kQueueBufferInputRect,
+                                          kQueueBufferInputScalingMode,
+                                          kQueueBufferInputTransform,
+                                          kQueueBufferInputFence);
+        QueueBufferOutput queueBufferOutput;
+        ASSERT_EQ(OK, igbp->queueBuffer(dequeuedSlot, queueBufferInput,
+                                        &queueBufferOutput));
+        // Wait until the onImageAvailable callback is called, or the timeout expires.
+        std::unique_lock<std::mutex> lock(imageAvailableMutex_);
+        imageCondVar_.wait_for(lock, std::chrono::microseconds(kOnImageAvailableWaitUs),
+                               [this]{ return this->imageAvailable_;});
+        EXPECT_TRUE(imageAvailable_) <<  "Timed out waiting for image data to be handled!\n";
+    }
+
+    AImageReader *imageReader_ = nullptr;
+    AImageReader_ImageListener imageReaderAvailableCb_{this, onImageAvailable};
+    AImageReader_BufferRemovedListener imageReaderDetachedCb_{this, onBufferRemoved};
+    std::mutex imageAvailableMutex_;
+    std::condition_variable imageCondVar_;
+    bool imageAvailable_ = false;
+};
+
+
+TEST_F(AImageReaderWindowTest, CreateWindowNativeHandle) {
+    // Check that we can create a native_handle_t corresponding to the
+    // AImageReader.
+    native_handle_t *nh = nullptr;
+    media_status_t status = AImageReader_getWindowNativeHandle(imageReader_, &nh);
+
+    // On newer devices without the HIDL TokenManager service, this API is
+    // deprecated and returns an error.
+    if (IServiceManager::Transport::EMPTY ==
+        hardware::defaultServiceManager1_2()->getTransport(ITokenManager::descriptor, "default")) {
+      EXPECT_EQ(status, AMEDIA_ERROR_UNKNOWN);
+      return;
+    }
+    ASSERT_NE(nh, nullptr);
+
+    // Check that there are only ints in the handle.
+    ASSERT_EQ(nh->numFds, 0);
+    ASSERT_NE(nh->numInts, 0);
+
+    // Check that the HGBP can be retrieved from the handle.
+    sp<HGraphicBufferProducer> hgbp =  AImageReader_getHGBPFromHandle(nh);
+    ASSERT_NE(hgbp, nullptr);
+    sp<IGraphicBufferProducer> igbp = new H2BGraphicBufferProducer(hgbp);
+
+    validateIGBP(igbp);
+}
+
+TEST_F(AImageReaderWindowTest, CreateWindow) {
+    ANativeWindow* window = nullptr;
+    media_status_t status = AImageReader_getWindow(imageReader_, &window);
+
+    ASSERT_NE(window, nullptr);
+
+    sp<IGraphicBufferProducer> igbp = Surface::getIGraphicBufferProducer(window);
+
+    validateIGBP(igbp);
+}
+
+class AImageReaderPrivateFormatTest : public ::testing::Test {
+  public:
+    void SetUp() override {
+        auto status = AImageReader_new(kImageWidth, kImageHeight, AIMAGE_FORMAT_RAW_DEPTH,
+                                       kMaxImages, &imgReader);
+        EXPECT_TRUE(status == AMEDIA_OK);
+    }
+
+    void TearDown() override {
+        if (imgReader) {
+            AImageReader_delete(imgReader);
+        }
+    }
+    AImageReader *imgReader = nullptr;
+};
+
+TEST_F(AImageReaderPrivateFormatTest, CreateTest) {
+    EXPECT_TRUE(imgReader != nullptr);
+}
+
+
+}  // namespace android
diff --git a/media/utils/BatteryNotifier.cpp b/media/utils/BatteryNotifier.cpp
index 09bc042..7762c24 100644
--- a/media/utils/BatteryNotifier.cpp
+++ b/media/utils/BatteryNotifier.cpp
@@ -85,8 +85,8 @@
 
 void BatteryNotifier::noteStopAudio(uid_t uid) {
     Mutex::Autolock _l(mLock);
-    if (mAudioRefCounts.find(uid) == mAudioRefCounts.end()) {
-        ALOGW("%s: audio refcount is broken for uid(%d).", __FUNCTION__, (int)uid);
+    if (mAudioRefCounts.find(uid) == mAudioRefCounts.end() || (mAudioRefCounts[uid] == 0)) {
+        ALOGE("%s: audio refcount is broken for uid(%d).", __FUNCTION__, (int)uid);
         return;
     }
 
diff --git a/media/utils/MethodStatistics.cpp b/media/utils/MethodStatistics.cpp
index 086757b..80f0fc4 100644
--- a/media/utils/MethodStatistics.cpp
+++ b/media/utils/MethodStatistics.cpp
@@ -20,6 +20,8 @@
 
 // Repository for MethodStatistics Objects
 
+// It's important that the HAL class names are defined with the "Hidl"/"Aidl" suffix because
+// TimerThread::isRequestFromHal uses these suffixes to match binder calls to/from the HAL.
 std::shared_ptr<std::vector<std::string>>
 getStatisticsClassesForModule(std::string_view moduleName) {
     static const std::map<std::string, std::shared_ptr<std::vector<std::string>>,
@@ -34,6 +36,15 @@
                 "StreamOutHalHidl",
               })
         },
+        {
+            METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL,
+            std::shared_ptr<std::vector<std::string>>(
+                new std::vector<std::string>{
+                "DeviceHalAidl",
+                "EffectHalAidl",
+                "StreamHalAidl",
+              })
+        },
     };
     auto it = m.find(moduleName);
     if (it == m.end()) return {};
@@ -61,6 +72,9 @@
             addClassesToMap(
                     getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL),
                     m);
+            addClassesToMap(
+                    getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL),
+                    m);
             return m;
         }();
 
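A usage sketch (not part of the patch) of looking up the class list registered above for the new "AudioAidl" module; the standalone main() is only illustrative.

#include <mediautils/MethodStatistics.h>
#include <cstdio>

int main() {
    // Returns the registered class names ("DeviceHalAidl", "EffectHalAidl",
    // "StreamHalAidl"), or nullptr for an unknown module name.
    const auto classes = android::mediautils::getStatisticsClassesForModule(
            METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL);
    if (classes == nullptr) return 1;
    for (const auto& className : *classes) {
        std::printf("%s\n", className.c_str());
    }
    return 0;
}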
diff --git a/media/utils/OWNERS b/media/utils/OWNERS
index f9cb567..fe3205a 100644
--- a/media/utils/OWNERS
+++ b/media/utils/OWNERS
@@ -1 +1,4 @@
-gkasten@google.com
+# Bug component: 48436
+atneya@google.com
+hunga@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/utils/TimerThread.cpp b/media/utils/TimerThread.cpp
index 3966103..25852e4 100644
--- a/media/utils/TimerThread.cpp
+++ b/media/utils/TimerThread.cpp
@@ -104,11 +104,16 @@
 //
 /* static */
 bool TimerThread::isRequestFromHal(const std::shared_ptr<const Request>& request) {
-    const size_t hidlPos = request->tag.asStringView().find("Hidl");
-    if (hidlPos == std::string::npos) return false;
-    // should be a separator afterwards Hidl which indicates the string was in the class.
-    const size_t separatorPos = request->tag.asStringView().find("::", hidlPos);
-    return separatorPos != std::string::npos;
+    for (const auto& s : {"Hidl", "Aidl"}) {
+        const auto& tagSV = request->tag.asStringView();
+        const size_t halStrPos = tagSV.find(s);
+        // A separator after Hidl/Aidl indicates the substring was part of the class name.
+        if (halStrPos != std::string::npos && tagSV.find("::", halStrPos) != std::string::npos) {
+            return true;
+        }
+    }
+
+    return false;
 }
 
 struct TimerThread::SnapshotAnalysis TimerThread::getSnapshotAnalysis(size_t retiredCount) const {
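The "Hidl"/"Aidl" suffix requirement noted in MethodStatistics.cpp above exists because of this matching rule. A standalone sketch of the rule (not part of the patch; the helper name and the method names inside the tag strings are illustrative):

#include <cassert>
#include <string_view>

// Mirrors TimerThread::isRequestFromHal: a tag is treated as a HAL request when it
// contains "Hidl" or "Aidl" followed later by "::", i.e. the suffix is part of the
// class name rather than the method name.
static bool looksLikeHalRequest(std::string_view tag) {
    for (const char* marker : {"Hidl", "Aidl"}) {
        const auto pos = tag.find(marker);
        if (pos != std::string_view::npos && tag.find("::", pos) != std::string_view::npos) {
            return true;
        }
    }
    return false;
}

int main() {
    assert(looksLikeHalRequest("StreamOutHalHidl::write"));
    assert(looksLikeHalRequest("DeviceHalAidl::setParameters"));
    assert(!looksLikeHalRequest("IAudioFlinger::createTrack"));  // no Hidl/Aidl class suffix
    return 0;
}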
diff --git a/media/utils/include/mediautils/BatteryNotifier.h b/media/utils/include/mediautils/BatteryNotifier.h
index 3812d7a..73bed4a 100644
--- a/media/utils/include/mediautils/BatteryNotifier.h
+++ b/media/utils/include/mediautils/BatteryNotifier.h
@@ -68,6 +68,38 @@
     sp<IBatteryStats> getBatteryService_l();
 };
 
+namespace mediautils {
+class BatteryStatsAudioHandle {
+  public:
+    static constexpr uid_t INVALID_UID = static_cast<uid_t>(-1);
+
+    explicit BatteryStatsAudioHandle(uid_t uid) : mUid(uid) {
+        if (uid != INVALID_UID) {
+            BatteryNotifier::getInstance().noteStartAudio(mUid);
+        }
+    }
+
+    BatteryStatsAudioHandle(BatteryStatsAudioHandle&& other) : mUid(other.mUid) {
+        other.mUid = INVALID_UID;
+    }
+
+    BatteryStatsAudioHandle(const BatteryStatsAudioHandle& other) = delete;
+
+    BatteryStatsAudioHandle& operator=(const BatteryStatsAudioHandle& other) = delete;
+
+    BatteryStatsAudioHandle& operator=(BatteryStatsAudioHandle&& other) = delete;
+
+    ~BatteryStatsAudioHandle() {
+        if (mUid != INVALID_UID) {
+            BatteryNotifier::getInstance().noteStopAudio(mUid);
+        }
+    }
+
+  private:
+    // Logically const
+    uid_t mUid = INVALID_UID;
+};
+}  // namespace mediautils
 }  // namespace android
 
 #endif // MEDIA_BATTERY_NOTIFIER_H
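A usage sketch (not part of the patch) of the new RAII handle; the function playForClient() and its parameter are hypothetical.

#include <sys/types.h>
#include <mediautils/BatteryNotifier.h>

void playForClient(uid_t clientUid) {
    // noteStartAudio(clientUid) is called here, unless clientUid == INVALID_UID.
    android::mediautils::BatteryStatsAudioHandle handle(clientUid);

    // ... perform audio work on behalf of clientUid ...

}  // noteStopAudio(clientUid) is called when 'handle' goes out of scope.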
diff --git a/media/utils/include/mediautils/ExtendedAccumulator.h b/media/utils/include/mediautils/ExtendedAccumulator.h
index 7e3e170..30045f3 100644
--- a/media/utils/include/mediautils/ExtendedAccumulator.h
+++ b/media/utils/include/mediautils/ExtendedAccumulator.h
@@ -48,9 +48,9 @@
 
   public:
     enum class Wrap {
-        NORMAL = 0,
-        UNDERFLOW = 1,
-        OVERFLOW = 2,
+        Normal = 0,
+        Underflow = 1,
+        Overflow = 2,
     };
 
     using UnsignedInt = Integral;
@@ -63,11 +63,11 @@
     std::pair<SignedInt, Wrap> poll(UnsignedInt value) {
         auto acc = mAccumulated.load(std::memory_order_relaxed);
         const auto bottom_bits = static_cast<UnsignedInt>(acc);
-        std::pair<SignedInt, Wrap> res = {0, Wrap::NORMAL};
+        std::pair<SignedInt, Wrap> res = {0, Wrap::Normal};
         const bool overflow = __builtin_sub_overflow(value, bottom_bits, &res.first);
 
         if (overflow) {
-            res.second = (res.first > 0) ? Wrap::OVERFLOW : Wrap::UNDERFLOW;
+            res.second = (res.first > 0) ? Wrap::Overflow : Wrap::Underflow;
         }
 
         const bool acc_overflow = __builtin_add_overflow(acc, res.first, &acc);
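The rename above only changes the enumerator casing; the wrap classification is unchanged. A standalone sketch (not part of the patch) mirroring the poll() logic shown above; the helper name and counter values are illustrative.

#include <cassert>
#include <cstdint>
#include <utility>

enum class Wrap { Normal, Underflow, Overflow };

// Compute the signed delta between two 32-bit counter readings and report
// whether the counter wrapped, as poll() does before updating mAccumulated.
static std::pair<int32_t, Wrap> classifyDelta(uint32_t previous, uint32_t current) {
    std::pair<int32_t, Wrap> res{0, Wrap::Normal};
    if (__builtin_sub_overflow(current, previous, &res.first)) {
        res.second = (res.first > 0) ? Wrap::Overflow : Wrap::Underflow;
    }
    return res;
}

int main() {
    const auto step = classifyDelta(100u, 110u);
    assert(step.first == 10 && step.second == Wrap::Normal);

    const auto wrapUp = classifyDelta(0xFFFFFFF0u, 0x10u);    // counter passed UINT32_MAX
    assert(wrapUp.first == 32 && wrapUp.second == Wrap::Overflow);

    const auto wrapDown = classifyDelta(0x10u, 0xFFFFFFF0u);  // counter stepped backwards
    assert(wrapDown.first == -32 && wrapDown.second == Wrap::Underflow);
    return 0;
}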
diff --git a/media/utils/include/mediautils/FixedString.h b/media/utils/include/mediautils/FixedString.h
index 047aa82..c316813 100644
--- a/media/utils/include/mediautils/FixedString.h
+++ b/media/utils/include/mediautils/FixedString.h
@@ -101,10 +101,15 @@
         return strncmp(c_str(), s, capacity() + 1) == 0;
     }
 
-    bool operator==(std::string_view s) const {
+    bool operator==(const std::string_view s) const {
         return size() == s.size() && memcmp(data(), s.data(), size()) == 0;
     }
 
+    template <uint32_t N_>
+    bool operator==(const FixedString<N_>& s) const {
+        return operator==(s.asStringView());
+    }
+
     // operator not-equals
     template <typename T>
     bool operator!=(const T& other) const {
diff --git a/media/utils/include/mediautils/MethodStatistics.h b/media/utils/include/mediautils/MethodStatistics.h
index c8b36d8..2543dfa 100644
--- a/media/utils/include/mediautils/MethodStatistics.h
+++ b/media/utils/include/mediautils/MethodStatistics.h
@@ -124,6 +124,7 @@
 // Managed Statistics support.
 // Supported Modules
 #define METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL "AudioHidl"
+#define METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL "AudioAidl"
 
 // Returns a vector of class names for the module, or a nullptr if module not found.
 std::shared_ptr<std::vector<std::string>>
diff --git a/media/utils/include/mediautils/StaticStringView.h b/media/utils/include/mediautils/StaticStringView.h
index 14be240..e9a5deb 100644
--- a/media/utils/include/mediautils/StaticStringView.h
+++ b/media/utils/include/mediautils/StaticStringView.h
@@ -21,15 +21,15 @@
 
 #pragma push_macro("EXPLICIT_CONVERSION_GENERATE_OPERATOR")
 #undef EXPLICIT_CONVERSION_GENERATE_OPERATOR
-#define EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, op)               \
-    friend constexpr bool operator op(T lhs, T rhs) {                 \
-        return operator op(static_cast<U>(lhs), static_cast<U>(rhs)); \
-    }                                                                 \
-    friend constexpr bool operator op(T lhs, U rhs) {                 \
-        return operator op(static_cast<U>(lhs), rhs);                 \
-    }                                                                 \
-    friend constexpr bool operator op(U lhs, T rhs) {                 \
-        return operator op(lhs, static_cast<U>(rhs));                 \
+#define EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, op)    \
+    friend constexpr bool operator op(T lhs, T rhs) {      \
+        return static_cast<U>(lhs) op static_cast<U>(rhs); \
+    }                                                      \
+    friend constexpr bool operator op(T lhs, U rhs) {      \
+        return static_cast<U>(lhs) op rhs;                 \
+    }                                                      \
+    friend constexpr bool operator op(U lhs, T rhs) {      \
+        return lhs op static_cast<U>(rhs);                 \
     }
 
 #pragma push_macro("EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS")
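The rewritten macro applies the operator directly to the values converted to the underlying type instead of invoking a named operator function. A sketch (not part of the patch) of what one expansion is equivalent to, using a hypothetical enum and written as free functions rather than the macro's friend functions:

#include <cstdint>

enum class Level : uint32_t { Low = 1, High = 7 };

// Hand-expanded equivalent of EXPLICIT_CONVERSION_GENERATE_OPERATOR(Level, uint32_t, <)
constexpr bool operator<(Level lhs, Level rhs) {
    return static_cast<uint32_t>(lhs) < static_cast<uint32_t>(rhs);
}
constexpr bool operator<(Level lhs, uint32_t rhs) { return static_cast<uint32_t>(lhs) < rhs; }
constexpr bool operator<(uint32_t lhs, Level rhs) { return lhs < static_cast<uint32_t>(rhs); }

static_assert(Level::Low < Level::High);
static_assert(Level::Low < 5u);
static_assert(!(7u < Level::High));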
diff --git a/media/utils/tests/extended_accumulator_tests.cpp b/media/utils/tests/extended_accumulator_tests.cpp
index e243e7e..2591df0 100644
--- a/media/utils/tests/extended_accumulator_tests.cpp
+++ b/media/utils/tests/extended_accumulator_tests.cpp
@@ -68,10 +68,10 @@
     EXPECT_EQ(result, delta);
 
     // Test overflow/underflow event reporting.
-    if (next < base) EXPECT_EQ(TestDetect::Wrap::UNDERFLOW, status);
+    if (next < base) EXPECT_EQ(TestDetect::Wrap::Underflow, status);
     else if (next > base + std::numeric_limits<TestUInt>::max())
-        EXPECT_EQ(TestDetect::Wrap::OVERFLOW, status);
-    else EXPECT_EQ(TestDetect::Wrap::NORMAL, status);
+        EXPECT_EQ(TestDetect::Wrap::Overflow, status);
+    else EXPECT_EQ(TestDetect::Wrap::Normal, status);
 }
 
 // Test this utility on every combination of prior and update value for the
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 0cd6243..0c878c9 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -142,9 +142,10 @@
     name: "libaudioflinger",
 
     defaults: [
-        "latest_android_media_audio_common_types_cpp_shared",
-        "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "audioflinger_flags_defaults",
+        "latest_android_hardware_audio_core_sounddose_ndk_export_shared_lib_header",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "latest_android_media_audio_common_types_cpp_shared",
     ],
 
     srcs: [
@@ -217,7 +218,6 @@
 
     export_shared_lib_headers: [
         "libpermission",
-        "android.hardware.audio.core.sounddose-V1-ndk",
     ],
 
     cflags: [
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 0d539c0..403fb9e 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -27,6 +27,7 @@
 //#define BUFLOG_NDEBUG 0
 #include <afutils/BufLog.h>
 #include <afutils/DumpTryLock.h>
+#include <afutils/NBAIO_Tee.h>
 #include <afutils/Permission.h>
 #include <afutils/PropertyUtils.h>
 #include <afutils/TypedLogger.h>
@@ -189,6 +190,7 @@
 BINDER_METHOD_ENTRY(supportsBluetoothVariableLatency) \
 BINDER_METHOD_ENTRY(getSoundDoseInterface) \
 BINDER_METHOD_ENTRY(getAudioPolicyConfig) \
+BINDER_METHOD_ENTRY(getAudioMixPort) \
 
 // singleton for Binder Method Statistics for IAudioFlinger
 static auto& getIAudioFlingerStatistics() {
@@ -268,9 +270,6 @@
     BatteryNotifier::getInstance().noteResetAudio();
 
     mMediaLogNotifier->run("MediaLogNotifier");
-    std::vector<pid_t> halPids;
-    mDevicesFactoryHal->getHalPids(&halPids);
-    mediautils::TimeCheck::setAudioHalPids(halPids);
 
     // Notify that we have started (also called when audioserver service restarts)
     mediametrics::LogItem(mMetricsId)
@@ -857,12 +856,15 @@
             dprintf(fd, "\nIEffect binder call profile:\n");
             write(fd, timeCheckStats.c_str(), timeCheckStats.size());
 
-            // Automatically fetch HIDL statistics.
-            std::shared_ptr<std::vector<std::string>> hidlClassNames =
-                    mediautils::getStatisticsClassesForModule(
-                            METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL);
-            if (hidlClassNames) {
-                for (const auto& className : *hidlClassNames) {
+            // Automatically fetch HIDL or AIDL statistics.
+            const std::string_view halType = (mDevicesFactoryHal->getHalVersion().getType() ==
+                                      AudioHalVersionInfo::Type::HIDL)
+                                             ? METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL
+                                             : METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL;
+            const std::shared_ptr<std::vector<std::string>> halClassNames =
+                    mediautils::getStatisticsClassesForModule(halType);
+            if (halClassNames) {
+                for (const auto& className : *halClassNames) {
                     auto stats = mediautils::getStatisticsForClass(className);
                     if (stats) {
                         timeCheckStats = stats->dump();
@@ -1888,7 +1890,7 @@
         return 0;
     }
     if ((sampleRate == 0) ||
-            !audio_is_valid_format(format) || !audio_has_proportional_frames(format) ||
+            !audio_is_valid_format(format) ||
             !audio_is_input_channel(channelMask)) {
         return 0;
     }
@@ -1911,6 +1913,10 @@
 
     std::vector<audio_format_t> formats = {format};
     if (format != AUDIO_FORMAT_PCM_16_BIT) {
+        // For compressed formats, the buffer size may be queried using PCM. Allow this for
+        // compatibility in case the primary HW device does not support the format.
+        // TODO: replace with a table of formats and nominal buffer sizes (based on nominal bitrate
+        // and codec frame size).
         formats.push_back(AUDIO_FORMAT_PCM_16_BIT);
     }
 
@@ -2089,7 +2095,8 @@
     }
 }
 
-void AudioFlinger::ioConfigChanged(audio_io_config_event_t event,
+// Hold either AudioFlinger::mutex or ThreadBase::mutex
+void AudioFlinger::ioConfigChanged_l(audio_io_config_event_t event,
                                    const sp<AudioIoDescriptor>& ioDesc,
                                    pid_t pid) {
     media::AudioIoConfigEvent eventAidl = VALUE_OR_FATAL(
@@ -2261,8 +2268,8 @@
     }
     adjAttributionSource = afutils::checkAttributionSourcePackage(
             adjAttributionSource);
-    // we don't yet support anything other than linear PCM
-    if (!audio_is_valid_format(input.config.format) || !audio_is_linear_pcm(input.config.format)) {
+    // further format checks are performed by createRecordTrack_l()
+    if (!audio_is_valid_format(input.config.format)) {
         ALOGE("createRecord() invalid format %#x", input.config.format);
         lStatus = BAD_VALUE;
         goto Exit;
@@ -2943,7 +2950,7 @@
             latencyMs = playbackThread->latency();
 
             // notify client processes of the new output creation
-            playbackThread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
+            playbackThread->ioConfigChanged_l(AUDIO_OUTPUT_OPENED);
 
             // the first primary output opened designates the primary hw device if no HW module
             // named "primary" was already loaded.
@@ -2957,7 +2964,7 @@
                 mHardwareStatus = AUDIO_HW_IDLE;
             }
         } else {
-            thread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
+            thread->ioConfigChanged_l(AUDIO_OUTPUT_OPENED);
         }
         response->output = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
         response->config = VALUE_OR_RETURN_STATUS(
@@ -2990,7 +2997,7 @@
     thread->addOutputTrack(thread2);
     mPlaybackThreads.add(id, thread);
     // notify client processes of the new output creation
-    thread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
+    thread->ioConfigChanged_l(AUDIO_OUTPUT_OPENED);
     return id;
 }
 
@@ -3050,7 +3057,7 @@
             mMmapThreads.removeItem(output);
             ALOGD("closing mmapThread %p", mmapThread.get());
         }
-        ioConfigChanged(AUDIO_OUTPUT_CLOSED, sp<AudioIoDescriptor>::make(output));
+        ioConfigChanged_l(AUDIO_OUTPUT_CLOSED, sp<AudioIoDescriptor>::make(output));
         mPatchPanel->notifyStreamClosed(output);
     }
     // The thread entity (active unit of execution) is no longer running here,
@@ -3153,7 +3160,7 @@
 
     if (thread != 0) {
         // notify client processes of the new input creation
-        thread->ioConfigChanged(AUDIO_INPUT_OPENED);
+        thread->ioConfigChanged_l(AUDIO_INPUT_OPENED);
         return NO_ERROR;
     }
     return NO_INIT;
@@ -3189,41 +3196,19 @@
         return 0;
     }
 
-    audio_config_t halconfig = *config;
-    sp<DeviceHalInterface> inHwHal = inHwDev->hwDevice();
-    sp<StreamInHalInterface> inStream;
-    status_t status = inHwHal->openInputStream(
-            *input, devices, &halconfig, flags, address, source,
-            outputDevice, outputDeviceAddress, &inStream);
-    ALOGV("openInput_l() openInputStream returned input %p, devices %#x, SamplingRate %d"
-           ", Format %#x, Channels %#x, flags %#x, status %d addr %s",
-            inStream.get(),
+    AudioStreamIn *inputStream = nullptr;
+    status_t status = inHwDev->openInputStream(
+            &inputStream,
+            *input,
             devices,
-            halconfig.sample_rate,
-            halconfig.format,
-            halconfig.channel_mask,
             flags,
-            status, address);
+            config,
+            address,
+            source,
+            outputDevice,
+            outputDeviceAddress.c_str());
 
-    // If the input could not be opened with the requested parameters and we can handle the
-    // conversion internally, try to open again with the proposed parameters.
-    if (status == BAD_VALUE &&
-        audio_is_linear_pcm(config->format) &&
-        audio_is_linear_pcm(halconfig.format) &&
-        (halconfig.sample_rate <= AUDIO_RESAMPLER_DOWN_RATIO_MAX * config->sample_rate) &&
-        (audio_channel_count_from_in_mask(halconfig.channel_mask) <= FCC_LIMIT) &&
-        (audio_channel_count_from_in_mask(config->channel_mask) <= FCC_LIMIT)) {
-        // FIXME describe the change proposed by HAL (save old values so we can log them here)
-        ALOGV("openInput_l() reopening with proposed sampling rate and channel mask");
-        inStream.clear();
-        status = inHwHal->openInputStream(
-                *input, devices, &halconfig, flags, address, source,
-                outputDevice, outputDeviceAddress, &inStream);
-        // FIXME log this new status; HAL should not propose any further changes
-    }
-
-    if (status == NO_ERROR && inStream != 0) {
-        AudioStreamIn *inputStream = new AudioStreamIn(inHwDev, inStream, flags);
+    if (status == NO_ERROR) {
         if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
             const sp<IAfMmapCaptureThread> thread =
                     IAfMmapCaptureThread::create(this, *input, inHwDev, inputStream, mSystemReady);
@@ -3312,7 +3297,7 @@
             dumpToThreadLog_l(mmapThread);
             mMmapThreads.removeItem(input);
         }
-        ioConfigChanged(AUDIO_INPUT_CLOSED, sp<AudioIoDescriptor>::make(input));
+        ioConfigChanged_l(AUDIO_INPUT_CLOSED, sp<AudioIoDescriptor>::make(input));
     }
     // FIXME: calling thread->exit() without mutex() held should not be needed anymore now that
     // we have a different lock for notification client
@@ -3692,7 +3677,8 @@
         return {};
     }
 
-    return thread->outDeviceTypes();
+    audio_utils::lock_guard l(thread->mutex());
+    return thread->outDeviceTypes_l();
 }
 
 IAfPlaybackThread* AudioFlinger::fastPlaybackThread_l() const
@@ -3815,7 +3801,7 @@
         patchTrack->setPeerProxy(patchRecord, true /* holdReference */);
         patchRecord->setPeerProxy(patchTrack, false /* holdReference */);
     }
-    track->setTeePatchesToUpdate(std::move(teePatches));
+    track->setTeePatchesToUpdate_l(std::move(teePatches));
 }
 
 sp<audioflinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
@@ -4269,7 +4255,7 @@
 
     response->id = idOut;
     response->enabled = enabledOut != 0;
-    response->effect = handle->asIEffect();
+    response->effect = handle.get() ? handle->asIEffect() : nullptr;
     response->desc = VALUE_OR_RETURN_STATUS(
             legacy2aidl_effect_descriptor_t_EffectDescriptor(descOut));
 
@@ -4486,8 +4472,9 @@
     }
 
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
-        sp<IAfEffectChain> ec =
-                mPlaybackThreads.valueAt(i)->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
+        const auto thread = mPlaybackThreads.valueAt(i);
+        audio_utils::lock_guard l(thread->mutex());
+        const sp<IAfEffectChain> ec = thread->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
         if (ec != 0 && ec->isNonOffloadableEnabled()) {
             return true;
         }
@@ -4607,6 +4594,24 @@
     return mPatchPanel->listAudioPatches_l(num_patches, patches);
 }
 
+/**
+ * Get the attributes of the mix port when connecting to the given device port.
+ */
+status_t AudioFlinger::getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                       struct audio_port_v7 *mixPort) const {
+    if (status_t status = AudioValidator::validateAudioPort(*devicePort); status != NO_ERROR) {
+        ALOGE("%s, invalid device port, status=%d", __func__, status);
+        return status;
+    }
+    if (status_t status = AudioValidator::validateAudioPort(*mixPort); status != NO_ERROR) {
+        ALOGE("%s, invalid mix port, status=%d", __func__, status);
+        return status;
+    }
+
+    audio_utils::lock_guard _l(mutex());
+    return mPatchPanel->getAudioMixPort_l(devicePort, mixPort);
+}
+
 // ----------------------------------------------------------------------------
 
 status_t AudioFlinger::onTransactWrapper(TransactionCode code,
@@ -4640,6 +4645,7 @@
         case TransactionCode::GET_SUPPORTED_LATENCY_MODES:
         case TransactionCode::INVALIDATE_TRACKS:
         case TransactionCode::GET_AUDIO_POLICY_CONFIG:
+        case TransactionCode::GET_AUDIO_MIX_PORT:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, code, IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
@@ -4728,16 +4734,6 @@
     mediautils::TimeCheck::kDefaultSecondChanceDuration,
     true /* crashOnTimeout */);
 
-    // Make sure we connect to Audio Policy Service before calling into AudioFlinger:
-    //  - AudioFlinger can call into Audio Policy Service with its global mutex held
-    //  - If this is the first time Audio Policy Service is queried from inside audioserver process
-    //  this will trigger Audio Policy Manager initialization.
-    //  - Audio Policy Manager initialization calls into AudioFlinger which will try to lock
-    //  its global mutex and a deadlock will occur.
-    if (IPCThreadState::self()->getCallingPid() != getpid()) {
-        AudioSystem::get_audio_policy_service();
-    }
-
     return delegate();
 }
 
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 2c34144..b1751da 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -255,6 +255,10 @@
     status_t getAudioPolicyConfig(media::AudioPolicyConfig* config) final
             EXCLUDES_AudioFlinger_Mutex;
 
+    // Get the attributes of the mix port when connecting to the given device port.
+    status_t getAudioMixPort(const struct audio_port_v7* devicePort,
+                             struct audio_port_v7* mixPort) const final EXCLUDES_AudioFlinger_Mutex;
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
             const std::function<status_t()>& delegate) final EXCLUDES_AudioFlinger_Mutex;
 
@@ -338,7 +342,8 @@
 
     // ----- begin IAfThreadCallback interface
 
-    bool isNonOffloadableGlobalEffectEnabled_l() const final REQUIRES(mutex());
+    bool isNonOffloadableGlobalEffectEnabled_l() const final
+            REQUIRES(mutex()) EXCLUDES_ThreadBase_Mutex;
     bool btNrecIsOff() const final { return mBtNrecIsOff.load(); }
     float masterVolume_l() const final REQUIRES(mutex());
     bool masterMute_l() const final REQUIRES(mutex());
@@ -383,7 +388,8 @@
             const audioflinger::SyncEventCallback& callBack,
             const wp<IAfTrackBase>& cookie) final EXCLUDES_AudioFlinger_Mutex;
 
-    void ioConfigChanged(audio_io_config_event_t event,
+    // Hold either AudioFlinger::mutex or ThreadBase::mutex
+    void ioConfigChanged_l(audio_io_config_event_t event,
             const sp<AudioIoDescriptor>& ioDesc,
             pid_t pid = 0) final EXCLUDES_AudioFlinger_ClientMutex;
     void onNonOffloadableGlobalEffectEnable() final EXCLUDES_AudioFlinger_Mutex;
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 0f3e130..95fed5b 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -2570,6 +2570,7 @@
             uint32_t rightZero = 0;
             volumeControlEffect->setVolume(&leftZero, &rightZero, true /*controller*/);
         }
+        mVolumeControlEffect = volumeControlEffect;
     }
     mLeftVolume = newLeft;
     mRightVolume = newRight;
@@ -3084,7 +3085,10 @@
     return t->sampleRate();
 }
 
-audio_channel_mask_t EffectChain::EffectCallback::inChannelMask(int id) const {
+audio_channel_mask_t EffectChain::EffectCallback::inChannelMask(int id) const
+NO_THREAD_SAFETY_ANALYSIS
+// calling function 'hasAudioSession_l' requires holding mutex 'ThreadBase_Mutex' exclusively
+{
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
         return AUDIO_CHANNEL_NONE;
@@ -3120,7 +3124,10 @@
     return audio_channel_count_from_out_mask(inChannelMask(id));
 }
 
-audio_channel_mask_t EffectChain::EffectCallback::outChannelMask() const {
+audio_channel_mask_t EffectChain::EffectCallback::outChannelMask() const
+NO_THREAD_SAFETY_ANALYSIS
+// calling function 'hasAudioSession_l' requires holding mutex 'ThreadBase_Mutex' exclusively
+{
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
         return AUDIO_CHANNEL_NONE;
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index aeb0fea..8869b69 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -488,8 +488,10 @@
     bool isBitPerfectCompatible() const final;
 
     // isCompatibleWithThread_l() must be called with thread->mutex() held
-    bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const final;
+    bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
 
+    // Requires either IAfThreadBase::mutex() or EffectChain::mutex() held
     bool containsHapticGeneratingEffect_l() final;
 
     void setHapticIntensity_l(int id, os::HapticScale intensity) final;
@@ -648,7 +650,7 @@
 
              const sp<EffectCallback> mEffectCallback;
 
-             wp<EffectModule> mVolumeControlEffect;
+             wp<IAfEffectModule> mVolumeControlEffect;
 };
 
 class DeviceEffectProxy : public IAfDeviceEffectProxy, public EffectBase {
diff --git a/services/audioflinger/IAfPatchPanel.h b/services/audioflinger/IAfPatchPanel.h
index 5a6621e..6110e4c 100644
--- a/services/audioflinger/IAfPatchPanel.h
+++ b/services/audioflinger/IAfPatchPanel.h
@@ -302,6 +302,13 @@
 
     virtual void closeThreadInternal_l(const sp<IAfThreadBase>& thread) const
             REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
+
+    /**
+     * Get the attributes of the mix port when connecting to the given device port.
+     */
+    virtual status_t getAudioMixPort_l(
+            const struct audio_port_v7* devicePort,
+            struct audio_port_v7* mixPort) REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 };
 
 }  // namespace android
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index fc2f805..5a7429d 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -70,7 +70,7 @@
     virtual audio_utils::mutex& mutex() const
             RETURN_CAPABILITY(audio_utils::AudioFlinger_Mutex) = 0;
     virtual bool isNonOffloadableGlobalEffectEnabled_l() const
-            REQUIRES(mutex()) = 0;  // Tracks
+            REQUIRES(mutex()) EXCLUDES_ThreadBase_Mutex = 0;  // Tracks
     virtual audio_unique_id_t nextUniqueId(audio_unique_id_use_t use) = 0;
     virtual bool btNrecIsOff() const = 0;
     virtual float masterVolume_l() const
@@ -110,7 +110,8 @@
             const wp<IAfTrackBase>& cookie)
             EXCLUDES_AudioFlinger_Mutex = 0;
 
-    virtual void ioConfigChanged(audio_io_config_event_t event,
+    // Hold either AudioFlinger::mutex or ThreadBase::mutex
+    virtual void ioConfigChanged_l(audio_io_config_event_t event,
             const sp<AudioIoDescriptor>& ioDesc,
             pid_t pid = 0) EXCLUDES_AudioFlinger_ClientMutex = 0;
     virtual void onNonOffloadableGlobalEffectEnable() EXCLUDES_AudioFlinger_Mutex = 0;
@@ -140,7 +141,7 @@
     static bool isValidPcmSinkFormat(audio_format_t format);
 
     virtual status_t readyToRun() = 0;
-    virtual void clearPowerManager() = 0;
+    virtual void clearPowerManager() EXCLUDES_ThreadBase_Mutex = 0;
     virtual status_t initCheck() const = 0;
     virtual type_t type() const = 0;
     virtual bool isDuplicating() const = 0;
@@ -156,21 +157,23 @@
     virtual size_t frameCount() const = 0;
     virtual audio_channel_mask_t hapticChannelMask() const = 0;
     virtual uint32_t hapticChannelCount() const = 0;
-    virtual uint32_t latency_l() const = 0;
-    virtual void setVolumeForOutput_l(float left, float right) const = 0;
+    virtual uint32_t latency_l() const = 0;  // NO_THREAD_SAFETY_ANALYSIS
+    virtual void setVolumeForOutput_l(float left, float right) const REQUIRES(mutex()) = 0;
 
     // Return's the HAL's frame count i.e. fast mixer buffer size.
     virtual size_t frameCountHAL() const = 0;
     virtual size_t frameSize() const = 0;
     // Should be "virtual status_t requestExitAndWait()" and override same
     // method in Thread, but Thread::requestExitAndWait() is not yet virtual.
-    virtual void exit() = 0;
-    virtual bool checkForNewParameter_l(const String8& keyValuePair, status_t& status) = 0;
-    virtual status_t setParameters(const String8& keyValuePairs) = 0;
-    virtual String8 getParameters(const String8& keys) = 0;
-    virtual void ioConfigChanged(
+    virtual void exit() EXCLUDES_ThreadBase_Mutex = 0;
+    virtual bool checkForNewParameter_l(const String8& keyValuePair, status_t& status)
+             REQUIRES(mutex()) = 0;
+    virtual status_t setParameters(const String8& keyValuePairs) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual String8 getParameters(const String8& keys) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void ioConfigChanged_l(
             audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) = 0;
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE)
+            /* holds either AF::mutex or TB::mutex */ = 0;
 
     // sendConfigEvent_l() must be called with ThreadBase::mLock held
     // Can temporarily release the lock if waiting for a reply from
@@ -178,38 +181,53 @@
     // status_t sendConfigEvent_l(sp<ConfigEvent>& event);
     virtual void sendIoConfigEvent(
             audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) = 0;
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) EXCLUDES_ThreadBase_Mutex = 0;
     virtual void sendIoConfigEvent_l(
             audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) = 0;
-    virtual void sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp) = 0;
-    virtual void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio, bool forApp) = 0;
-    virtual status_t sendSetParameterConfigEvent_l(const String8& keyValuePair) = 0;
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) REQUIRES(mutex()) = 0;
+    virtual void sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio, bool forApp)
+            REQUIRES(mutex()) = 0;
+    virtual status_t sendSetParameterConfigEvent_l(const String8& keyValuePair)
+            REQUIRES(mutex()) = 0;
     virtual status_t sendCreateAudioPatchConfigEvent(
-            const struct audio_patch* patch, audio_patch_handle_t* handle) = 0;
-    virtual status_t sendReleaseAudioPatchConfigEvent(audio_patch_handle_t handle) = 0;
+            const struct audio_patch* patch, audio_patch_handle_t* handle)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t sendReleaseAudioPatchConfigEvent(audio_patch_handle_t handle)
+            EXCLUDES_ThreadBase_Mutex = 0;
     virtual status_t sendUpdateOutDeviceConfigEvent(
-            const DeviceDescriptorBaseVector& outDevices) = 0;
-    virtual void sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs) = 0;
-    virtual void sendCheckOutputStageEffectsEvent() = 0;
-    virtual void sendCheckOutputStageEffectsEvent_l() = 0;
-    virtual void sendHalLatencyModesChangedEvent_l() = 0;
+            const DeviceDescriptorBaseVector& outDevices) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs)
+            REQUIRES(mutex()) = 0;
+    virtual void sendCheckOutputStageEffectsEvent() EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void sendCheckOutputStageEffectsEvent_l()
+            REQUIRES(mutex()) = 0;
+    virtual void sendHalLatencyModesChangedEvent_l()
+            REQUIRES(mutex()) = 0;
 
-    virtual void processConfigEvents_l() = 0;
-    virtual void setCheckOutputStageEffects() = 0;
-    virtual void cacheParameters_l() = 0;
+    virtual void processConfigEvents_l()
+            REQUIRES(mutex()) = 0;
+    virtual void setCheckOutputStageEffects() = 0;  // no mutex needed
+    virtual void cacheParameters_l()
+            REQUIRES(mutex()) = 0;
     virtual status_t createAudioPatch_l(
-            const struct audio_patch* patch, audio_patch_handle_t* handle) = 0;
-    virtual status_t releaseAudioPatch_l(const audio_patch_handle_t handle) = 0;
-    virtual void updateOutDevices(const DeviceDescriptorBaseVector& outDevices) = 0;
-    virtual void toAudioPortConfig(struct audio_port_config* config) = 0;
-    virtual void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) = 0;
+            const struct audio_patch* patch, audio_patch_handle_t* handle)
+            REQUIRES(mutex()) = 0;
+    virtual status_t releaseAudioPatch_l(const audio_patch_handle_t handle)
+            REQUIRES(mutex()) = 0;
+    virtual void updateOutDevices(const DeviceDescriptorBaseVector& outDevices)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void toAudioPortConfig(struct audio_port_config* config)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs)
+            REQUIRES(mutex()) = 0;
 
     // see note at declaration of mStandby, mOutDevice and mInDevice
     virtual bool inStandby() const = 0;
-    virtual const DeviceTypeSet outDeviceTypes() const = 0;
-    virtual audio_devices_t inDeviceType() const = 0;
-    virtual DeviceTypeSet getDeviceTypes() const = 0;
+    virtual const DeviceTypeSet outDeviceTypes_l() const REQUIRES(mutex()) = 0;
+    virtual audio_devices_t inDeviceType_l() const REQUIRES(mutex()) = 0;
+    virtual DeviceTypeSet getDeviceTypes_l() const REQUIRES(mutex()) = 0;
     virtual const AudioDeviceTypeAddrVector& outDeviceTypeAddrs() const = 0;
     virtual const AudioDeviceTypeAddr& inDeviceTypeAddr() const = 0;
     virtual bool isOutput() const = 0;
@@ -226,7 +244,7 @@
             bool pinned,
             bool probe,
             bool notifyFramesProcessed)
-            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
+            REQUIRES(audio_utils::AudioFlinger_Mutex) EXCLUDES_ThreadBase_Mutex = 0;
 
     // return values for hasAudioSession (bit field)
     enum effect_state {
@@ -243,28 +261,39 @@
     };
 
     // get effect chain corresponding to session Id.
-    virtual sp<IAfEffectChain> getEffectChain(audio_session_t sessionId) const = 0;
+    virtual sp<IAfEffectChain> getEffectChain(audio_session_t sessionId) const
+            EXCLUDES_ThreadBase_Mutex = 0;
     // same as getEffectChain() but must be called with ThreadBase mutex locked
-    virtual sp<IAfEffectChain> getEffectChain_l(audio_session_t sessionId) const = 0;
-    virtual std::vector<int> getEffectIds_l(audio_session_t sessionId) const = 0;
+    virtual sp<IAfEffectChain> getEffectChain_l(audio_session_t sessionId) const
+            REQUIRES(mutex()) = 0;
+    virtual std::vector<int> getEffectIds_l(audio_session_t sessionId) const
+            REQUIRES(mutex()) = 0;
     // add an effect chain to the chain list (mEffectChains)
-    virtual status_t addEffectChain_l(const sp<IAfEffectChain>& chain) = 0;
+    virtual status_t addEffectChain_l(const sp<IAfEffectChain>& chain)
+            REQUIRES(mutex()) = 0;
     // remove an effect chain from the chain list (mEffectChains)
-    virtual size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) = 0;
+    virtual size_t removeEffectChain_l(const sp<IAfEffectChain>& chain)
+            REQUIRES(mutex()) = 0;
     // lock all effect chains Mutexes. Must be called before releasing the
     // ThreadBase mutex before processing the mixer and effects. This guarantees the
     // integrity of the chains during the process.
     // Also sets the parameter 'effectChains' to current value of mEffectChains.
-    virtual void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) = 0;
+    virtual void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains)
+            REQUIRES(mutex()) = 0;
     // unlock effect chains after process
-    virtual void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains) = 0;
+    virtual void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains)
+            EXCLUDES_ThreadBase_Mutex = 0;
     // get a copy of mEffectChains vector
-    virtual Vector<sp<IAfEffectChain>> getEffectChains_l() const = 0;
+    virtual Vector<sp<IAfEffectChain>> getEffectChains_l() const
+            REQUIRES(mutex()) = 0;
     // set audio mode to all effect chains
-    virtual void setMode(audio_mode_t mode) = 0;
+    virtual void setMode(audio_mode_t mode)
+            EXCLUDES_ThreadBase_Mutex = 0;
     // get effect module with corresponding ID on specified audio session
-    virtual sp<IAfEffectModule> getEffect(audio_session_t sessionId, int effectId) const = 0;
-    virtual sp<IAfEffectModule> getEffect_l(audio_session_t sessionId, int effectId) const = 0;
+    virtual sp<IAfEffectModule> getEffect(audio_session_t sessionId, int effectId) const
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual sp<IAfEffectModule> getEffect_l(audio_session_t sessionId, int effectId) const
+            REQUIRES(mutex()) = 0;
     // add an effect module. Also creates the effect chain if none exists for
     // the effects audio session. Only called in a context of moving an effect
     // from one thread to another
@@ -272,29 +301,36 @@
             REQUIRES(audio_utils::AudioFlinger_Mutex, mutex()) = 0;
     // remove an effect module. Also removes the effect chain if this was the last
     // effect
-    virtual void removeEffect_l(const sp<IAfEffectModule>& effect, bool release = false) = 0;
+    virtual void removeEffect_l(const sp<IAfEffectModule>& effect, bool release = false)
+            REQUIRES(mutex()) = 0;
     // disconnect an effect handle from module and destroy module if last handle
-    virtual void disconnectEffectHandle(IAfEffectHandle* handle, bool unpinIfLast) = 0;
+    virtual void disconnectEffectHandle(IAfEffectHandle* handle, bool unpinIfLast)
+            EXCLUDES_ThreadBase_Mutex = 0;
     // detach all tracks connected to an auxiliary effect
-    virtual void detachAuxEffect_l(int effectId) = 0;
+    virtual void detachAuxEffect_l(int effectId) REQUIRES(mutex()) = 0;
     // returns a combination of:
     // - EFFECT_SESSION if effects on this audio session exist in one chain
     // - TRACK_SESSION if tracks on this audio session exist
     // - FAST_SESSION if fast tracks on this audio session exist
     // - SPATIALIZED_SESSION if spatialized tracks on this audio session exist
-    virtual uint32_t hasAudioSession_l(audio_session_t sessionId) const = 0;
-    virtual uint32_t hasAudioSession(audio_session_t sessionId) const = 0;
+    virtual uint32_t hasAudioSession_l(audio_session_t sessionId) const REQUIRES(mutex()) = 0;
+    virtual uint32_t hasAudioSession(audio_session_t sessionId) const
+            EXCLUDES_ThreadBase_Mutex = 0;
 
     // the value returned by the default implementation is not important as the
     // strategy is only meaningful for PlaybackThread which implements this method
-    virtual product_strategy_t getStrategyForSession_l(audio_session_t sessionId) const = 0;
+    virtual product_strategy_t getStrategyForSession_l(audio_session_t sessionId) const
+            REQUIRES(mutex()) = 0;
 
     // check if some effects must be suspended/restored when an effect is enabled
     // or disabled
     virtual void checkSuspendOnEffectEnabled(
-            bool enabled, audio_session_t sessionId, bool threadLocked) = 0;
+            bool enabled, audio_session_t sessionId, bool threadLocked)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) = 0;
+    virtual status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    // Internally static; consider making it a static member.
     virtual bool isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const = 0;
 
     // Return a reference to a per-thread heap which can be used to allocate IMemory
@@ -307,33 +343,35 @@
 
     virtual sp<IMemory> pipeMemory() const = 0;
 
-    virtual void systemReady() = 0;
+    virtual void systemReady() EXCLUDES_ThreadBase_Mutex = 0;
 
     // checkEffectCompatibility_l() must be called with ThreadBase::mLock held
     virtual status_t checkEffectCompatibility_l(
-            const effect_descriptor_t* desc, audio_session_t sessionId) = 0;
+            const effect_descriptor_t* desc, audio_session_t sessionId) REQUIRES(mutex()) = 0;
 
-    virtual void broadcast_l() = 0;
+    virtual void broadcast_l() REQUIRES(mutex()) = 0;
 
-    virtual bool isTimestampCorrectionEnabled() const = 0;
+    virtual bool isTimestampCorrectionEnabled_l() const REQUIRES(mutex()) = 0;
 
     virtual bool isMsdDevice() const = 0;
 
-    virtual void dump(int fd, const Vector<String16>& args) = 0;
+    virtual void dump(int fd, const Vector<String16>& args) EXCLUDES_ThreadBase_Mutex = 0;
 
     // deliver stats to mediametrics.
-    virtual void sendStatistics(bool force) = 0;
+    virtual void sendStatistics(bool force) EXCLUDES_ThreadBase_Mutex = 0;
 
     virtual audio_utils::mutex& mutex() const
             RETURN_CAPABILITY(audio_utils::ThreadBase_Mutex) = 0;
 
-    virtual void onEffectEnable(const sp<IAfEffectModule>& effect) = 0;
-    virtual void onEffectDisable() = 0;
+    virtual void onEffectEnable(const sp<IAfEffectModule>& effect) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void onEffectDisable() EXCLUDES_ThreadBase_Mutex = 0;
 
     // invalidateTracksForAudioSession_l must be called with mLock held.
-    virtual void invalidateTracksForAudioSession_l(audio_session_t sessionId) const = 0;
+    virtual void invalidateTracksForAudioSession_l(audio_session_t sessionId) const
+            REQUIRES(mutex()) = 0;
     // Invalidate all the tracks with the given audio session.
-    virtual void invalidateTracksForAudioSession(audio_session_t sessionId) const = 0;
+    virtual void invalidateTracksForAudioSession(audio_session_t sessionId) const
+            EXCLUDES_ThreadBase_Mutex = 0;
 
     virtual bool isStreamInitialized() const = 0;
     virtual void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor)
@@ -341,10 +379,12 @@
     virtual void stopMelComputation_l()
             REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
-    virtual product_strategy_t getStrategyForStream(audio_stream_type_t stream) const = 0;
+    virtual product_strategy_t getStrategyForStream(audio_stream_type_t stream) const
+            EXCLUDES_AUDIO_ALL = 0;
 
     virtual void setEffectSuspended_l(
-            const effect_uuid_t* type, bool suspend, audio_session_t sessionId) = 0;
+            const effect_uuid_t* type, bool suspend, audio_session_t sessionId)
+            REQUIRES(mutex()) = 0;
 
     // Dynamic cast to derived interface
     virtual sp<IAfDirectOutputThread> asIAfDirectOutputThread() { return nullptr; }
@@ -392,7 +432,7 @@
     // return estimated latency in milliseconds, as reported by HAL
     virtual uint32_t latency() const = 0;  // should be in IAfThreadBase?
 
-    virtual uint32_t& fastTrackAvailMask_l() = 0;
+    virtual uint32_t& fastTrackAvailMask_l() REQUIRES(mutex()) = 0;
 
     virtual sp<IAfTrack> createTrack_l(
             const sp<Client>& client,
@@ -418,63 +458,70 @@
             bool isBitPerfect)
             REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
-    virtual status_t addTrack_l(const sp<IAfTrack>& track) = 0;
-    virtual bool destroyTrack_l(const sp<IAfTrack>& track) = 0;
-    virtual bool isTrackActive(const sp<IAfTrack>& track) const = 0;
-    virtual void addOutputTrack_l(const sp<IAfTrack>& track) = 0;
+    virtual status_t addTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
+    virtual bool destroyTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
+    virtual bool isTrackActive(const sp<IAfTrack>& track) const REQUIRES(mutex()) = 0;
+    virtual void addOutputTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
 
-    virtual AudioStreamOut* getOutput_l() const = 0;
-    virtual AudioStreamOut* getOutput() const = 0;
-    virtual AudioStreamOut* clearOutput() = 0;
+    virtual AudioStreamOut* getOutput_l() const REQUIRES(mutex()) = 0;
+    virtual AudioStreamOut* getOutput() const EXCLUDES_ThreadBase_Mutex = 0;
+    virtual AudioStreamOut* clearOutput() EXCLUDES_ThreadBase_Mutex = 0;
 
     // a very large number of suspend() calls will eventually wrap around, but this is unlikely
     virtual void suspend() = 0;
     virtual void restore() = 0;
     virtual bool isSuspended() const = 0;
-    virtual status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames) const = 0;
+    virtual status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames) const
+            EXCLUDES_ThreadBase_Mutex = 0;
     // Consider also removing and passing an explicit mMainBuffer initialization
     // parameter to AF::IAfTrack::Track().
     virtual float* sinkBuffer() const = 0;
 
-    virtual status_t attachAuxEffect(const sp<IAfTrack>& track, int EffectId) = 0;
-    virtual status_t attachAuxEffect_l(const sp<IAfTrack>& track, int EffectId) = 0;
+    virtual status_t attachAuxEffect(const sp<IAfTrack>& track, int EffectId)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t attachAuxEffect_l(const sp<IAfTrack>& track, int EffectId)
+            REQUIRES(mutex()) = 0;
 
     // called with AudioFlinger lock held
-    virtual bool invalidateTracks_l(audio_stream_type_t streamType) = 0;
-    virtual bool invalidateTracks_l(std::set<audio_port_handle_t>& portIds) = 0;
-    virtual void invalidateTracks(audio_stream_type_t streamType) = 0;
+    virtual bool invalidateTracks_l(audio_stream_type_t streamType) REQUIRES(mutex()) = 0;
+    virtual bool invalidateTracks_l(std::set<audio_port_handle_t>& portIds) REQUIRES(mutex()) = 0;
+    virtual void invalidateTracks(audio_stream_type_t streamType)
+            EXCLUDES_ThreadBase_Mutex = 0;
     // Invalidate tracks by a set of port ids. The port id will be removed from
     // the given set if the corresponding track is found and invalidated.
-    virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds) = 0;
+    virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual status_t getTimestamp_l(AudioTimestamp& timestamp) = 0;
-    virtual void addPatchTrack(const sp<IAfPatchTrack>& track) = 0;
-    virtual void deletePatchTrack(const sp<IAfPatchTrack>& track) = 0;
+    virtual status_t getTimestamp_l(AudioTimestamp& timestamp) REQUIRES(mutex()) = 0;
+    virtual void addPatchTrack(const sp<IAfPatchTrack>& track) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void deletePatchTrack(const sp<IAfPatchTrack>& track) EXCLUDES_ThreadBase_Mutex = 0;
 
     // Return the asynchronous signal wait time.
-    virtual int64_t computeWaitTimeNs_l() const = 0;
+    virtual int64_t computeWaitTimeNs_l() const REQUIRES(mutex()) = 0;
     // returns true if the track is allowed to be added to the thread.
     virtual bool isTrackAllowed_l(
             audio_channel_mask_t channelMask, audio_format_t format, audio_session_t sessionId,
-            uid_t uid) const = 0;
+            uid_t uid) const REQUIRES(mutex()) = 0;
 
     virtual bool supportsHapticPlayback() const = 0;
 
-    virtual void setDownStreamPatch(const struct audio_patch* patch) = 0;
+    virtual void setDownStreamPatch(const struct audio_patch* patch)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual IAfTrack* getTrackById_l(audio_port_handle_t trackId) = 0;
+    virtual IAfTrack* getTrackById_l(audio_port_handle_t trackId) REQUIRES(mutex()) = 0;
 
     virtual bool hasMixer() const = 0;
 
     virtual status_t setRequestedLatencyMode(audio_latency_mode_t mode) = 0;
 
-    virtual status_t getSupportedLatencyModes(std::vector<audio_latency_mode_t>* modes) = 0;
+    virtual status_t getSupportedLatencyModes(std::vector<audio_latency_mode_t>* modes)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
     virtual status_t setBluetoothVariableLatencyEnabled(bool enabled) = 0;
 
-    virtual void setStandby() = 0;
-    virtual void setStandby_l() = 0;
-    virtual bool waitForHalStart() = 0;
+    virtual void setStandby() EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void setStandby_l() REQUIRES(mutex()) = 0;
+    virtual bool waitForHalStart() EXCLUDES_ThreadBase_Mutex = 0;
 
     virtual bool hasFastMixer() const = 0;
     virtual FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex) const = 0;
@@ -494,9 +541,9 @@
             const sp<IAfThreadCallback>& afThreadCallback, IAfPlaybackThread* mainThread,
             audio_io_handle_t id, bool systemReady);
 
-    virtual void addOutputTrack(IAfPlaybackThread* thread) = 0;
+    virtual void addOutputTrack(IAfPlaybackThread* thread) EXCLUDES_ThreadBase_Mutex = 0;
     virtual uint32_t waitTimeMs() const = 0;
-    virtual void removeOutputTrack(IAfPlaybackThread* thread) = 0;
+    virtual void removeOutputTrack(IAfPlaybackThread* thread) EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfRecordThread : public virtual IAfThreadBase {
@@ -521,42 +568,49 @@
             status_t* status /*non-NULL*/,
             audio_port_handle_t portId,
             int32_t maxSharedAudioHistoryMs)
-            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
-    virtual void destroyTrack_l(const sp<IAfRecordTrack>& track) = 0;
-    virtual void removeTrack_l(const sp<IAfRecordTrack>& track) = 0;
+            REQUIRES(audio_utils::AudioFlinger_Mutex) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void destroyTrack_l(const sp<IAfRecordTrack>& track) REQUIRES(mutex()) = 0;
+    virtual void removeTrack_l(const sp<IAfRecordTrack>& track) REQUIRES(mutex()) = 0;
 
     virtual status_t start(
             IAfRecordTrack* recordTrack, AudioSystem::sync_event_t event,
-            audio_session_t triggerSession) = 0;
+            audio_session_t triggerSession) EXCLUDES_ThreadBase_Mutex = 0;
 
     // ask the thread to stop the specified track, and
     // return true if the caller should then do its part of the stopping process
-    virtual bool stop(IAfRecordTrack* recordTrack) = 0;
+    virtual bool stop(IAfRecordTrack* recordTrack) EXCLUDES_ThreadBase_Mutex = 0;
 
+    // NO_THREAD_SAFETY_ANALYSIS: consider atomics
     virtual AudioStreamIn* getInput() const = 0;
     virtual AudioStreamIn* clearInput() = 0;
 
     virtual status_t getActiveMicrophones(
-            std::vector<media::MicrophoneInfoFw>* activeMicrophones) const = 0;
-    virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) = 0;
-    virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
+            std::vector<media::MicrophoneInfoFw>* activeMicrophones)
+            const EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t setPreferredMicrophoneFieldDimension(float zoom)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual void addPatchTrack(const sp<IAfPatchRecord>& record) = 0;
-    virtual void deletePatchTrack(const sp<IAfPatchRecord>& record) = 0;
+    virtual void addPatchTrack(const sp<IAfPatchRecord>& record)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void deletePatchTrack(const sp<IAfPatchRecord>& record)
+            EXCLUDES_ThreadBase_Mutex = 0;
     virtual bool fastTrackAvailable() const = 0;
     virtual void setFastTrackAvailable(bool available) = 0;
 
-    virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced) = 0;
+    virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced)
+            EXCLUDES_ThreadBase_Mutex = 0;
     virtual bool hasFastCapture() const = 0;
 
-    virtual void checkBtNrec() = 0;
-    virtual uint32_t getInputFramesLost() const = 0;
+    virtual void checkBtNrec() EXCLUDES_ThreadBase_Mutex = 0;
+    virtual uint32_t getInputFramesLost() const EXCLUDES_ThreadBase_Mutex = 0;
 
     virtual status_t shareAudioHistory(
             const std::string& sharedAudioPackageName,
             audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
-            int64_t sharedAudioStartMs = -1) = 0;
-    virtual void resetAudioHistory_l() = 0;
+            int64_t sharedAudioStartMs = -1) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void resetAudioHistory_l() REQUIRES(mutex()) = 0;
 };
 
 class IAfMmapThread : public virtual IAfThreadBase {
@@ -575,26 +629,32 @@
             audio_session_t sessionId,
             const sp<MmapStreamCallback>& callback,
             audio_port_handle_t deviceId,
-            audio_port_handle_t portId) = 0;
-    virtual void disconnect() = 0;
+            audio_port_handle_t portId) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void disconnect() EXCLUDES_ThreadBase_Mutex = 0;
 
     // MmapStreamInterface handling (see adapter)
     virtual status_t createMmapBuffer(
-            int32_t minSizeFrames, struct audio_mmap_buffer_info* info) = 0;
-    virtual status_t getMmapPosition(struct audio_mmap_position* position) const = 0;
+            int32_t minSizeFrames, struct audio_mmap_buffer_info* info)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t getMmapPosition(struct audio_mmap_position* position) const
+            EXCLUDES_ThreadBase_Mutex = 0;
     virtual status_t start(
             const AudioClient& client, const audio_attributes_t* attr,
-            audio_port_handle_t* handle) = 0;
-    virtual status_t stop(audio_port_handle_t handle) = 0;
-    virtual status_t standby() = 0;
-    virtual status_t getExternalPosition(uint64_t* position, int64_t* timeNanos) const = 0;
-    virtual status_t reportData(const void* buffer, size_t frameCount) = 0;
+            audio_port_handle_t* handle) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t stop(audio_port_handle_t handle) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t standby() EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t getExternalPosition(uint64_t* position, int64_t* timeNanos) const
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t reportData(const void* buffer, size_t frameCount)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
     // TODO(b/291317898)  move to IAfThreadBase?
-    virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds) = 0;
+    virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
     // Sets whether records for the UID are silenced - TODO(b/291317898) move to IAfMmapCaptureThread
-    virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced) = 0;
+    virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
     virtual sp<IAfMmapPlaybackThread> asIAfMmapPlaybackThread() { return nullptr; }
     virtual sp<IAfMmapCaptureThread> asIAfMmapCaptureThread() { return nullptr; }
@@ -606,7 +666,7 @@
             const sp<IAfThreadCallback>& afThreadCallback, audio_io_handle_t id,
             AudioHwDevice* hwDev, AudioStreamOut* output, bool systemReady);
 
-    virtual AudioStreamOut* clearOutput() = 0;
+    virtual AudioStreamOut* clearOutput() EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfMmapCaptureThread : public virtual IAfMmapThread {
@@ -615,7 +675,7 @@
             const sp<IAfThreadCallback>& afThreadCallback, audio_io_handle_t id,
             AudioHwDevice* hwDev, AudioStreamIn* input, bool systemReady);
 
-    virtual AudioStreamIn* clearInput() = 0;
+    virtual AudioStreamIn* clearInput() EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 }  // namespace android
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index cf30ded..2302e13 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -227,6 +227,18 @@
     virtual void setMetadataHasChanged() = 0;
 
     /**
+     * Called when a track moves to active state to record its contribution to battery usage.
+     * Track state transitions should eventually be handled within the track class.
+     */
+    virtual void beginBatteryAttribution() = 0;
+
+    /**
+     * Called when a track moves out of the active state to record its contribution
+     * to battery usage.
+     */
+    virtual void endBatteryAttribution() = 0;
+
+    /**
      * For RecordTrack
      * TODO(b/291317964) either use this or add asRecordTrack or asTrack etc.
      */
@@ -339,10 +351,10 @@
     virtual sp<os::ExternalVibration> getExternalVibration() const = 0;
 
     // This function should be called with the thread lock held.
-    virtual void updateTeePatches() = 0;
+    virtual void updateTeePatches_l() = 0;
 
     // Argument teePatchesToUpdate is passed by value; use std::move to optimize.
-    virtual void setTeePatchesToUpdate(TeePatches teePatchesToUpdate) = 0;
+    virtual void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) = 0;
 
     static bool checkServerLatencySupported(audio_format_t format, audio_output_flags_t flags) {
         return audio_is_linear_pcm(format) && (flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) == 0;
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index add453f..ef932ec 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -38,24 +38,21 @@
 
     ndk::SpAIBinder soundDoseBinder;
     if (device->getSoundDoseInterface(module, &soundDoseBinder) != OK) {
-        ALOGW("%s: HAL cannot provide sound dose interface for module %s, use internal MEL",
+        ALOGW("%s: HAL cannot provide sound dose interface for module %s",
               __func__, module.c_str());
-        activateInternalSoundDoseComputation();
         return false;
     }
 
     if (soundDoseBinder == nullptr) {
-         ALOGW("%s: HAL doesn't implement a sound dose interface for module %s, use internal MEL",
+         ALOGW("%s: HAL doesn't implement a sound dose interface for module %s",
               __func__, module.c_str());
-        activateInternalSoundDoseComputation();
         return false;
     }
 
     std::shared_ptr<ISoundDose> soundDoseInterface = ISoundDose::fromBinder(soundDoseBinder);
 
-    if (!mSoundDoseManager->setHalSoundDoseInterface(soundDoseInterface)) {
+    if (!mSoundDoseManager->setHalSoundDoseInterface(module, soundDoseInterface)) {
         ALOGW("%s: cannot activate HAL MEL reporting for module %s", __func__, module.c_str());
-        activateInternalSoundDoseComputation();
         return false;
     }
 
@@ -73,35 +70,14 @@
         mUseHalSoundDoseInterface = false;
     }
 
-    mSoundDoseManager->setHalSoundDoseInterface(nullptr);
+    // reset the HAL interfaces and use internal MELs
+    mSoundDoseManager->resetHalSoundDoseInterfaces();
 }
 
 void MelReporter::onFirstRef() {
     mAfMelReporterCallback->getPatchCommandThread()->addListener(this);
-}
 
-bool MelReporter::shouldComputeMelForDeviceType(audio_devices_t device) {
-    if (!mSoundDoseManager->isCsdEnabled()) {
-        ALOGV("%s csd is disabled", __func__);
-        return false;
-    }
-    if (mSoundDoseManager->forceComputeCsdOnAllDevices()) {
-        return true;
-    }
-
-    switch (device) {
-        case AUDIO_DEVICE_OUT_WIRED_HEADSET:
-        case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
-        // TODO(b/278265907): enable A2DP when we can distinguish A2DP headsets
-        // case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
-        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
-        case AUDIO_DEVICE_OUT_USB_HEADSET:
-        case AUDIO_DEVICE_OUT_BLE_HEADSET:
-        case AUDIO_DEVICE_OUT_BLE_BROADCAST:
-            return true;
-        default:
-            return false;
-    }
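+    // Created here rather than in the constructor so that fromExisting(this)
+    // refers to an object that already holds a strong reference.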
+    mSoundDoseManager = sp<SoundDoseManager>::make(sp<IMelReporterCallback>::fromExisting(this));
 }
 
 void MelReporter::updateMetadataForCsd(audio_io_handle_t streamHandle,
@@ -127,16 +103,17 @@
     }
 
     auto activeMelPatchIt = mActiveMelPatches.find(activeMelPatchId.value());
-    if (activeMelPatchIt != mActiveMelPatches.end()
-        && shouldActivateCsd != activeMelPatchIt->second.csdActive) {
-        if (activeMelPatchIt->second.csdActive) {
-            ALOGV("%s should not compute CSD for stream handle %d", __func__, streamHandle);
-            stopMelComputationForPatch_l(activeMelPatchIt->second);
-        } else {
-            ALOGV("%s should compute CSD for stream handle %d", __func__, streamHandle);
-            startMelComputationForActivePatch_l(activeMelPatchIt->second);
+    if (activeMelPatchIt != mActiveMelPatches.end()) {
+        if (shouldActivateCsd != activeMelPatchIt->second.csdActive) {
+            if (activeMelPatchIt->second.csdActive) {
+                ALOGV("%s should not compute CSD for stream handle %d", __func__, streamHandle);
+                stopMelComputationForPatch_l(activeMelPatchIt->second);
+            } else {
+                ALOGV("%s should compute CSD for stream handle %d", __func__, streamHandle);
+                startMelComputationForActivePatch_l(activeMelPatchIt->second);
+            }
+            activeMelPatchIt->second.csdActive = shouldActivateCsd;
         }
-        activeMelPatchIt->second.csdActive = shouldActivateCsd;
     }
 }
 
@@ -159,23 +136,28 @@
     audio_io_handle_t streamHandle = patch.mAudioPatch.sources[0].ext.mix.handle;
     ActiveMelPatch newPatch;
     newPatch.streamHandle = streamHandle;
+    newPatch.csdActive = false;
     for (size_t i = 0; i < patch.mAudioPatch.num_sinks; ++i) {
-        if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE
-            && shouldComputeMelForDeviceType(patch.mAudioPatch.sinks[i].ext.device.type)) {
+        if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE &&
+                mSoundDoseManager->shouldComputeCsdForDeviceType(
+                        patch.mAudioPatch.sinks[i].ext.device.type)) {
             audio_port_handle_t deviceId = patch.mAudioPatch.sinks[i].id;
-            newPatch.deviceHandles.push_back(deviceId);
+            bool shouldComputeCsd = mSoundDoseManager->shouldComputeCsdForDeviceWithAddress(
+                    patch.mAudioPatch.sinks[i].ext.device.type,
+                    patch.mAudioPatch.sinks[i].ext.device.address);
+            newPatch.deviceStates.push_back({deviceId, shouldComputeCsd});
+            newPatch.csdActive |= shouldComputeCsd;
             AudioDeviceTypeAddr adt{patch.mAudioPatch.sinks[i].ext.device.type,
                                     patch.mAudioPatch.sinks[i].ext.device.address};
             mSoundDoseManager->mapAddressToDeviceId(adt, deviceId);
         }
     }
 
-    if (!newPatch.deviceHandles.empty()) {
+    if (!newPatch.deviceStates.empty() && newPatch.csdActive) {
         audio_utils::lock_guard _afl(mAfMelReporterCallback->mutex());  // AudioFlinger_Mutex
         audio_utils::lock_guard _l(mutex());
         ALOGV("%s add patch handle %d to active devices", __func__, handle);
         startMelComputationForActivePatch_l(newPatch);
-        newPatch.csdActive = true;
         mActiveMelPatches[handle] = newPatch;
     }
 }
@@ -189,18 +171,41 @@
         return;
     }
 
-    for (const auto& deviceHandle : patch.deviceHandles) {
-        ++mActiveDevices[deviceHandle];
-        ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
-              patch.streamHandle, deviceHandle, mActiveDevices[deviceHandle]);
+    for (const auto& device : patch.deviceStates) {
+        if (device.second) {
+            ++mActiveDevices[device.first];
+            ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
+                  patch.streamHandle, device.first, mActiveDevices[device.first]);
 
-        if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
-            outputThread->startMelComputation_l(mSoundDoseManager->getOrCreateProcessorForDevice(
-                deviceHandle,
-                patch.streamHandle,
-                outputThread->sampleRate(),
-                outputThread->channelCount(),
-                outputThread->format()));
+            if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
+                outputThread->startMelComputation_l(
+                        mSoundDoseManager->getOrCreateProcessorForDevice(
+                                device.first,
+                                patch.streamHandle,
+                                outputThread->sampleRate(),
+                                outputThread->channelCount(),
+                                outputThread->format()));
+            }
+        }
+    }
+}
+
+void MelReporter::startMelComputationForDeviceId(audio_port_handle_t deviceId) {
+    ALOGV("%s(%d)", __func__, deviceId);
+    audio_utils::lock_guard _laf(mAfMelReporterCallback->mutex());
+    audio_utils::lock_guard _l(mutex());
+
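+    // Mark the device as CSD-eligible in each patch that references it and
+    // (re)start MEL computation for patches that become CSD-active as a result.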
+    for (auto& activeMelPatch : mActiveMelPatches) {
+        bool csdActive = false;
+        for (auto& device: activeMelPatch.second.deviceStates) {
+            if (device.first == deviceId && !device.second) {
+                device.second = true;
+            }
+            csdActive |= device.second;
+        }
+        if (csdActive && !activeMelPatch.second.csdActive) {
+            activeMelPatch.second.csdActive = csdActive;
+            startMelComputationForActivePatch_l(activeMelPatch.second);
         }
     }
 }
@@ -240,6 +245,9 @@
 void MelReporter::stopInternalMelComputation() {
     ALOGV("%s", __func__);
     audio_utils::lock_guard _l(mutex());
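+    // Internal MEL is not active when the HAL sound dose interface is in use; nothing to do.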
+    if (mUseHalSoundDoseInterface) {
+        return;
+    }
     mActiveMelPatches.clear();
     mUseHalSoundDoseInterface = true;
 }
@@ -247,30 +255,48 @@
 void MelReporter::stopMelComputationForPatch_l(const ActiveMelPatch& patch)
 NO_THREAD_SAFETY_ANALYSIS  // access of AudioFlinger::checkOutputThread_l
 {
-    if (!patch.csdActive) {
-        // no need to stop CSD inactive patches
-        return;
-    }
-
     auto outputThread = mAfMelReporterCallback->checkOutputThread_l(patch.streamHandle);
 
     ALOGV("%s: stop MEL for stream id: %d", __func__, patch.streamHandle);
-    for (const auto& deviceId : patch.deviceHandles) {
-        if (mActiveDevices[deviceId] > 0) {
-            --mActiveDevices[deviceId];
-            if (mActiveDevices[deviceId] == 0) {
+    for (const auto& device : patch.deviceStates) {
+        if (mActiveDevices[device.first] > 0) {
+            --mActiveDevices[device.first];
+            if (mActiveDevices[device.first] == 0) {
                 // no stream is using deviceId anymore
-                ALOGI("%s removing device %d from active CSD devices", __func__, deviceId);
-                mSoundDoseManager->clearMapDeviceIdEntries(deviceId);
+                ALOGI("%s removing device %d from active CSD devices", __func__, device.first);
+                mSoundDoseManager->clearMapDeviceIdEntries(device.first);
             }
         }
     }
 
+    mSoundDoseManager->removeStreamProcessor(patch.streamHandle);
     if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
         outputThread->stopMelComputation_l();
     }
 }
 
+void MelReporter::stopMelComputationForDeviceId(audio_port_handle_t deviceId) {
+    ALOGV("%s(%d)", __func__, deviceId);
+    audio_utils::lock_guard _laf(mAfMelReporterCallback->mutex());
+    audio_utils::lock_guard _l(mutex());
+
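+    // Clear the device's CSD flag in each patch that references it and stop
+    // MEL computation for patches that are no longer CSD-active.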
+    for (auto& activeMelPatch : mActiveMelPatches) {
+        bool csdActive = false;
+        for (auto& device: activeMelPatch.second.deviceStates) {
+            if (device.first == deviceId && device.second) {
+                device.second = false;
+            }
+            csdActive |= device.second;
+        }
+
+        if (!csdActive && activeMelPatch.second.csdActive) {
+            activeMelPatch.second.csdActive = csdActive;
+            stopMelComputationForPatch_l(activeMelPatch.second);
+        }
+    }
+}
+
 std::optional<audio_patch_handle_t> MelReporter::activePatchStreamHandle_l(
         audio_io_handle_t streamHandle) {
     for(const auto& patchIt : mActiveMelPatches) {
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 07ab94d..ce89b24 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -42,11 +42,11 @@
  * Class for listening to new patches and starting the MEL computation. MelReporter is
  * concealed within AudioFlinger; their lifetimes are the same.
  */
-class MelReporter : public PatchCommandThread::PatchCommandListener {
+class MelReporter : public PatchCommandThread::PatchCommandListener,
+                    public IMelReporterCallback {
 public:
     explicit MelReporter(const sp<IAfMelReporterCallback>& afMelReporterCallback)
-        : mAfMelReporterCallback(afMelReporterCallback),
-         mSoundDoseManager(sp<SoundDoseManager>::make()) {}
+        : mAfMelReporterCallback(afMelReporterCallback) {}
 
     void onFirstRef() override;
 
@@ -78,6 +78,12 @@
 
     std::string dump();
 
+    // IMelReporterCallback methods
+    void stopMelComputationForDeviceId(audio_port_handle_t deviceId) final
+            EXCLUDES_MelReporter_Mutex;
+    void startMelComputationForDeviceId(audio_port_handle_t deviceId) final
+            EXCLUDES_MelReporter_Mutex;
+
     // PatchCommandListener methods
     void onCreateAudioPatch(audio_patch_handle_t handle,
             const IAfPatchPanel::Patch& patch) final
@@ -96,13 +102,15 @@
 private:
     struct ActiveMelPatch {
         audio_io_handle_t streamHandle{AUDIO_IO_HANDLE_NONE};
-        std::vector<audio_port_handle_t> deviceHandles;
+        /**
+         * Stores device ids and whether they are eligible for CSD computation.
+         * The boolean can change at runtime because BT audio devices can be
+         * reconfigured by the user as headphones/headsets or as other device types.
+         */
+        std::vector<std::pair<audio_port_handle_t, bool>> deviceStates;
         bool csdActive;
     };
 
-    /** Returns true if we should compute MEL for the given device. */
-    bool shouldComputeMelForDeviceType(audio_devices_t device);
-
     void stopInternalMelComputation();
     audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::MelReporter_Mutex) {
         return mMutex;
diff --git a/services/audioflinger/OWNERS b/services/audioflinger/OWNERS
index 17d4c37..e1f69cc 100644
--- a/services/audioflinger/OWNERS
+++ b/services/audioflinger/OWNERS
@@ -1,4 +1,6 @@
+# Bug component: 48436
+elaurent@google.com
 hunga@google.com
-jmtrivi@google.com
+jiabin@google.com
 mnaganov@google.com
-philburk@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 7d3900b..17591dd 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -447,6 +447,24 @@
     return status;
 }
 
+status_t PatchPanel::getAudioMixPort_l(const audio_port_v7 *devicePort,
+                                       audio_port_v7 *mixPort) {
+    if (devicePort->type != AUDIO_PORT_TYPE_DEVICE) {
+        ALOGE("%s the type of given device port is not DEVICE", __func__);
+        return INVALID_OPERATION;
+    }
+    if (mixPort->type != AUDIO_PORT_TYPE_MIX) {
+        ALOGE("%s the type of given mix port is not MIX", __func__);
+        return INVALID_OPERATION;
+    }
+    AudioHwDevice* hwDevice = findAudioHwDeviceByModule_l(devicePort->ext.device.hw_module);
+    if (hwDevice == nullptr) {
+        ALOGW("%s cannot find hw module %d", __func__, devicePort->ext.device.hw_module);
+        return BAD_VALUE;
+    }
+    return hwDevice->getAudioMixPort(devicePort, mixPort);
+}
+
 PatchPanel::Patch::~Patch()
 {
     ALOGE_IF(isSoftware(), "Software patch connections leaked %d %d",
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 1ff8fff..b107eb0 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -73,6 +73,12 @@
     void closeThreadInternal_l(const sp<IAfThreadBase>& thread) const final
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
+    /**
+     * Get the attributes of the mix port when connecting to the given device port
+     */
+    status_t getAudioMixPort_l(const audio_port_v7* devicePort, audio_port_v7* mixPort) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
+
 private:
     AudioHwDevice* findAudioHwDeviceByModule_l(audio_module_handle_t module)
             REQUIRES(audio_utils::AudioFlinger_Mutex);
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 15e85f9..2577ca8 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -23,6 +23,7 @@
 #include <audio_utils/mutex.h>
 #include <audio_utils/LinearMap.h>
 #include <binder/AppOpsManager.h>
+#include <utils/RWLock.h>
 
 namespace android {
 
@@ -193,8 +194,8 @@
     sp<os::ExternalVibration> getExternalVibration() const final { return mExternalVibration; }
 
             // This function should be called with the thread lock held.
-    void updateTeePatches() final;
-    void setTeePatchesToUpdate(TeePatches teePatchesToUpdate) final;
+    void updateTeePatches_l() final;
+    void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) final;
 
     void tallyUnderrunFrames(size_t frames) final {
        if (isOut()) { // we expect this from output tracks only
@@ -349,8 +350,10 @@
 
 private:
     void                interceptBuffer(const AudioBufferProvider::Buffer& buffer);
+    // Must hold thread lock to access tee patches
     template <class F>
-    void                forEachTeePatchTrack(F f) {
+    void                forEachTeePatchTrack_l(F f) {
+        RWLock::AutoRLock readLock(mTeePatchesRWLock);
         for (auto& tp : mTeePatches) { f(tp.patchTrack); }
     };
 
@@ -386,6 +389,7 @@
     audio_output_flags_t mFlags;
     TeePatches mTeePatches;
     std::optional<TeePatches> mTeePatchesToUpdate;
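+    // Read-locked in forEachTeePatchTrack_l() to guard mTeePatches during iteration.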
+    RWLock              mTeePatchesRWLock;
     const float         mSpeed;
     const bool          mIsSpatialized;
     const bool          mIsBitPerfect;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 0b73fba..01e7b0d 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -185,7 +185,7 @@
 // Minimum amount of time between checking to see if the timestamp is advancing
 // for underrun detection. If we check too frequently, we may not detect a
 // timestamp update and will falsely detect underrun.
-static const nsecs_t kMinimumTimeBetweenTimestampChecksNs = 150 /* ms */ * 1000;
+static constexpr nsecs_t kMinimumTimeBetweenTimestampChecksNs = 150 /* ms */ * 1'000'000;
 
 // The universal constant for ubiquitous 20ms value. The value of 20ms seems to provide a good
 // balance between power consumption and latency, and allows threads to be scheduled reliably
@@ -875,7 +875,7 @@
         } break;
         case CFG_EVENT_IO: {
             IoConfigEventData *data = (IoConfigEventData *)event->mData.get();
-            ioConfigChanged(data->mEvent, data->mPid, data->mPortId);
+            ioConfigChanged_l(data->mEvent, data->mPid, data->mPortId);
         } break;
         case CFG_EVENT_SET_PARAMETER: {
             SetParameterConfigEventData *data = (SetParameterConfigEventData *)event->mData.get();
@@ -886,22 +886,22 @@
             }
         } break;
         case CFG_EVENT_CREATE_AUDIO_PATCH: {
-            const DeviceTypeSet oldDevices = getDeviceTypes();
+            const DeviceTypeSet oldDevices = getDeviceTypes_l();
             CreateAudioPatchConfigEventData *data =
                                             (CreateAudioPatchConfigEventData *)event->mData.get();
             event->mStatus = createAudioPatch_l(&data->mPatch, &data->mHandle);
-            const DeviceTypeSet newDevices = getDeviceTypes();
+            const DeviceTypeSet newDevices = getDeviceTypes_l();
             configChanged = oldDevices != newDevices;
             mLocalLog.log("CFG_EVENT_CREATE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
                     dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
                     dumpDeviceTypes(newDevices).c_str(), toString(newDevices).c_str());
         } break;
         case CFG_EVENT_RELEASE_AUDIO_PATCH: {
-            const DeviceTypeSet oldDevices = getDeviceTypes();
+            const DeviceTypeSet oldDevices = getDeviceTypes_l();
             ReleaseAudioPatchConfigEventData *data =
                                             (ReleaseAudioPatchConfigEventData *)event->mData.get();
             event->mStatus = releaseAudioPatch_l(data->mHandle);
-            const DeviceTypeSet newDevices = getDeviceTypes();
+            const DeviceTypeSet newDevices = getDeviceTypes_l();
             configChanged = oldDevices != newDevices;
             mLocalLog.log("CFG_EVENT_RELEASE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
                     dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
@@ -1089,9 +1089,9 @@
     }
     // Note: output device may be used by capture threads for effects such as AEC.
     dprintf(fd, "  Output devices: %s (%s)\n",
-            dumpDeviceTypes(outDeviceTypes()).c_str(), toString(outDeviceTypes()).c_str());
+            dumpDeviceTypes(outDeviceTypes_l()).c_str(), toString(outDeviceTypes_l()).c_str());
     dprintf(fd, "  Input device: %#x (%s)\n",
-            inDeviceType(), toString(inDeviceType()).c_str());
+            inDeviceType_l(), toString(inDeviceType_l()).c_str());
     dprintf(fd, "  Audio source: %d (%s)\n", mAudioSource, toString(mAudioSource).c_str());
 
     // Dump timestamp statistics for the Thread types that support it.
@@ -1102,7 +1102,8 @@
             || mType == OFFLOAD
             || mType == SPATIALIZER) {
         dprintf(fd, "  Timestamp stats: %s\n", mTimestampVerifier.toString().c_str());
-        dprintf(fd, "  Timestamp corrected: %s\n", isTimestampCorrectionEnabled() ? "yes" : "no");
+        dprintf(fd, "  Timestamp corrected: %s\n",
+                isTimestampCorrectionEnabled_l() ? "yes" : "no");
     }
 
     if (mLastIoBeginNs > 0) { // MMAP may not set this
@@ -1941,7 +1942,7 @@
     logTrack("add", track);
     mActiveTracksGeneration++;
     mLatestActiveTrack = track;
-    ++mBatteryCounter[track->uid()].second;
+    track->beginBatteryAttribution();
     mHasChanged = true;
     return mActiveTracks.add(track);
 }
@@ -1955,7 +1956,7 @@
     }
     logTrack("remove", track);
     mActiveTracksGeneration++;
-    --mBatteryCounter[track->uid()].second;
+    track->endBatteryAttribution();
     // mLatestActiveTrack is not cleared even if is the same as track.
     mHasChanged = true;
 #ifdef TEE_SINK
@@ -1968,45 +1969,23 @@
 template <typename T>
 void ThreadBase::ActiveTracks<T>::clear() {
     for (const sp<T> &track : mActiveTracks) {
-        BatteryNotifier::getInstance().noteStopAudio(track->uid());
+        track->endBatteryAttribution();
         logTrack("clear", track);
     }
     mLastActiveTracksGeneration = mActiveTracksGeneration;
     if (!mActiveTracks.empty()) { mHasChanged = true; }
     mActiveTracks.clear();
     mLatestActiveTrack.clear();
-    mBatteryCounter.clear();
 }
 
 template <typename T>
-void ThreadBase::ActiveTracks<T>::updatePowerState(
+void ThreadBase::ActiveTracks<T>::updatePowerState_l(
         const sp<ThreadBase>& thread, bool force) {
     // Updates ActiveTracks client uids to the thread wakelock.
     if (mActiveTracksGeneration != mLastActiveTracksGeneration || force) {
         thread->updateWakeLockUids_l(getWakeLockUids());
         mLastActiveTracksGeneration = mActiveTracksGeneration;
     }
-
-    // Updates BatteryNotifier uids
-    for (auto it = mBatteryCounter.begin(); it != mBatteryCounter.end();) {
-        const uid_t uid = it->first;
-        ssize_t &previous = it->second.first;
-        ssize_t &current = it->second.second;
-        if (current > 0) {
-            if (previous == 0) {
-                BatteryNotifier::getInstance().noteStartAudio(uid);
-            }
-            previous = current;
-            ++it;
-        } else if (current == 0) {
-            if (previous > 0) {
-                BatteryNotifier::getInstance().noteStopAudio(uid);
-            }
-            it = mBatteryCounter.erase(it); // std::map<> is stable on iterator erase.
-        } else /* (current < 0) */ {
-            LOG_ALWAYS_FATAL("negative battery count %zd", current);
-        }
-    }
 }
 
 template <typename T>
@@ -2045,6 +2024,7 @@
 // Call only from threadLoop() or when it is idle.
 // Do not call from high performance code as this may do binder rpc to the MediaMetrics service.
 void ThreadBase::sendStatistics(bool force)
+NO_THREAD_SAFETY_ANALYSIS
 {
     // Do not log if we have no stats.
     // We choose the timestamp verifier because it is the most likely item to be present.
@@ -2076,8 +2056,8 @@
     item->setInt64(MM_PREFIX "channelMask", (int64_t)mChannelMask);
     item->setCString(MM_PREFIX "encoding", toString(mFormat).c_str());
     item->setInt32(MM_PREFIX "frameCount", (int32_t)mFrameCount);
-    item->setCString(MM_PREFIX "outDevice", toString(outDeviceTypes()).c_str());
-    item->setCString(MM_PREFIX "inDevice", toString(inDeviceType()).c_str());
+    item->setCString(MM_PREFIX "outDevice", toString(outDeviceTypes_l()).c_str());
+    item->setCString(MM_PREFIX "inDevice", toString(inDeviceType_l()).c_str());
 
     // thread statistics
     if (mIoJitterMs.getN() > 0) {
@@ -2358,10 +2338,7 @@
     dprintf(fd, "  Total writes: %d\n", mNumWrites);
     dprintf(fd, "  Delayed writes: %d\n", mNumDelayedWrites);
     dprintf(fd, "  Blocked in write: %s\n", mInWrite ? "yes" : "no");
-    dprintf(fd, "  Suspend count: %d\n", mSuspended);
-    dprintf(fd, "  Sink buffer : %p\n", mSinkBuffer);
-    dprintf(fd, "  Mixer buffer: %p\n", mMixerBuffer);
-    dprintf(fd, "  Effect buffer: %p\n", mEffectBuffer);
+    dprintf(fd, "  Suspend count: %d\n", (int32_t)mSuspended);
     dprintf(fd, "  Fast track availMask=%#x\n", mFastTrackAvailMask);
     dprintf(fd, "  Standby delay ns=%lld\n", (long long)mStandbyDelayNs);
     AudioStreamOut *output = mOutput;
@@ -2434,6 +2411,7 @@
     }
 
     if (isBitPerfect) {
+        audio_utils::lock_guard _l(mutex());
         sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
         if (chain.get() != nullptr) {
             // Bit-perfect is required according to the configuration and preferred mixer
@@ -2791,6 +2769,8 @@
     return latency_l();
 }
 uint32_t PlaybackThread::latency_l() const
+NO_THREAD_SAFETY_ANALYSIS
+// Fix later.
 {
     uint32_t latency;
     if (initCheck() == NO_ERROR && mOutput->stream->getLatency(&latency) == OK) {
@@ -2858,7 +2838,6 @@
 
 // addTrack_l() must be called with ThreadBase::mutex() held
 status_t PlaybackThread::addTrack_l(const sp<IAfTrack>& track)
-NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mutex()
 {
     status_t status = ALREADY_EXISTS;
 
@@ -3026,7 +3005,7 @@
     return mOutput->stream->selectPresentation(presentationId, programId);
 }
 
-void PlaybackThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
+void PlaybackThread::ioConfigChanged_l(audio_io_config_event_t event, pid_t pid,
                                                    audio_port_handle_t portId) {
     ALOGV("PlaybackThread::ioConfigChanged, thread %p, event %d", this, event);
     sp<AudioIoDescriptor> desc;
@@ -3048,7 +3027,7 @@
         desc = sp<AudioIoDescriptor>::make(mId);
         break;
     }
-    mAfThreadCallback->ioConfigChanged(event, desc, pid);
+    mAfThreadCallback->ioConfigChanged_l(event, desc, pid);
 }
 
 void PlaybackThread::onWriteReady()
@@ -3234,8 +3213,8 @@
     if (hasMixer()) {
         mNormalFrameCount = (mNormalFrameCount + 15) & ~15;
     }
-    ALOGI("HAL output buffer size %zu frames, normal sink buffer size %zu frames", mFrameCount,
-            mNormalFrameCount);
+    ALOGI("HAL output buffer size %zu frames, normal sink buffer size %zu frames",
+            (size_t)mFrameCount, mNormalFrameCount);
 
     // Check if we want to throttle the processing to no more than 2x normal rate
     mThreadThrottle = property_get_bool("af.thread.throttle", true /* default_value */);
@@ -3472,7 +3451,7 @@
             ALOGD("ro.audio.silent is ignored since no output device is set");
             return;
         }
-        if (isSingleDeviceType(outDeviceTypes(), AUDIO_DEVICE_OUT_REMOTE_SUBMIX)) {
+        if (isSingleDeviceType(outDeviceTypes_l(), AUDIO_DEVICE_OUT_REMOTE_SUBMIX)) {
             ALOGD("ro.audio.silent will be ignored for threads on AUDIO_DEVICE_OUT_REMOTE_SUBMIX");
             return;
         }
@@ -3642,7 +3621,7 @@
 
     // make sure standby delay is not too short when connected to an A2DP sink to avoid
     // truncating audio when going to standby.
-    if (!Intersection(outDeviceTypes(),  getAudioDeviceOutAllA2dpSet()).empty()) {
+    if (!Intersection(outDeviceTypes_l(),  getAudioDeviceOutAllA2dpSet()).empty()) {
         if (mStandbyDelayNs < kDefaultStandbyTimeInNsecs) {
             mStandbyDelayNs = kDefaultStandbyTimeInNsecs;
         }
@@ -3990,7 +3969,7 @@
         // If the device is AUDIO_DEVICE_OUT_BUS, check for downstream latency.
         //
         // Note: we access outDeviceTypes() outside of mutex().
-        if (isMsdDevice() && outDeviceTypes().count(AUDIO_DEVICE_OUT_BUS) != 0) {
+        if (isMsdDevice() && outDeviceTypes_l().count(AUDIO_DEVICE_OUT_BUS) != 0) {
             // Here, we try for the AF lock, but do not block on it as the latency
             // is more informational.
             if (mAfThreadCallback->mutex().try_lock()) {
@@ -4135,7 +4114,7 @@
             // mMixerStatusIgnoringFastTracks is also updated internally
             mMixerStatus = prepareTracks_l(&tracksToRemove);
 
-            mActiveTracks.updatePowerState(this);
+            mActiveTracks.updatePowerState_l(this);
 
             metadataUpdate = updateMetadata_l();
 
@@ -4177,7 +4156,7 @@
             setHalLatencyMode_l();
 
             for (const auto &track : mActiveTracks ) {
-                track->updateTeePatches();
+                track->updateTeePatches_l();
             }
 
             // signal actual start of output stream when the render position reported by the kernel
@@ -4508,9 +4487,10 @@
                                 // notify of throttle end on debug log
                                 // but prevent spamming for bluetooth
                                 ALOGD_IF(!isSingleDeviceType(
-                                                 outDeviceTypes(), audio_is_a2dp_out_device) &&
+                                                 outDeviceTypes_l(), audio_is_a2dp_out_device) &&
                                          !isSingleDeviceType(
-                                                 outDeviceTypes(), audio_is_hearing_aid_out_device),
+                                                 outDeviceTypes_l(),
+                                                 audio_is_hearing_aid_out_device),
                                         "mixer(%p) throttle end: throttle time(%u)", this, diff);
                                 mThreadThrottleEndMs = mThreadThrottleTimeMs;
                             }
@@ -4606,7 +4586,7 @@
                 timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
                 mSampleRate);
 
-        if (isTimestampCorrectionEnabled()) {
+        if (isTimestampCorrectionEnabled_l()) {
             ALOGVV("TS_BEFORE: %d %lld %lld", id(),
                     (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
                     (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
@@ -4685,7 +4665,7 @@
             // and we use systemTime().
             mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
             mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
-                    ? systemTime() : mLastIoBeginNs;
+                    ? systemTime() : (int64_t)mLastIoBeginNs;
         }
 
         for (const sp<IAfTrack>& t : mActiveTracks) {
@@ -5326,7 +5306,8 @@
 // shared by MIXER and DIRECT, overridden by DUPLICATING
 void PlaybackThread::threadLoop_standby()
 {
-    ALOGV("Audio hardware entering standby, mixer %p, suspend count %d", this, mSuspended);
+    ALOGV("%s: audio hardware entering standby, mixer %p, suspend count %d",
+            __func__, this, (int32_t)mSuspended);
     mOutput->standby();
     if (mUseAsyncWrite != 0) {
         // discard any pending drain or write ack by incrementing sequence
@@ -6367,7 +6348,7 @@
 void MixerThread::dumpInternals_l(int fd, const Vector<String16>& args)
 {
     PlaybackThread::dumpInternals_l(fd, args);
-    dprintf(fd, "  Thread throttle time (msecs): %u\n", mThreadThrottleTimeMs);
+    dprintf(fd, "  Thread throttle time (msecs): %u\n", (uint32_t)mThreadThrottleTimeMs);
     dprintf(fd, "  AudioMixer tracks: %s\n", mAudioMixer->trackNames().c_str());
     dprintf(fd, "  Master mono: %s\n", mMasterMono ? "on" : "off");
     dprintf(fd, "  Master balance: %f (%s)\n", mMasterBalance.load(),
@@ -7218,6 +7199,7 @@
 {
     if (mFlushPending || mHwPaused) {
         // If a flush is pending or track was paused, just discard buffered data
+        audio_utils::lock_guard l(mutex());
         flushHw_l();
     } else {
         mMixerStatus = MIXER_DRAIN_ALL;
@@ -7704,8 +7686,13 @@
             mOutputTracks[i]->destroy();
             mOutputTracks.removeAt(i);
             updateWaitTime_l();
-            if (thread->getOutput() == mOutput) {
-                mOutput = NULL;
+            // NO_THREAD_SAFETY_ANALYSIS
+            // Lambda workaround: since thread != this,
+            // we can safely call getOutput() on the remote thread.
+            const bool equalOutput =
+                    [&](){ return thread->getOutput() == mOutput; }();
+            if (equalOutput) {
+                mOutput = nullptr;
             }
             return;
         }
@@ -7970,10 +7957,11 @@
         break;
     case FastCapture_Static:
         initFastCapture = !mIsMsdDevice // Disable fast capture for MSD BUS devices.
+                && audio_is_linear_pcm(mFormat)
                 && (mFrameCount * 1000) / mSampleRate < kMinNormalCaptureBufferSizeMs;
-        ALOGV("%p kUseFastCapture = Static, (%lld * 1000) / %u vs %u, initFastCapture = %d "
-                "mIsMsdDevice = %d", this, (long long)mFrameCount, mSampleRate,
-                kMinNormalCaptureBufferSizeMs, initFastCapture, mIsMsdDevice);
+        ALOGV("%p kUseFastCapture = Static, format = 0x%x, (%lld * 1000) / %u vs %u, "
+                "initFastCapture = %d, mIsMsdDevice = %d", this, mFormat, (long long)mFrameCount,
+                mSampleRate, kMinNormalCaptureBufferSizeMs, initFastCapture, mIsMsdDevice);
         break;
     // case FastCapture_Dynamic:
     }
@@ -8117,6 +8105,9 @@
     // used to request a deferred sleep, to be executed later while mutex is unlocked
     uint32_t sleepUs = 0;
 
+    // timestamp correction enable is determined under lock, used in processing step.
+    bool timestampCorrectionEnabled = false;
+
     int64_t lastLoopCountRead = -2;  // never matches "previous" loop, when loopCount = 0.
 
     // loop while there is work to do
@@ -8261,7 +8252,7 @@
 
             }
 
-            mActiveTracks.updatePowerState(this);
+            mActiveTracks.updatePowerState_l(this);
 
             updateMetadata_l();
 
@@ -8281,6 +8272,7 @@
             }
             sleepUs = 0;
 
+            timestampCorrectionEnabled = isTimestampCorrectionEnabled_l();
             lockEffectChains_l(effectChains);
         }
 
@@ -8437,9 +8429,7 @@
                     && time > mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]) {
 
                 mTimestampVerifier.add(position, time, mSampleRate);
-
-                // Correct timestamps
-                if (isTimestampCorrectionEnabled()) {
+                if (timestampCorrectionEnabled) {
                     ALOGVV("TS_BEFORE: %d %lld %lld",
                             id(), (long long)time, (long long)position);
                     auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
@@ -8541,9 +8531,11 @@
                 // from framesIn.
                 // This isn't strictly necessary but helps limit buffer resizing in
                 // RecordBufferConverter.  TODO: remove when no longer needed.
-                framesOut = min(framesOut,
-                        destinationFramesPossible(
-                                framesIn, mSampleRate, activeTrack->sampleRate()));
+                if (audio_is_linear_pcm(activeTrack->format())) {
+                    framesOut = min(framesOut,
+                            destinationFramesPossible(
+                                    framesIn, mSampleRate, activeTrack->sampleRate()));
+                }
 
                 if (activeTrack->isDirect()) {
                     // No RecordBufferConverter used for direct streams. Pass
@@ -9418,7 +9410,7 @@
 {
     // disable AEC and NS if the device is a BT SCO headset supporting those
     // pre processings
-    bool suspend = audio_is_bluetooth_sco_device(inDeviceType()) &&
+    bool suspend = audio_is_bluetooth_sco_device(inDeviceType_l()) &&
                         mAfThreadCallback->btNrecIsOff();
     if (mBtNrecSuspended.exchange(suspend) != suspend) {
         for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -9529,7 +9521,7 @@
     return {};
 }
 
-void RecordThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
+void RecordThread::ioConfigChanged_l(audio_io_config_event_t event, pid_t pid,
                                                  audio_port_handle_t portId) {
     sp<AudioIoDescriptor> desc;
     switch (event) {
@@ -9547,15 +9539,29 @@
         desc = sp<AudioIoDescriptor>::make(mId);
         break;
     }
-    mAfThreadCallback->ioConfigChanged(event, desc, pid);
+    mAfThreadCallback->ioConfigChanged_l(event, desc, pid);
 }
 
 void RecordThread::readInputParameters_l()
 {
-    status_t result = mInput->stream->getAudioProperties(&mSampleRate, &mChannelMask, &mHALFormat);
-    LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving audio properties from HAL: %d", result);
-    mFormat = mHALFormat;
+    const audio_config_base_t audioConfig = mInput->getAudioProperties();
+    mSampleRate = audioConfig.sample_rate;
+    mChannelMask = audioConfig.channel_mask;
+    if (!audio_is_input_channel(mChannelMask)) {
+        LOG_ALWAYS_FATAL("Channel mask %#x not valid for input", mChannelMask);
+    }
+
     mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
+
+    // Get actual HAL format.
+    status_t result = mInput->stream->getAudioProperties(nullptr, nullptr, &mHALFormat);
+    LOG_ALWAYS_FATAL_IF(result != OK, "Error when retrieving input stream format: %d", result);
+    // Get format from the shim, which will be different than the HAL format
+    // if recording compressed audio from IEC61937 wrapped sources.
+    mFormat = audioConfig.format;
+    if (!audio_is_valid_format(mFormat)) {
+        LOG_ALWAYS_FATAL("Format %#x not valid for input", mFormat);
+    }
     if (audio_is_linear_pcm(mFormat)) {
         LOG_ALWAYS_FATAL_IF(mChannelCount > FCC_LIMIT, "HAL channel count %d > %d",
                 mChannelCount, FCC_LIMIT);
@@ -9563,8 +9569,7 @@
         // Can have more that FCC_LIMIT channels in encoded streams.
         ALOGI("HAL format %#x is not linear pcm", mFormat);
     }
-    result = mInput->stream->getFrameSize(&mFrameSize);
-    LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving frame size from HAL: %d", result);
+    mFrameSize = mInput->getFrameSize();
     LOG_ALWAYS_FATAL_IF(mFrameSize <= 0, "Error frame size was %zu but must be greater than zero",
             mFrameSize);
     result = mInput->stream->getBufferSize(&mBufferSize);
@@ -10014,25 +10019,27 @@
 void MmapThread::disconnect()
 {
     ActiveTracks<IAfMmapTrack> activeTracks;
+    audio_port_handle_t localPortId;
     {
         audio_utils::lock_guard _l(mutex());
         for (const sp<IAfMmapTrack>& t : mActiveTracks) {
             activeTracks.add(t);
         }
+        localPortId = mPortId;
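+        // Copied under the lock; used below without holding the thread mutex.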
     }
     for (const sp<IAfMmapTrack>& t : activeTracks) {
         stop(t->portId());
     }
     // This will decrement references and may cause the destruction of this thread.
     if (isOutput()) {
-        AudioSystem::releaseOutput(mPortId);
+        AudioSystem::releaseOutput(localPortId);
     } else {
-        AudioSystem::releaseInput(mPortId);
+        AudioSystem::releaseInput(localPortId);
     }
 }
 
 
-void MmapThread::configure(const audio_attributes_t* attr,
+void MmapThread::configure_l(const audio_attributes_t* attr,
                                                 audio_stream_type_t streamType __unused,
                                                 audio_session_t sessionId,
                                                 const sp<MmapStreamCallback>& callback,
@@ -10049,6 +10056,7 @@
 status_t MmapThread::createMmapBuffer(int32_t minSizeFrames,
                                   struct audio_mmap_buffer_info *info)
 {
+    audio_utils::lock_guard l(mutex());
     if (mHalStream == 0) {
         return NO_INIT;
     }
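The hunks around here move locking to the public MmapThread entry points (createMmapBuffer(), getMmapPosition(), start(), stop()) and reserve the "_l" suffix for helpers that expect the mutex to be held already, e.g. configure()/configure_l(). A minimal, self-contained sketch of that convention, using std::mutex instead of audio_utils::mutex and purely illustrative names:

    #include <mutex>

    class MmapLike {
    public:
        // Public API: take the object lock once at the boundary...
        int createBuffer(int minSizeFrames) {
            std::lock_guard<std::mutex> l(mMutex);
            return createBuffer_l(minSizeFrames);
        }
    private:
        // ...and "_l" helpers assume the caller already holds mMutex.
        int createBuffer_l(int minSizeFrames) {
            if (!mStreamOpen) return -1;   // stands in for the NO_INIT check above
            return minSizeFrames;
        }
        std::mutex mMutex;
        bool mStreamOpen = false;
    };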
@@ -10058,6 +10066,7 @@
 
 status_t MmapThread::getMmapPosition(struct audio_mmap_position* position) const
 {
+    audio_utils::lock_guard l(mutex());
     if (mHalStream == 0) {
         return NO_INIT;
     }
@@ -10085,6 +10094,7 @@
                                          const audio_attributes_t *attr,
                                          audio_port_handle_t *handle)
 {
+    audio_utils::lock_guard l(mutex());
     ALOGV("%s clientUid %d mStandby %d mPortId %d *handle %d", __FUNCTION__,
           client.attributionSource.uid, mStandby, mPortId, *handle);
     if (mHalStream == 0) {
@@ -10095,7 +10105,7 @@
 
     // For the first track, reuse portId and session allocated when the stream was opened.
     if (*handle == mPortId) {
-        acquireWakeLock();
+        acquireWakeLock_l();
         return NO_ERROR;
     }
 
@@ -10105,20 +10115,23 @@
     const AttributionSourceState adjAttributionSource = afutils::checkAttributionSourcePackage(
             client.attributionSource);
 
+    const auto localSessionId = mSessionId;
+    auto localAttr = mAttr;
     if (isOutput()) {
         audio_config_t config = AUDIO_CONFIG_INITIALIZER;
         config.sample_rate = mSampleRate;
         config.channel_mask = mChannelMask;
         config.format = mFormat;
-        audio_stream_type_t stream = streamType();
+        audio_stream_type_t stream = streamType_l();
         audio_output_flags_t flags =
                 (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
         audio_port_handle_t deviceId = mDeviceId;
         std::vector<audio_io_handle_t> secondaryOutputs;
         bool isSpatialized;
         bool isBitPerfect;
-        ret = AudioSystem::getOutputForAttr(&mAttr, &io,
-                                            mSessionId,
+        mutex().unlock();
+        ret = AudioSystem::getOutputForAttr(&localAttr, &io,
+                                            localSessionId,
                                             &stream,
                                             adjAttributionSource,
                                             &config,
@@ -10128,6 +10141,8 @@
                                             &secondaryOutputs,
                                             &isSpatialized,
                                             &isBitPerfect);
+        mutex().lock();
+        mAttr = localAttr;
         ALOGD_IF(!secondaryOutputs.empty(),
                  "MmapThread::start does not support secondary outputs, ignoring them");
     } else {
@@ -10136,14 +10151,17 @@
         config.channel_mask = mChannelMask;
         config.format = mFormat;
         audio_port_handle_t deviceId = mDeviceId;
-        ret = AudioSystem::getInputForAttr(&mAttr, &io,
+        mutex().unlock();
+        ret = AudioSystem::getInputForAttr(&localAttr, &io,
                                               RECORD_RIID_INVALID,
-                                              mSessionId,
+                                              localSessionId,
                                               adjAttributionSource,
                                               &config,
                                               AUDIO_INPUT_FLAG_MMAP_NOIRQ,
                                               &deviceId,
                                               &portId);
+        mutex().lock();
+        // localAttr is const for getInputForAttr.
     }
     // APM should not choose a different input or output stream for the same set of attributes
     // and audio configuration
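MmapThread::start() above drops mutex() around the AudioSystem::getOutputForAttr()/getInputForAttr() binder calls, working on local copies (localAttr, localSessionId) and writing results back once the lock is re-acquired, presumably so the thread mutex is never held across a call that can block or call back into audioflinger. A self-contained sketch of that unlock-around-IPC shape, with std::mutex and a hypothetical remoteCall() standing in for the AudioSystem call:

    #include <mutex>

    struct Attr { int source = 0; };

    // Hypothetical stand-in for a binder call that may block or re-enter the
    // service and take other locks; never invoke it while holding the thread mutex.
    int remoteCall(Attr* attr) { attr->source = 1; return 0; }

    class ThreadLike {
    public:
        int start() {
            std::unique_lock<std::mutex> lock(mMutex);
            Attr localAttr = mAttr;      // snapshot guarded state under the lock
            lock.unlock();               // release across the IPC
            const int ret = remoteCall(&localAttr);
            lock.lock();                 // re-acquire before publishing the result
            mAttr = localAttr;
            return ret;
        }
    private:
        std::mutex mMutex;
        Attr mAttr;
    };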
@@ -10154,18 +10172,20 @@
     }
 
     if (isOutput()) {
+        mutex().unlock();
         ret = AudioSystem::startOutput(portId);
+        mutex().lock();
     } else {
         {
             // Add the track record before starting input so that the silent status for the
             // client can be cached.
-            audio_utils::lock_guard _l(mutex());
             setClientSilencedState_l(portId, false /*silenced*/);
         }
+        mutex().unlock();
         ret = AudioSystem::startInput(portId);
+        mutex().lock();
     }
 
-    audio_utils::lock_guard _l(mutex());
     // abort if start is rejected by audio policy manager
     if (ret != NO_ERROR) {
         ALOGE("%s: error start rejected by AudioPolicyManager = %d", __FUNCTION__, ret);
@@ -10209,7 +10229,7 @@
     mActiveTracks.add(track);
     sp<IAfEffectChain> chain = getEffectChain_l(mSessionId);
     if (chain != 0) {
-        chain->setStrategy(getStrategyForStream(streamType()));
+        chain->setStrategy(getStrategyForStream(streamType_l()));
         chain->incTrackCnt();
         chain->incActiveTrackCnt();
     }
@@ -10231,18 +10251,17 @@
 status_t MmapThread::stop(audio_port_handle_t handle)
 {
     ALOGV("%s handle %d", __FUNCTION__, handle);
+    audio_utils::lock_guard l(mutex());
 
     if (mHalStream == 0) {
         return NO_INIT;
     }
 
     if (handle == mPortId) {
-        releaseWakeLock();
+        releaseWakeLock_l();
         return NO_ERROR;
     }
 
-    audio_utils::lock_guard _l(mutex());
-
     sp<IAfMmapTrack> track;
     for (const sp<IAfMmapTrack>& t : mActiveTracks) {
         if (handle == t->portId()) {
@@ -10283,8 +10302,10 @@
 }
 
 status_t MmapThread::standby()
+NO_THREAD_SAFETY_ANALYSIS  // clang bug
 {
     ALOGV("%s", __FUNCTION__);
+    audio_utils::lock_guard l(mutex());
 
     if (mHalStream == 0) {
         return NO_INIT;
@@ -10298,7 +10319,7 @@
         mThreadSnapshot.onEnd();
         mStandby = true;
     }
-    releaseWakeLock();
+    releaseWakeLock_l();
     return NO_ERROR;
 }
 
@@ -10345,7 +10366,10 @@
 
 bool MmapThread::threadLoop()
 {
-    checkSilentMode_l();
+    {
+        audio_utils::unique_lock _l(mutex());
+        checkSilentMode_l();
+    }
 
     const String8 myName(String8::format("thread %p type %d TID %d", this, mType, gettid()));
 
@@ -10385,7 +10409,7 @@
 
         checkInvalidTracks_l();
 
-        mActiveTracks.updatePowerState(this);
+        mActiveTracks.updatePowerState_l(this);
 
         updateMetadata_l();
 
@@ -10442,7 +10466,7 @@
     return {};
 }
 
-void MmapThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
+void MmapThread::ioConfigChanged_l(audio_io_config_event_t event, pid_t pid,
                                                audio_port_handle_t portId __unused) {
     sp<AudioIoDescriptor> desc;
     bool isInput = false;
@@ -10464,7 +10488,7 @@
         desc = sp<AudioIoDescriptor>::make(mId);
         break;
     }
-    mAfThreadCallback->ioConfigChanged(event, desc, pid);
+    mAfThreadCallback->ioConfigChanged_l(event, desc, pid);
 }
 
 status_t MmapThread::createAudioPatch_l(const struct audio_patch* patch,
@@ -10581,6 +10605,7 @@
 }
 
 void MmapThread::toAudioPortConfig(struct audio_port_config* config)
+NO_THREAD_SAFETY_ANALYSIS // mAudioHwDev handle access
 {
     ThreadBase::toAudioPortConfig(config);
     if (isOutput()) {
@@ -10698,7 +10723,6 @@
 }
 
 void MmapThread::checkInvalidTracks_l()
-NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mutex()
 {
     sp<MmapStreamCallback> callback;
     for (const sp<IAfMmapTrack>& track : mActiveTracks) {
@@ -10760,14 +10784,24 @@
         AudioHwDevice *hwDev,  AudioStreamOut *output, bool systemReady)
     : MmapThread(afThreadCallback, id, hwDev, output->stream, systemReady, true /* isOut */),
       mStreamType(AUDIO_STREAM_MUSIC),
-      mStreamVolume(1.0),
-      mStreamMute(false),
       mOutput(output)
 {
     snprintf(mThreadName, kThreadNameLength, "AudioMmapOut_%X", id);
     mChannelCount = audio_channel_count_from_out_mask(mChannelMask);
     mMasterVolume = afThreadCallback->masterVolume_l();
     mMasterMute = afThreadCallback->masterMute_l();
+
+    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+        const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
+        mStreamTypes[stream].volume = 0.0f;
+        mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+    }
+    // Audio patch and call assistant volume are always max
+    mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
+    mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
+    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
+    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
+
     if (mAudioHwDev) {
         if (mAudioHwDev->canSetMasterVolume()) {
             mMasterVolume = 1.0;
@@ -10786,7 +10820,8 @@
                                                 audio_port_handle_t deviceId,
                                                 audio_port_handle_t portId)
 {
-    MmapThread::configure(attr, streamType, sessionId, callback, deviceId, portId);
+    audio_utils::lock_guard l(mutex());
+    MmapThread::configure_l(attr, streamType, sessionId, callback, deviceId, portId);
     mStreamType = streamType;
 }
 
@@ -10824,8 +10859,8 @@
 void MmapPlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
 {
     audio_utils::lock_guard _l(mutex());
+    mStreamTypes[stream].volume = value;
     if (stream == mStreamType) {
-        mStreamVolume = value;
         broadcast_l();
     }
 }
@@ -10833,17 +10868,14 @@
 float MmapPlaybackThread::streamVolume(audio_stream_type_t stream) const
 {
     audio_utils::lock_guard _l(mutex());
-    if (stream == mStreamType) {
-        return mStreamVolume;
-    }
-    return 0.0f;
+    return mStreamTypes[stream].volume;
 }
 
 void MmapPlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
 {
     audio_utils::lock_guard _l(mutex());
+    mStreamTypes[stream].mute = muted;
     if (stream == mStreamType) {
-        mStreamMute= muted;
         broadcast_l();
     }
 }
@@ -10883,14 +10915,13 @@
 {
     float volume;
 
-    if (mMasterMute || mStreamMute) {
+    if (mMasterMute || streamMuted_l()) {
         volume = 0;
     } else {
-        volume = mMasterVolume * mStreamVolume;
+        volume = mMasterVolume * streamVolume_l();
     }
 
     if (volume != mHalVolFloat) {
-
         // Convert volumes from float to 8.24
         uint32_t vol = (uint32_t)(volume * (1 << 24));
 
@@ -10924,8 +10955,8 @@
             track->setMetadataHasChanged();
             track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
                 /*muteState=*/{mMasterMute,
-                               mStreamVolume == 0.f,
-                               mStreamMute,
+                               streamVolume_l() == 0.f,
+                               streamMuted_l(),
                                // TODO(b/241533526): adjust logic to include mute from AppOps
                                false /*muteFromPlaybackRestricted*/,
                                false /*muteFromClientVolume*/,
@@ -11038,7 +11069,7 @@
     MmapThread::dumpInternals_l(fd, args);
 
     dprintf(fd, "  Stream type: %d Stream volume: %f HAL volume: %f Stream mute %d\n",
-            mStreamType, mStreamVolume, mHalVolFloat, mStreamMute);
+            mStreamType, streamVolume_l(), mHalVolFloat, streamMuted_l());
     dprintf(fd, "  Master volume: %f Master mute %d\n", mMasterVolume, mMasterMute);
 }
 
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 3105ad7..b84079a 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -44,6 +44,13 @@
 public:
     static const char *threadTypeToString(type_t type);
 
+    // ThreadBase_ThreadLoop is a virtual mutex (always nullptr) that
+    // guards methods and variables that ONLY run and are accessed
+    // on the single threaded threadLoop().
+    //
+    // As access is by a single thread, the variables are thread safe.
+    static audio_utils::mutex* ThreadBase_ThreadLoop;
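ThreadBase_ThreadLoop is used purely as a compile-time capability for clang's thread-safety analysis: it is never locked at runtime (always nullptr), but annotating threadLoop()-only methods and members with it, as the REQUIRES(ThreadBase_ThreadLoop) and GUARDED_BY(ThreadBase_ThreadLoop) hunks below do, lets the analysis reject accesses from any other path. A rough, self-contained sketch of the idea, assuming clang with -Wthread-safety and spelling the attributes out directly instead of using the audio_utils macros:

    #define CAPABILITY(x)   __attribute__((capability(x)))
    #define GUARDED_BY(x)   __attribute__((guarded_by(x)))
    #define REQUIRES(...)   __attribute__((requires_capability(__VA_ARGS__)))
    #define NO_TSA          __attribute__((no_thread_safety_analysis))

    struct CAPABILITY("mutex") LoopCapability {};

    class ExampleThread {
    public:
        // Thread entry point: conceptually "holds" the fake capability, so the
        // analysis is suppressed here and enforced everywhere else.
        void run() NO_TSA { threadLoop(); }
    private:
        // Never locked; exists only so the annotations have something to name.
        static inline LoopCapability* const sThreadLoop = nullptr;

        void threadLoop() REQUIRES(sThreadLoop) { ++mIterations; }

        // Flagged at compile time if touched outside the threadLoop() call chain.
        int mIterations GUARDED_BY(sThreadLoop) = 0;
    };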
+
     IAfThreadCallback* afThreadCallback() const final { return mAfThreadCallback.get(); }
 
     ThreadBase(const sp<IAfThreadCallback>& afThreadCallback, audio_io_handle_t id,
@@ -51,7 +58,7 @@
     ~ThreadBase() override;
 
     status_t readyToRun() final;
-    void clearPowerManager() final;
+    void clearPowerManager() final EXCLUDES_ThreadBase_Mutex;
 
     // base for record and playback
     enum {
@@ -68,11 +75,9 @@
 
     class ConfigEventData: public RefBase {
     public:
-        virtual ~ConfigEventData() {}
-
         virtual  void dump(char *buffer, size_t size) = 0;
     protected:
-        ConfigEventData() {}
+        ConfigEventData() = default;
     };
 
     // Config event sequence by client if status needed (e.g binder thread calling setParameters()):
@@ -103,14 +108,22 @@
             }
         }
 
-        audio_utils::mutex& mutex() const { return mMutex; }
+        audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::ConfigEvent_Mutex) {
+            return mMutex;
+        }
         const int mType; // event type e.g. CFG_EVENT_IO
         mutable audio_utils::mutex mMutex; // mutex associated with mCondition
         audio_utils::condition_variable mCondition; // condition for status return
+
+        // NO_THREAD_SAFETY_ANALYSIS Can we add GUARDED_BY?
         status_t mStatus; // status communicated to sender
-        bool mWaitStatus; // true if sender is waiting for status
-        bool mRequiresSystemReady; // true if must wait for system ready to enter event queue
-        sp<ConfigEventData> mData;     // event specific parameter data
+
+        bool mWaitStatus GUARDED_BY(mutex()); // true if sender is waiting for status
+        // true if must wait for system ready to enter event queue
+        bool mRequiresSystemReady GUARDED_BY(mutex());
+
+        // NO_THREAD_SAFETY_ANALYSIS Can we add GUARDED_BY?
+        sp<ConfigEventData> mData; // event specific parameter data
 
     protected:
         explicit ConfigEvent(int type, bool requiresSystemReady = false) :
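The GUARDED_BY(mutex()) annotations above (and on the ThreadBase members further down) work because mutex() is an accessor the analysis can resolve back to the underlying member mutex; RETURN_CAPABILITY presumably maps to clang's lock_returned attribute, which the sketch below uses directly. A small, self-contained example of the pattern, assuming libc++'s annotated std::mutex:

    #define GUARDED_BY(x)    __attribute__((guarded_by(x)))
    #define LOCK_RETURNED(x) __attribute__((lock_returned(x)))

    #include <mutex>

    class Event {
    public:
        // Tells the analysis that mutex() denotes mMutex.
        std::mutex& mutex() const LOCK_RETURNED(mMutex) { return mMutex; }

        void setStatus(int status) {
            std::lock_guard<std::mutex> l(mutex());
            mStatus = status;                 // OK: mMutex is held via mutex()
        }
    private:
        mutable std::mutex mMutex;
        int mStatus GUARDED_BY(mutex()) = 0;  // guarded through the accessor
    };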
@@ -197,7 +210,7 @@
         }
 
         const struct audio_patch mPatch;
-        audio_patch_handle_t mHandle;
+        audio_patch_handle_t mHandle;  // cannot be const
     };
 
     class CreateAudioPatchConfigEvent : public ConfigEvent {
@@ -219,7 +232,7 @@
             snprintf(buffer, size, "- Patch handle: %u\n", mHandle);
         }
 
-        audio_patch_handle_t mHandle;
+        const audio_patch_handle_t mHandle;
     };
 
     class ReleaseAudioPatchConfigEvent : public ConfigEvent {
@@ -240,7 +253,7 @@
             snprintf(buffer, size, "- Devices: %s", android::toString(mOutDevices).c_str());
         }
 
-        DeviceDescriptorBaseVector mOutDevices;
+        const DeviceDescriptorBaseVector mOutDevices;
     };
 
     class UpdateOutDevicesConfigEvent : public ConfigEvent {
@@ -260,7 +273,7 @@
             snprintf(buffer, size, "- mMaxSharedAudioHistoryMs: %d", mMaxSharedAudioHistoryMs);
         }
 
-        int32_t mMaxSharedAudioHistoryMs;
+        const int32_t mMaxSharedAudioHistoryMs;
     };
 
     class ResizeBufferConfigEvent : public ConfigEvent {
@@ -289,15 +302,14 @@
     class PMDeathRecipient : public IBinder::DeathRecipient {
     public:
         explicit    PMDeathRecipient(const wp<ThreadBase>& thread) : mThread(thread) {}
-        virtual     ~PMDeathRecipient() {}
 
         // IBinder::DeathRecipient
-        virtual     void        binderDied(const wp<IBinder>& who);
+        void binderDied(const wp<IBinder>& who) final;
 
     private:
         DISALLOW_COPY_AND_ASSIGN(PMDeathRecipient);
 
-        wp<ThreadBase> mThread;
+        const wp<ThreadBase> mThread;
     };
 
     type_t type() const final { return mType; }
@@ -311,8 +323,9 @@
     uint32_t channelCount() const final { return mChannelCount; }
     audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
     uint32_t hapticChannelCount() const override { return 0; }
-    uint32_t latency_l() const override { return 0; }
-    void setVolumeForOutput_l(float /* left */, float /* right */) const override {}
+    uint32_t latency_l() const override { return 0; }  // NO_THREAD_SAFETY_ANALYSIS
+    void setVolumeForOutput_l(float /* left */, float /* right */) const override
+            REQUIRES(mutex()) {}
 
                 // Returns the HAL's frame count, i.e. the fast mixer buffer size.
     size_t frameCountHAL() const final { return mFrameCount; }
@@ -320,44 +333,49 @@
 
     // Should be "virtual status_t requestExitAndWait()" and override same
     // method in Thread, but Thread::requestExitAndWait() is not yet virtual.
-    void exit() final;
-    status_t setParameters(const String8& keyValuePairs) final;
+    void exit() final EXCLUDES_ThreadBase_Mutex;
+    status_t setParameters(const String8& keyValuePairs) final EXCLUDES_ThreadBase_Mutex;
 
                 // sendConfigEvent_l() must be called with ThreadBase::mutex() held
                 // Can temporarily release the lock if waiting for a reply from
                 // processConfigEvents_l().
-    status_t sendConfigEvent_l(sp<ConfigEvent>& event);
+    status_t sendConfigEvent_l(sp<ConfigEvent>& event) REQUIRES(mutex());
     void sendIoConfigEvent(audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final;
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final EXCLUDES_ThreadBase_Mutex;
     void sendIoConfigEvent_l(audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final;
-    void sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp) final;
-    void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio, bool forApp) final;
-    status_t sendSetParameterConfigEvent_l(const String8& keyValuePair) final;
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final REQUIRES(mutex());
+    void sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp) final
+            EXCLUDES_ThreadBase_Mutex;
+    void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio, bool forApp) final
+            REQUIRES(mutex());
+    status_t sendSetParameterConfigEvent_l(const String8& keyValuePair) final REQUIRES(mutex());
     status_t sendCreateAudioPatchConfigEvent(const struct audio_patch* patch,
-            audio_patch_handle_t* handle) final;
-    status_t sendReleaseAudioPatchConfigEvent(audio_patch_handle_t handle) final;
+            audio_patch_handle_t* handle) final EXCLUDES_ThreadBase_Mutex;
+    status_t sendReleaseAudioPatchConfigEvent(audio_patch_handle_t handle) final
+            EXCLUDES_ThreadBase_Mutex;
     status_t sendUpdateOutDeviceConfigEvent(
-            const DeviceDescriptorBaseVector& outDevices) final;
-    void sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs) final;
-    void sendCheckOutputStageEffectsEvent() final;
-    void sendCheckOutputStageEffectsEvent_l() final;
-    void sendHalLatencyModesChangedEvent_l() final;
+            const DeviceDescriptorBaseVector& outDevices) final EXCLUDES_ThreadBase_Mutex;
+    void sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs) final REQUIRES(mutex());
+    void sendCheckOutputStageEffectsEvent() final EXCLUDES_ThreadBase_Mutex;
+    void sendCheckOutputStageEffectsEvent_l() final REQUIRES(mutex());
+    void sendHalLatencyModesChangedEvent_l() final REQUIRES(mutex());
 
-    void processConfigEvents_l() final;
+    void processConfigEvents_l() final REQUIRES(mutex());
     void setCheckOutputStageEffects() override {}
     void updateOutDevices(const DeviceDescriptorBaseVector& outDevices) override;
     void toAudioPortConfig(struct audio_port_config* config) override;
-    void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) override;
+    void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) override REQUIRES(mutex());
 
     // see note at declaration of mStandby, mOutDevice and mInDevice
     bool inStandby() const override { return mStandby; }
-    const DeviceTypeSet outDeviceTypes() const final {
+    const DeviceTypeSet outDeviceTypes_l() const final REQUIRES(mutex()) {
         return getAudioDeviceTypes(mOutDeviceTypeAddrs);
     }
-    audio_devices_t inDeviceType() const final { return mInDeviceTypeAddr.mType; }
-    DeviceTypeSet getDeviceTypes() const final {
-        return isOutput() ? outDeviceTypes() : DeviceTypeSet({inDeviceType()});
+    audio_devices_t inDeviceType_l() const final REQUIRES(mutex()) {
+        return mInDeviceTypeAddr.mType;
+    }
+    DeviceTypeSet getDeviceTypes_l() const final REQUIRES(mutex()) {
+        return isOutput() ? outDeviceTypes_l() : DeviceTypeSet({inDeviceType_l()});
     }
 
     const AudioDeviceTypeAddrVector& outDeviceTypeAddrs() const final {
@@ -410,23 +428,26 @@
     // get effect chain corresponding to session Id.
     sp<IAfEffectChain> getEffectChain(audio_session_t sessionId) const final;
     // same as getEffectChain() but must be called with ThreadBase mutex locked
-    sp<IAfEffectChain> getEffectChain_l(audio_session_t sessionId) const final;
-    std::vector<int> getEffectIds_l(audio_session_t sessionId) const final;
+    sp<IAfEffectChain> getEffectChain_l(audio_session_t sessionId) const final REQUIRES(mutex());
+    std::vector<int> getEffectIds_l(audio_session_t sessionId) const final REQUIRES(mutex());
 
                 // lock all effect chains Mutexes. Must be called before releasing the
                 // ThreadBase mutex before processing the mixer and effects. This guarantees the
                 // integrity of the chains during the process.
                 // Also sets the parameter 'effectChains' to current value of mEffectChains.
-    void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) final;
+    void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) final REQUIRES(mutex());
                 // unlock effect chains after process
     void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains) final;
                 // get a copy of mEffectChains vector
-    Vector<sp<IAfEffectChain>> getEffectChains_l() const final { return mEffectChains; };
+    Vector<sp<IAfEffectChain>> getEffectChains_l() const final REQUIRES(mutex()) {
+        return mEffectChains;
+    }
                 // set audio mode to all effect chains
     void setMode(audio_mode_t mode) final;
                 // get effect module with corresponding ID on specified audio session
     sp<IAfEffectModule> getEffect(audio_session_t sessionId, int effectId) const final;
-    sp<IAfEffectModule> getEffect_l(audio_session_t sessionId, int effectId) const final;
+    sp<IAfEffectModule> getEffect_l(audio_session_t sessionId, int effectId) const final
+            REQUIRES(mutex());
                 // add an effect module. Also creates the effect chain if none exists for
                 // the effects audio session. Only called in a context of moving an effect
                 // from one thread to another
@@ -434,20 +455,22 @@
             REQUIRES(audio_utils::AudioFlinger_Mutex, mutex());
                 // remove an effect module. Also removes the effect chain if this was the last
                 // effect
-    void removeEffect_l(const sp<IAfEffectModule>& effect, bool release = false) final;
+    void removeEffect_l(const sp<IAfEffectModule>& effect, bool release = false) final
+            REQUIRES(mutex());
                 // disconnect an effect handle from module and destroy module if last handle
     void disconnectEffectHandle(IAfEffectHandle* handle, bool unpinIfLast) final;
                 // detach all tracks connected to an auxiliary effect
-    void detachAuxEffect_l(int /* effectId */) override {}
+    void detachAuxEffect_l(int /* effectId */) override REQUIRES(mutex()) {}
     // TODO(b/291317898) - remove hasAudioSession_l below.
-    uint32_t hasAudioSession_l(audio_session_t sessionId) const override = 0;
-    uint32_t hasAudioSession(audio_session_t sessionId) const final {
-                    std::lock_guard _l(mutex());
-                    return hasAudioSession_l(sessionId);
-                }
+    uint32_t hasAudioSession_l(audio_session_t sessionId) const override REQUIRES(mutex()) = 0;
+    uint32_t hasAudioSession(audio_session_t sessionId) const final EXCLUDES_ThreadBase_Mutex {
+        audio_utils::lock_guard _l(mutex());
+        return hasAudioSession_l(sessionId);
+    }
 
                 template <typename T>
-                uint32_t hasAudioSession_l(audio_session_t sessionId, const T& tracks) const {
+    uint32_t hasAudioSession_l(audio_session_t sessionId, const T& tracks) const
+            REQUIRES(mutex()) {
                     uint32_t result = 0;
                     if (getEffectChain_l(sessionId) != 0) {
                         result = EFFECT_SESSION;
@@ -476,9 +499,9 @@
                 // the value returned by default implementation is not important as the
                 // strategy is only meaningful for PlaybackThread which implements this method
     product_strategy_t getStrategyForSession_l(
-            audio_session_t /* sessionId */) const override {
-                    return static_cast<product_strategy_t>(0);
-                }
+            audio_session_t /* sessionId */) const override REQUIRES(mutex()) {
+        return static_cast<product_strategy_t>(0);
+    }
 
                 // check if some effects must be suspended/restored when an effect is enabled
                 // or disabled
@@ -497,38 +520,41 @@
 
     sp<IMemory> pipeMemory() const override { return nullptr; }
 
-    void systemReady() final;
+    void systemReady() final EXCLUDES_ThreadBase_Mutex;
 
-    void broadcast_l() final;
+    void broadcast_l() final REQUIRES(mutex());
 
-    bool isTimestampCorrectionEnabled() const override { return false; }
+    bool isTimestampCorrectionEnabled_l() const override REQUIRES(mutex()) { return false; }
 
     bool isMsdDevice() const final { return mIsMsdDevice; }
 
     void dump(int fd, const Vector<String16>& args) override;
 
                 // deliver stats to mediametrics.
-    void sendStatistics(bool force) final;
+    void sendStatistics(bool force) final
+            REQUIRES(ThreadBase_ThreadLoop) EXCLUDES_ThreadBase_Mutex;
 
     audio_utils::mutex& mutex() const final RETURN_CAPABILITY(audio_utils::ThreadBase_Mutex) {
         return mMutex;
     }
     mutable audio_utils::mutex mMutex;
 
-    void onEffectEnable(const sp<IAfEffectModule>& effect) final;
-    void onEffectDisable() final;
+    void onEffectEnable(const sp<IAfEffectModule>& effect) final EXCLUDES_ThreadBase_Mutex;
+    void onEffectDisable() final EXCLUDES_ThreadBase_Mutex;
 
                 // invalidateTracksForAudioSession_l must be called while holding mutex().
-    void invalidateTracksForAudioSession_l(audio_session_t /* sessionId */) const override {}
+    void invalidateTracksForAudioSession_l(audio_session_t /* sessionId */) const override
+            REQUIRES(mutex()) {}
                 // Invalidate all the tracks with the given audio session.
-    void invalidateTracksForAudioSession(audio_session_t sessionId) const final {
-                    std::lock_guard _l(mutex());
+    void invalidateTracksForAudioSession(audio_session_t sessionId) const final
+            EXCLUDES_ThreadBase_Mutex {
+        audio_utils::lock_guard _l(mutex());
                     invalidateTracksForAudioSession_l(sessionId);
                 }
 
                 template <typename T>
-                void invalidateTracksForAudioSession_l(audio_session_t sessionId,
-                                                       const T& tracks) const {
+    void invalidateTracksForAudioSession_l(audio_session_t sessionId,
+            const T& tracks) const REQUIRES(mutex()) {
                     for (size_t i = 0; i < tracks.size(); ++i) {
                         const sp<IAfTrackBase>& track = tracks[i];
                         if (sessionId == track->sessionId()) {
@@ -553,41 +579,44 @@
                     effect_uuid_t mType;    // effect type UUID
                 };
 
-                void        acquireWakeLock();
-                virtual void acquireWakeLock_l();
-                void        releaseWakeLock();
-                void        releaseWakeLock_l();
-                void        updateWakeLockUids_l(const SortedVector<uid_t> &uids);
-                void        getPowerManager_l();
+    void acquireWakeLock() EXCLUDES_ThreadBase_Mutex;
+    virtual void acquireWakeLock_l() REQUIRES(mutex());
+    void releaseWakeLock() EXCLUDES_ThreadBase_Mutex;
+    void releaseWakeLock_l() REQUIRES(mutex());
+    void updateWakeLockUids_l(const SortedVector<uid_t> &uids) REQUIRES(mutex());
+    void getPowerManager_l() REQUIRES(mutex());
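The pattern in this hunk pairs each public method with an "_l" variant: the public one is tagged EXCLUDES_ThreadBase_Mutex (the caller must not already hold the thread mutex, since the method acquires it) and the "_l" one REQUIRES(mutex()). The ThreadBase macro presumably expands to something like clang's locks_excluded; a compact sketch of how the two annotations complement each other, with the underlying attributes written out and illustrative names:

    #define REQUIRES(...) __attribute__((requires_capability(__VA_ARGS__)))
    #define EXCLUDES(...) __attribute__((locks_excluded(__VA_ARGS__)))

    #include <mutex>

    class WakeLockHolder {
    public:
        // Caller must NOT hold mMutex: this method acquires it itself.
        void acquireWakeLock() EXCLUDES(mMutex) {
            std::lock_guard<std::mutex> l(mMutex);
            acquireWakeLock_l();
        }
    private:
        // Caller MUST hold mMutex; enforced at compile time by the analysis.
        void acquireWakeLock_l() REQUIRES(mMutex) { ++mWakeLockCount; }

        std::mutex mMutex;
        int mWakeLockCount = 0;
    };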
                 // suspend or restore effects of the specified type (or all if type is NULL)
                 // on a given session. The number of suspend requests is counted and restore
                 // occurs when all suspend requests are cancelled.
-                void setEffectSuspended_l(const effect_uuid_t *type,
+    void setEffectSuspended_l(const effect_uuid_t *type,
                                           bool suspend,
-                                          audio_session_t sessionId) final;
+            audio_session_t sessionId) final REQUIRES(mutex());
                 // updates mSuspendedSessions when an effect is suspended or restored
-                void        updateSuspendedSessions_l(const effect_uuid_t *type,
+    void updateSuspendedSessions_l(const effect_uuid_t *type,
                                                       bool suspend,
-                                                      audio_session_t sessionId);
+            audio_session_t sessionId) REQUIRES(mutex());
                 // check if some effects must be suspended when an effect chain is added
-                void checkSuspendOnAddEffectChain_l(const sp<IAfEffectChain>& chain);
+    void checkSuspendOnAddEffectChain_l(const sp<IAfEffectChain>& chain) REQUIRES(mutex());
 
                 // sends the metadata of the active tracks to the HAL
                 struct MetadataUpdate {
                     std::vector<playback_track_metadata_v7_t> playbackMetadataUpdate;
                     std::vector<record_track_metadata_v7_t>   recordMetadataUpdate;
                 };
-    virtual     MetadataUpdate           updateMetadata_l() = 0;
+    // NO_THREAD_SAFETY_ANALYSIS, updateMetadata_l() should include ThreadBase_ThreadLoop
+    // but MmapThread::start() -> exitStandby_l() -> updateMetadata_l() prevents this.
+    virtual MetadataUpdate updateMetadata_l() REQUIRES(mutex()) = 0;
 
                 String16 getWakeLockTag();
 
-    virtual     void        preExit() { }
-    virtual     void        setMasterMono_l(bool mono __unused) { }
+    virtual void preExit() EXCLUDES_ThreadBase_Mutex {}
+    virtual void setMasterMono_l(bool mono __unused) REQUIRES(mutex()) {}
     virtual     bool        requireMonoBlend() { return false; }
 
                             // called within the threadLoop to obtain timestamp from the HAL.
-    virtual     status_t    threadloop_getHalTimestamp_l(
-                                    ExtendedTimestamp *timestamp __unused) const {
+    virtual status_t threadloop_getHalTimestamp_l(
+            ExtendedTimestamp *timestamp __unused) const
+            REQUIRES(mutex(), ThreadBase_ThreadLoop) {
                                 return INVALID_OPERATION;
                             }
 public:
@@ -595,11 +624,12 @@
                 product_strategy_t getStrategyForStream(audio_stream_type_t stream) const;
 protected:
 
-    virtual     void        onHalLatencyModesChanged_l() {}
+    virtual void onHalLatencyModesChanged_l() REQUIRES(mutex()) {}
 
-    virtual     void        dumpInternals_l(int fd __unused, const Vector<String16>& args __unused)
-                            { }
-    virtual     void        dumpTracks_l(int fd __unused, const Vector<String16>& args __unused) { }
+    virtual void dumpInternals_l(int fd __unused, const Vector<String16>& args __unused)
+            REQUIRES(mutex()) {}
+    virtual void dumpTracks_l(int fd __unused, const Vector<String16>& args __unused)
+            REQUIRES(mutex()) {}
 
                 const type_t            mType;
 
@@ -624,10 +654,14 @@
                                                            // HAL format if Fastmixer is used.
                 audio_format_t          mHALFormat;
                 size_t                  mBufferSize;       // HAL buffer size for read() or write()
-                AudioDeviceTypeAddrVector mOutDeviceTypeAddrs; // output device types and addresses
-                AudioDeviceTypeAddr       mInDeviceTypeAddr;   // input device type and address
-                Vector< sp<ConfigEvent> >     mConfigEvents;
-                Vector< sp<ConfigEvent> >     mPendingConfigEvents; // events awaiting system ready
+
+     // output device types and addresses
+    AudioDeviceTypeAddrVector mOutDeviceTypeAddrs GUARDED_BY(mutex());
+    AudioDeviceTypeAddr mInDeviceTypeAddr GUARDED_BY(mutex());   // input device type and address
+    Vector<sp<ConfigEvent>> mConfigEvents GUARDED_BY(mutex());
+
+    // events awaiting system ready
+    Vector<sp<ConfigEvent>> mPendingConfigEvents GUARDED_BY(mutex());
 
                 // These fields are written and read by thread itself without lock or barrier,
                 // and read by other threads without lock or barrier via standby(), outDeviceTypes()
@@ -637,17 +671,17 @@
                 // with possibility that it might be inconsistent with other information.
                 bool                    mStandby;     // Whether thread is currently in standby.
 
+    // NO_THREAD_SAFETY_ANALYSIS - mPatch and mAudioSource should be guarded by mutex().
                 struct audio_patch      mPatch;
-
                 audio_source_t          mAudioSource;
 
                 const audio_io_handle_t mId;
-                Vector<sp<IAfEffectChain>> mEffectChains;
+    Vector<sp<IAfEffectChain>> mEffectChains GUARDED_BY(mutex());
 
                 static const int        kThreadNameLength = 16; // prctl(PR_SET_NAME) limit
                 char                    mThreadName[kThreadNameLength]; // guaranteed NUL-terminated
-                sp<os::IPowerManager>   mPowerManager;
-                sp<IBinder>             mWakeLockToken;
+    sp<os::IPowerManager> mPowerManager GUARDED_BY(mutex());
+    sp<IBinder> mWakeLockToken GUARDED_BY(mutex());
                 const sp<PMDeathRecipient> mDeathRecipient;
                 // list of suspended effects per session and per type. The first (outer) vector is
                 // keyed by session ID, the second (inner) by type UUID timeLow field
@@ -658,27 +692,31 @@
                 static const size_t     kLogSize = 4 * 1024;
                 sp<NBLog::Writer>       mNBLogWriter;
                 bool                    mSystemReady;
-                ExtendedTimestamp       mTimestamp;
-                TimestampVerifier< // For timestamp statistics.
-                        int64_t /* frame count */, int64_t /* time ns */> mTimestampVerifier;
+
+    // NO_THREAD_SAFETY_ANALYSIS - mTimestamp and mTimestampVerifier should be
+    // accessed under mutex for the RecordThread.
+    ExtendedTimestamp mTimestamp;
+    TimestampVerifier<int64_t /* frame count */, int64_t /* time ns */> mTimestampVerifier;
                 // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
                 // TODO: add confirmation checks:
                 // 1) DIRECT threads and linear PCM format really resets to 0?
                 // 2) Is frame count really valid if not linear pcm?
                 // 3) Are all 64 bits of position returned, not just lowest 32 bits?
                 // Timestamp corrected device should be a single device.
-                audio_devices_t         mTimestampCorrectedDevice = AUDIO_DEVICE_NONE;
+
+    audio_devices_t mTimestampCorrectedDevice = AUDIO_DEVICE_NONE;  // CONST set in ctor
 
                 // ThreadLoop statistics per iteration.
-                int64_t                 mLastIoBeginNs = -1;
-                int64_t                 mLastIoEndNs = -1;
+    std::atomic<int64_t> mLastIoBeginNs = -1;  // set in threadLoop, read by dump()
+    int64_t mLastIoEndNs GUARDED_BY(ThreadBase_ThreadLoop) = -1;
 
                 // ThreadSnapshot is thread-safe (internally locked)
                 mediautils::ThreadSnapshot mThreadSnapshot;
 
-                // This should be read under ThreadBase lock (if not on the threadLoop thread).
-                audio_utils::Statistics<double> mIoJitterMs{0.995 /* alpha */};
-                audio_utils::Statistics<double> mProcessTimeMs{0.995 /* alpha */};
+    audio_utils::Statistics<double> mIoJitterMs GUARDED_BY(mutex()) {0.995 /* alpha */};
+    audio_utils::Statistics<double> mProcessTimeMs GUARDED_BY(mutex()) {0.995 /* alpha */};
+
+    // NO_THREAD_SAFETY_ANALYSIS  GUARDED_BY(mutex())
                 audio_utils::Statistics<double> mLatencyMs{0.995 /* alpha */};
                 audio_utils::Statistics<double> mMonopipePipeDepthStats{0.999 /* alpha */};
 
@@ -764,7 +802,8 @@
                     // ThreadBase thread.
                     void            clear();
                     // periodically called in the threadLoop() to update power state uids.
-                    void updatePowerState(const sp<ThreadBase>& thread, bool force = false);
+                    void updatePowerState_l(const sp<ThreadBase>& thread, bool force = false)
+                            REQUIRES(audio_utils::ThreadBase_Mutex);
 
                      /** @return true if one or more active tracks were added or removed since the
                      *          last time this function was called or the vector was created.
@@ -788,8 +827,6 @@
                         return wakeLockUids; // moved by underlying SharedBuffer
                     }
 
-                    std::map<uid_t, std::pair<ssize_t /* previous */, ssize_t /* current */>>
-                                        mBatteryCounter;
                     SortedVector<sp<T>> mActiveTracks;
                     int                 mActiveTracksGeneration;
                     int                 mLastActiveTracksGeneration;
@@ -799,11 +836,11 @@
                     bool                mHasChanged = false;
                 };
 
-                SimpleLog mLocalLog;
+                SimpleLog mLocalLog;  // locked internally
 
 private:
-                void dumpBase_l(int fd, const Vector<String16>& args);
-                void dumpEffectChains_l(int fd, const Vector<String16>& args);
+    void dumpBase_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
+    void dumpEffectChains_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
 };
 
 // --- PlaybackThread ---
@@ -836,35 +873,38 @@
     ~PlaybackThread() override;
 
     // Thread virtuals
-    bool threadLoop() final;
+    bool threadLoop() final REQUIRES(ThreadBase_ThreadLoop) EXCLUDES_ThreadBase_Mutex;
 
     // RefBase
     void onFirstRef() override;
 
     status_t checkEffectCompatibility_l(
-            const effect_descriptor_t* desc, audio_session_t sessionId) final;
+            const effect_descriptor_t* desc, audio_session_t sessionId) final REQUIRES(mutex());
 
-    void addOutputTrack_l(const sp<IAfTrack>& track) final {
+    void addOutputTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex()) {
         mTracks.add(track);
     }
 
 protected:
     // Code snippets that were lifted up out of threadLoop()
-    virtual     void        threadLoop_mix() = 0;
-    virtual     void        threadLoop_sleepTime() = 0;
-    virtual     ssize_t     threadLoop_write();
-    virtual     void        threadLoop_drain();
-    virtual     void        threadLoop_standby();
-    virtual     void        threadLoop_exit();
-    virtual     void        threadLoop_removeTracks(const Vector<sp<IAfTrack>>& tracksToRemove);
+    virtual void threadLoop_mix() REQUIRES(ThreadBase_ThreadLoop) = 0;
+    virtual void threadLoop_sleepTime() REQUIRES(ThreadBase_ThreadLoop) = 0;
+    virtual ssize_t threadLoop_write() REQUIRES(ThreadBase_ThreadLoop);
+    virtual void threadLoop_drain() REQUIRES(ThreadBase_ThreadLoop);
+    virtual void threadLoop_standby() REQUIRES(ThreadBase_ThreadLoop);
+    virtual void threadLoop_exit() REQUIRES(ThreadBase_ThreadLoop);
+    virtual void threadLoop_removeTracks(const Vector<sp<IAfTrack>>& tracksToRemove)
+            REQUIRES(ThreadBase_ThreadLoop);
 
                 // prepareTracks_l reads and writes mActiveTracks, and returns
                 // the pending set of tracks to remove via Vector 'tracksToRemove'.  The caller
                 // is responsible for clearing or destroying this Vector later on, when it
                 // is safe to do so. That will drop the final ref count and destroy the tracks.
-    virtual     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) = 0;
-                void        removeTracks_l(const Vector<sp<IAfTrack>>& tracksToRemove);
-                status_t    handleVoipVolume_l(float *volume);
+    virtual mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove)
+            REQUIRES(mutex(), ThreadBase_ThreadLoop) = 0;
+
+    void removeTracks_l(const Vector<sp<IAfTrack>>& tracksToRemove) REQUIRES(mutex());
+    status_t handleVoipVolume_l(float *volume) REQUIRES(mutex());
 
     // StreamOutHalInterfaceCallback implementation
     virtual     void        onWriteReady();
@@ -877,9 +917,9 @@
 protected:
 
     virtual     bool        waitingAsyncCallback();
-    virtual     bool        waitingAsyncCallback_l();
-    virtual     bool        shouldStandby_l();
-    virtual     void        onAddNewTrack_l();
+    virtual bool waitingAsyncCallback_l() REQUIRES(mutex());
+    virtual bool shouldStandby_l() REQUIRES(mutex(), ThreadBase_ThreadLoop);
+    virtual void onAddNewTrack_l() REQUIRES(mutex());
 public:  // AsyncCallbackThread
                 void        onAsyncError(); // error reported by AsyncCallbackThread
 protected:
@@ -888,20 +928,21 @@
             const std::basic_string<uint8_t>& metadataBs) final;
 
     // ThreadBase virtuals
-    virtual     void        preExit();
+    void preExit() final EXCLUDES_ThreadBase_Mutex;
 
     virtual     bool        keepWakeLock() const { return true; }
-    virtual     void        acquireWakeLock_l() {
+    virtual void acquireWakeLock_l() REQUIRES(mutex()) {
                                 ThreadBase::acquireWakeLock_l();
-                                mActiveTracks.updatePowerState(this, true /* force */);
+        mActiveTracks.updatePowerState_l(this, true /* force */);
                             }
 
-    virtual     void        checkOutputStageEffects() {}
+    virtual void checkOutputStageEffects()
+            REQUIRES(ThreadBase_ThreadLoop) EXCLUDES_ThreadBase_Mutex {}
     virtual     void        setHalLatencyMode_l() {}
 
 
-    void dumpInternals_l(int fd, const Vector<String16>& args) override;
-    void dumpTracks_l(int fd, const Vector<String16>& args) final;
+    void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
+    void dumpTracks_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
 
 public:
 
@@ -910,15 +951,15 @@
                 // return estimated latency in milliseconds, as reported by HAL
     uint32_t latency() const final;
                 // same, but lock must already be held
-    uint32_t latency_l() const final;
+    uint32_t latency_l() const final /* REQUIRES(mutex()) */;  // NO_THREAD_SAFETY_ANALYSIS
 
                 // VolumeInterface
     void setMasterVolume(float value) final;
-    void setMasterBalance(float balance) override;
+    void setMasterBalance(float balance) override EXCLUDES_ThreadBase_Mutex;
     void setMasterMute(bool muted) final;
-    void setStreamVolume(audio_stream_type_t stream, float value) final;
-    void setStreamMute(audio_stream_type_t stream, bool muted) final;
-    float streamVolume(audio_stream_type_t stream) const final;
+    void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
+    void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
+    float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
     void setVolumeForOutput_l(float left, float right) const final;
 
     sp<IAfTrack> createTrack_l(
@@ -949,56 +990,72 @@
         return mActiveTracks.indexOf(track) >= 0;
     }
 
-    AudioStreamOut* getOutput_l() const final { return mOutput; }
-    AudioStreamOut* getOutput() const final;
-    AudioStreamOut* clearOutput() final;
+    AudioStreamOut* getOutput_l() const final REQUIRES(mutex()) { return mOutput; }
+    AudioStreamOut* getOutput() const final EXCLUDES_ThreadBase_Mutex;
+    AudioStreamOut* clearOutput() final EXCLUDES_ThreadBase_Mutex;
+
+    // NO_THREAD_SAFETY_ANALYSIS -- probably needs a lock.
     sp<StreamHalInterface> stream() const final;
 
-                // a very large number of suspend() will eventually wraparound, but unlikely
-    void suspend() final { (void) android_atomic_inc(&mSuspended); }
-    void restore() final
-                                {
-                                    // if restore() is done without suspend(), get back into
-                                    // range so that the next suspend() will operate correctly
-                                    if (android_atomic_dec(&mSuspended) <= 0) {
-                                        android_atomic_release_store(0, &mSuspended);
-                                    }
-                                }
-    bool isSuspended() const final
-                                { return android_atomic_acquire_load(&mSuspended) > 0; }
+    // suspend(), restore(), and isSuspended() are implemented atomically.
+    void suspend() final { ++mSuspended; }
+    void restore() final {
+        // if restore() is done without suspend(), get back into
+        // range so that the next suspend() will operate correctly
+        while (true) {
+            int32_t suspended = mSuspended;
+            if (suspended <= 0) {
+                ALOGW("%s: invalid mSuspended %d <= 0", __func__, suspended);
+                return;
+            }
+            const int32_t desired = suspended - 1;
+            if (mSuspended.compare_exchange_weak(suspended, desired)) return;
+        }
+    }
+    bool isSuspended() const final { return mSuspended > 0; }
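The rewritten restore() above uses a compare-exchange loop so the counter is never driven below zero (the old code decremented first and then clamped). A standalone sketch of the same decrement-with-floor idiom:

    #include <atomic>
    #include <cstdint>
    #include <cstdio>

    std::atomic<int32_t> gSuspended{0};

    void suspendOnce() { ++gSuspended; }

    void restoreOnce() {
        while (true) {
            int32_t current = gSuspended.load();
            if (current <= 0) {
                std::fprintf(stderr, "restore() without matching suspend()\n");
                return;                       // leave the counter pinned at 0
            }
            // compare_exchange_weak may fail spuriously or because another
            // thread raced us; either way, retry with a fresh value.
            if (gSuspended.compare_exchange_weak(current, current - 1)) return;
        }
    }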
 
-    String8 getParameters(const String8& keys);
-    void ioConfigChanged(audio_io_config_event_t event, pid_t pid = 0,
+    String8 getParameters(const String8& keys) EXCLUDES_ThreadBase_Mutex;
+
+    // Hold either the AudioFlinger::mutex or the ThreadBase::mutex
+    void ioConfigChanged_l(audio_io_config_event_t event, pid_t pid = 0,
             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final;
-    status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames) const final;
+    status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames) const final
+            EXCLUDES_ThreadBase_Mutex;
                 // Consider also removing and passing an explicit mMainBuffer initialization
                 // parameter to AF::IAfTrack::Track().
     float* sinkBuffer() const final {
                     return reinterpret_cast<float *>(mSinkBuffer); };
 
-    void detachAuxEffect_l(int effectId) final;
+    void detachAuxEffect_l(int effectId) final REQUIRES(mutex());
 
-    status_t attachAuxEffect(const sp<IAfTrack>& track, int EffectId) final;
-    status_t attachAuxEffect_l(const sp<IAfTrack>& track, int EffectId) final;
+    status_t attachAuxEffect(const sp<IAfTrack>& track, int EffectId) final
+            EXCLUDES_ThreadBase_Mutex;
+    status_t attachAuxEffect_l(const sp<IAfTrack>& track, int EffectId) final REQUIRES(mutex());
 
-    status_t addEffectChain_l(const sp<IAfEffectChain>& chain) final;
-    size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) final;
-    uint32_t hasAudioSession_l(audio_session_t sessionId) const final {
+    status_t addEffectChain_l(const sp<IAfEffectChain>& chain) final REQUIRES(mutex());
+    size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) final REQUIRES(mutex());
+    uint32_t hasAudioSession_l(audio_session_t sessionId) const final REQUIRES(mutex()) {
                             return ThreadBase::hasAudioSession_l(sessionId, mTracks);
                         }
-    product_strategy_t getStrategyForSession_l(audio_session_t sessionId) const final;
+    product_strategy_t getStrategyForSession_l(audio_session_t sessionId) const final
+            REQUIRES(mutex());
 
 
-    status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) final;
+    status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) final
+            EXCLUDES_ThreadBase_Mutex;
+    // could be static.
     bool isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const final;
 
-                // called with AudioFlinger lock held
-    bool invalidateTracks_l(audio_stream_type_t streamType) final;
-    bool invalidateTracks_l(std::set<audio_port_handle_t>& portIds) final;
+    // Does this require the AudioFlinger mutex as well?
+    bool invalidateTracks_l(audio_stream_type_t streamType) final
+            REQUIRES(mutex());
+    bool invalidateTracks_l(std::set<audio_port_handle_t>& portIds) final
+            REQUIRES(mutex());
     void invalidateTracks(audio_stream_type_t streamType) override;
                 // Invalidate tracks by a set of port ids. The port id will be removed from
                 // the given set if the corresponding track is found and invalidated.
-    void invalidateTracks(std::set<audio_port_handle_t>& portIds) override;
+    void invalidateTracks(std::set<audio_port_handle_t>& portIds) override
+            EXCLUDES_ThreadBase_Mutex;
 
     size_t frameCount() const final { return mNormalFrameCount; }
 
@@ -1006,30 +1063,33 @@
                     return mMixerChannelMask;
                 }
 
-    status_t getTimestamp_l(AudioTimestamp& timestamp) final;
+    status_t getTimestamp_l(AudioTimestamp& timestamp) final
+            REQUIRES(mutex(), ThreadBase_ThreadLoop);
 
-    void addPatchTrack(const sp<IAfPatchTrack>& track) final;
-    void deletePatchTrack(const sp<IAfPatchTrack>& track) final;
+    void addPatchTrack(const sp<IAfPatchTrack>& track) final EXCLUDES_ThreadBase_Mutex;
+    void deletePatchTrack(const sp<IAfPatchTrack>& track) final EXCLUDES_ThreadBase_Mutex;
 
+    // NO_THREAD_SAFETY_ANALYSIS - fix this to use atomics.
     void toAudioPortConfig(struct audio_port_config* config) final;
 
                 // Return the asynchronous signal wait time.
-    int64_t computeWaitTimeNs_l() const override { return INT64_MAX; }
+    int64_t computeWaitTimeNs_l() const override REQUIRES(mutex()) { return INT64_MAX; }
                 // returns true if the track is allowed to be added to the thread.
     bool isTrackAllowed_l(
                                     audio_channel_mask_t channelMask __unused,
                                     audio_format_t format __unused,
                                     audio_session_t sessionId __unused,
-                                    uid_t uid) const override {
+            uid_t uid) const override REQUIRES(mutex()) {
                                 return trackCountForUid_l(uid) < PlaybackThread::kMaxTracksPerUid
                                        && mTracks.size() < PlaybackThread::kMaxTracks;
                             }
 
-    bool isTimestampCorrectionEnabled() const final {
-                                return audio_is_output_devices(mTimestampCorrectedDevice)
-                                        && outDeviceTypes().count(mTimestampCorrectedDevice) != 0;
+    bool isTimestampCorrectionEnabled_l() const final REQUIRES(mutex()) {
+        return audio_is_output_devices(mTimestampCorrectedDevice)
+                && outDeviceTypes_l().count(mTimestampCorrectedDevice) != 0;
                             }
 
+    // NO_THREAD_SAFETY_ANALYSIS - fix this to be atomic.
     bool isStreamInitialized() const final {
                                 return !(mOutput == nullptr || mOutput->stream == nullptr);
                             }
@@ -1046,12 +1106,12 @@
                     return (mHapticChannelMask & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE;
                 }
 
-    void setDownStreamPatch(const struct audio_patch* patch) final {
-                    std::lock_guard _l(mutex());
+    void setDownStreamPatch(const struct audio_patch* patch) final EXCLUDES_ThreadBase_Mutex {
+        audio_utils::lock_guard _l(mutex());
                     mDownStreamPatch = *patch;
                 }
 
-    IAfTrack* getTrackById_l(audio_port_handle_t trackId) final;
+    IAfTrack* getTrackById_l(audio_port_handle_t trackId) final REQUIRES(mutex());
 
     bool hasMixer() const final {
                     return mType == MIXER || mType == DUPLICATING || mType == SPATIALIZER;
@@ -1074,19 +1134,19 @@
     void stopMelComputation_l() override
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
-    void setStandby() final {
-                    std::lock_guard _l(mutex());
+    void setStandby() final EXCLUDES_ThreadBase_Mutex {
+        audio_utils::lock_guard _l(mutex());
                     setStandby_l();
                 }
 
-    void setStandby_l() final {
+    void setStandby_l() final REQUIRES(mutex()) {
                     mStandby = true;
                     mHalStarted = false;
                     mKernelPositionOnStandby =
                         mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
                 }
 
-    bool waitForHalStart() final {
+    bool waitForHalStart() final EXCLUDES_ThreadBase_Mutex {
                     audio_utils::unique_lock _l(mutex());
                     static const nsecs_t kWaitHalTimeoutNs = seconds(2);
                     nsecs_t endWaitTimetNs = systemTime() + kWaitHalTimeoutNs;
@@ -1104,10 +1164,17 @@
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
 
-    bool                            mThreadThrottle;     // throttle the thread processing
-    uint32_t                        mThreadThrottleTimeMs; // throttle time for MIXER threads
-    uint32_t                        mThreadThrottleEndMs;  // notify once per throttling
-    uint32_t                        mHalfBufferMs;       // half the buffer size in milliseconds
+    // throttle the thread processing
+    bool mThreadThrottle GUARDED_BY(ThreadBase_ThreadLoop);
+
+    // throttle time for MIXER threads - atomic as read by dump()
+    std::atomic<uint32_t> mThreadThrottleTimeMs;
+
+    // notify once per throttling
+    uint32_t mThreadThrottleEndMs GUARDED_BY(ThreadBase_ThreadLoop);
+
+    // half the buffer size in milliseconds
+    uint32_t mHalfBufferMs GUARDED_BY(ThreadBase_ThreadLoop);
 
     void*                           mSinkBuffer;         // frame size aligned sink buffer
 
@@ -1127,21 +1194,21 @@
     // buffer before downmixing or data conversion to the sink buffer.
 
     // Set to "true" to enable the Mixer Buffer otherwise mixer output goes to sink buffer.
-    bool                            mMixerBufferEnabled;
+    bool mMixerBufferEnabled GUARDED_BY(ThreadBase_ThreadLoop);
 
     // Storage, 32 byte aligned (may make this alignment a requirement later).
     // Due to constraints on mNormalFrameCount, the buffer size is a multiple of 16 frames.
-    void*                           mMixerBuffer;
+    void* mMixerBuffer GUARDED_BY(ThreadBase_ThreadLoop);
 
     // Size of mMixerBuffer in bytes: mNormalFrameCount * #channels * sampsize.
-    size_t                          mMixerBufferSize;
+    size_t mMixerBufferSize GUARDED_BY(ThreadBase_ThreadLoop);
 
     // The audio format of mMixerBuffer. Set to AUDIO_FORMAT_PCM_(FLOAT|16_BIT) only.
-    audio_format_t                  mMixerBufferFormat;
+    audio_format_t mMixerBufferFormat GUARDED_BY(ThreadBase_ThreadLoop);
 
     // An internal flag set to true by MixerThread::prepareTracks_l()
     // when mMixerBuffer contains valid data after mixing.
-    bool                            mMixerBufferValid;
+    bool mMixerBufferValid GUARDED_BY(ThreadBase_ThreadLoop);
 
     // Effects Buffer (mEffectsBuffer*)
     //
@@ -1150,46 +1217,49 @@
     // to the sink buffer.
 
     // Set to "true" to enable the Effects Buffer otherwise effects output goes to sink buffer.
-    bool                            mEffectBufferEnabled;
+    bool mEffectBufferEnabled;
+    // NO_THREAD_SAFETY_ANALYSIS: Spatializer accesses this in addEffectChain_l()
 
     // Storage, 32 byte aligned (may make this alignment a requirement later).
     // Due to constraints on mNormalFrameCount, the buffer size is a multiple of 16 frames.
-    void*                           mEffectBuffer;
+    void* mEffectBuffer;
+    // NO_THREAD_SAFETY_ANALYSIS: Spatializer accesses this in addEffectChain_l()
 
     // Size of mEffectsBuffer in bytes: mNormalFrameCount * #channels * sampsize.
-    size_t                          mEffectBufferSize;
+    size_t mEffectBufferSize;
+    // NO_THREAD_SAFETY_ANALYSIS: Spatializer accesses this in addEffectChain_l()
 
     // The audio format of mEffectsBuffer. Set to AUDIO_FORMAT_PCM_16_BIT only.
-    audio_format_t                  mEffectBufferFormat;
+    // NO_THREAD_SAFETY_ANALYSIS: Spatializer accesses this in addEffectChain_l()
+    audio_format_t mEffectBufferFormat;
 
     // An internal flag set to true by MixerThread::prepareTracks_l()
     // when mEffectsBuffer contains valid data after mixing.
     //
     // When this is set, all mixer data is routed into the effects buffer
     // for any processing (including output processing).
-    bool                            mEffectBufferValid;
+    bool mEffectBufferValid GUARDED_BY(ThreadBase_ThreadLoop);
 
     // Set to "true" to enable when data has already copied to sink
-    bool                            mHasDataCopiedToSinkBuffer = false;
+    bool mHasDataCopiedToSinkBuffer GUARDED_BY(ThreadBase_ThreadLoop) = false;
 
     // Frame size aligned buffer used as input and output to all post processing effects
     // except the Spatializer in a SPATIALIZER thread. Non spatialized tracks are mixed into
     // this buffer so that post processing effects can be applied.
-    void*                           mPostSpatializerBuffer = nullptr;
+    void* mPostSpatializerBuffer GUARDED_BY(mutex()) = nullptr;
 
     // Size of mPostSpatializerBuffer in bytes
-    size_t                          mPostSpatializerBufferSize;
-
+    size_t mPostSpatializerBufferSize GUARDED_BY(mutex());
 
     // suspend count, > 0 means suspended.  While suspended, the thread continues to pull from
     // tracks and mix, but doesn't write to HAL.  A2DP and SCO HAL implementations can't handle
     // concurrent use of both of them, so Audio Policy Service suspends one of the threads to
     // workaround that restriction.
     // 'volatile' means accessed via atomic operations and no lock.
-    volatile int32_t                mSuspended;
+    std::atomic<int32_t> mSuspended;
 
     int64_t                         mBytesWritten;
-    std::atomic<int64_t>            mFramesWritten; // not reset on standby
+    std::atomic<int64_t> mFramesWritten;  // not reset on standby
     int64_t                         mLastFramesWritten = -1; // track changes in timestamp
                                                              // server frames written.
     int64_t                         mSuspendedFrames; // not reset on standby
@@ -1204,8 +1274,8 @@
     // mMasterMute is in both PlaybackThread and in AudioFlinger.  When a
     // PlaybackThread needs to find out if master-muted, it checks its local
     // copy rather than the one in AudioFlinger.  This optimization saves a lock.
-    bool                            mMasterMute;
-                void        setMasterMute_l(bool muted) { mMasterMute = muted; }
+    bool mMasterMute GUARDED_BY(mutex());
+    void setMasterMute_l(bool muted) REQUIRES(mutex()) { mMasterMute = muted; }
 
                 auto discontinuityForStandbyOrFlush() const { // call on threadLoop or with lock.
                     return ((mType == DIRECT && !audio_is_linear_pcm(mFormat))
@@ -1224,24 +1294,28 @@
     // No sleep in standby mode; waits on a condition
 
     // Code snippets that are temporarily lifted up out of threadLoop() until the merge
-    virtual void checkSilentMode_l() final;  // consider unification with MMapThread
+
+    // consider unification with MMapThread
+    virtual void checkSilentMode_l() final REQUIRES(mutex());
 
     // Non-trivial for DUPLICATING only
-    virtual     void        saveOutputTracks() { }
-    virtual     void        clearOutputTracks() { }
+    virtual void saveOutputTracks() REQUIRES(ThreadBase_ThreadLoop) {}
+    virtual void clearOutputTracks() REQUIRES(ThreadBase_ThreadLoop) {}
 
     // Cache various calculated values, at threadLoop() entry and after a parameter change
-    virtual     void        cacheParameters_l();
+    virtual void cacheParameters_l() REQUIRES(mutex(), ThreadBase_ThreadLoop);
                 void        setCheckOutputStageEffects() override {
                                 mCheckOutputStageEffects.store(true);
                             }
 
-    virtual     uint32_t    correctLatency_l(uint32_t latency) const;
+    virtual uint32_t correctLatency_l(uint32_t latency) const REQUIRES(mutex());
 
     virtual     status_t    createAudioPatch_l(const struct audio_patch *patch,
-                                   audio_patch_handle_t *handle);
-    virtual     status_t    releaseAudioPatch_l(const audio_patch_handle_t handle);
+            audio_patch_handle_t *handle) REQUIRES(mutex());
+    virtual status_t releaseAudioPatch_l(const audio_patch_handle_t handle)
+            REQUIRES(mutex());
 
+    // NO_THREAD_SAFETY_ANALYSIS - fix this to use atomics
     bool usesHwAvSync() const final { return mType == DIRECT && mOutput != nullptr
                                     && mHwSupportsPause
                                     && (mOutput->flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC); }
@@ -1249,22 +1323,23 @@
                 uint32_t    trackCountForUid_l(uid_t uid) const;
 
                 void        invalidateTracksForAudioSession_l(
-    audio_session_t sessionId) const override {
+            audio_session_t sessionId) const override REQUIRES(mutex()) {
                                 ThreadBase::invalidateTracksForAudioSession_l(sessionId, mTracks);
                             }
 
     DISALLOW_COPY_AND_ASSIGN(PlaybackThread);
 
-    status_t addTrack_l(const sp<IAfTrack>& track) final;
-    bool destroyTrack_l(const sp<IAfTrack>& track) final;
+    status_t addTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex());
+    bool destroyTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex());
 
-    void        removeTrack_l(const sp<IAfTrack>& track);
+    void removeTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex());
 
-    void        readOutputParameters_l();
-    MetadataUpdate          updateMetadata_l() final;
-    virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata);
+    void readOutputParameters_l() REQUIRES(mutex());
+    MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
+    virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata)
+            REQUIRES(mutex());
 
-    void        collectTimestamps_l();
+    void collectTimestamps_l() REQUIRES(mutex(), ThreadBase_ThreadLoop);
 
     // The Tracks class manages tracks added and removed from the Thread.
     template <typename T>
@@ -1340,17 +1415,17 @@
     uint32_t                        mSleepTimeUs;
 
     // mixer status returned by prepareTracks_l()
-    mixer_state                     mMixerStatus; // current cycle
+    mixer_state mMixerStatus GUARDED_BY(ThreadBase_ThreadLoop); // current cycle
                                                   // previous cycle when in prepareTracks_l()
-    mixer_state                     mMixerStatusIgnoringFastTracks;
+    mixer_state mMixerStatusIgnoringFastTracks GUARDED_BY(ThreadBase_ThreadLoop);
                                                   // FIXME or a separate ready state per track
 
     // FIXME move these declarations into the specific sub-class that needs them
     // MIXER only
-    uint32_t                        sleepTimeShift;
+    uint32_t sleepTimeShift GUARDED_BY(ThreadBase_ThreadLoop);
 
     // same as AudioFlinger::mStandbyTimeInNsecs except for DIRECT which uses a shorter value
-    nsecs_t                         mStandbyDelayNs;
+    nsecs_t mStandbyDelayNs;  // GUARDED_BY(mutex());
 
     // MIXER only
     nsecs_t                         maxPeriod;
@@ -1358,8 +1433,8 @@
     // DUPLICATING only
     uint32_t                        writeFrames;
 
-    size_t                          mBytesRemaining;
-    size_t                          mCurrentWriteLength;
+    size_t mBytesRemaining GUARDED_BY(ThreadBase_ThreadLoop);
+    size_t mCurrentWriteLength GUARDED_BY(ThreadBase_ThreadLoop);
     bool                            mUseAsyncWrite;
     // mWriteAckSequence contains current write sequence on bits 31-1. The write sequence is
     // incremented each time a write(), a flush() or a standby() occurs.
@@ -1412,7 +1487,7 @@
 
 protected:
                 // accessed by both binder threads and within threadLoop(), lock on mutex needed
-     uint32_t& fastTrackAvailMask_l() final { return mFastTrackAvailMask; }
+     uint32_t& fastTrackAvailMask_l() final REQUIRES(mutex()) { return mFastTrackAvailMask; }
      uint32_t mFastTrackAvailMask;  // bit i set if fast track [i] is available
                 bool        mHwSupportsPause;
                 bool        mHwPaused;
@@ -1481,18 +1556,20 @@
 
     // Thread virtuals
 
-    bool checkForNewParameter_l(const String8& keyValuePair, status_t& status) final;
+    bool checkForNewParameter_l(const String8& keyValuePair, status_t& status) final
+            REQUIRES(mutex());
 
     bool isTrackAllowed_l(
                                     audio_channel_mask_t channelMask, audio_format_t format,
-                                    audio_session_t sessionId, uid_t uid) const final;
+            audio_session_t sessionId, uid_t uid) const final REQUIRES(mutex());
 protected:
-    mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) override;
+    mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) override
+            REQUIRES(mutex(), ThreadBase_ThreadLoop);
     uint32_t idleSleepTimeUs() const final;
     uint32_t suspendSleepTimeUs() const final;
-    void cacheParameters_l() override;
+    void cacheParameters_l() override REQUIRES(mutex(), ThreadBase_ThreadLoop);
 
-    void acquireWakeLock_l() final {
+    void acquireWakeLock_l() final REQUIRES(mutex()) {
         PlaybackThread::acquireWakeLock_l();
         if (hasFastMixer()) {
             mFastMixer->setBoottimeOffset(
@@ -1500,18 +1577,19 @@
         }
     }
 
-                void        dumpInternals_l(int fd, const Vector<String16>& args) override;
+    void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
 
     // threadLoop snippets
-    ssize_t threadLoop_write() override;
-    void threadLoop_standby() override;
-    void threadLoop_mix() override;
-    void threadLoop_sleepTime() override;
-    uint32_t correctLatency_l(uint32_t latency) const final;
+    ssize_t threadLoop_write() override REQUIRES(ThreadBase_ThreadLoop);
+    void threadLoop_standby() override REQUIRES(ThreadBase_ThreadLoop);
+    void threadLoop_mix() override REQUIRES(ThreadBase_ThreadLoop);
+    void threadLoop_sleepTime() override REQUIRES(ThreadBase_ThreadLoop);
+    uint32_t correctLatency_l(uint32_t latency) const final REQUIRES(mutex());
 
     status_t createAudioPatch_l(
-            const struct audio_patch* patch, audio_patch_handle_t* handle) final;
-    status_t releaseAudioPatch_l(const audio_patch_handle_t handle) final;
+            const struct audio_patch* patch, audio_patch_handle_t* handle)
+            final REQUIRES(mutex());
+    status_t releaseAudioPatch_l(const audio_patch_handle_t handle) final REQUIRES(mutex());
 
                 AudioMixer* mAudioMixer;    // normal mixer
 
@@ -1540,7 +1618,7 @@
 
                 // accessible only within the threadLoop(), no locks required
                 //          mFastMixer->sq()    // for mutating and pushing state
-                int32_t     mFastMixerFutex;    // for cold idle
+    int32_t mFastMixerFutex GUARDED_BY(ThreadBase_ThreadLoop);  // for cold idle
 
                 std::atomic_bool mMasterMono;
 public:
@@ -1550,8 +1628,9 @@
                               return mFastMixerDumpState.mTracks[fastIndex].mUnderruns;
                             }
 
-                status_t    threadloop_getHalTimestamp_l(
-                                    ExtendedTimestamp *timestamp) const override {
+    status_t threadloop_getHalTimestamp_l(
+            ExtendedTimestamp *timestamp) const override
+            REQUIRES(mutex(), ThreadBase_ThreadLoop) {
                                 if (mNormalSink.get() != nullptr) {
                                     return mNormalSink->getTimestamp(*timestamp);
                                 }
@@ -1575,16 +1654,16 @@
                 // and blending without limiter is idempotent but inefficient to do twice.
     virtual     bool       requireMonoBlend() { return mMasterMono.load() && !hasFastMixer(); }
 
-                void       setMasterBalance(float balance) override {
+    void setMasterBalance(float balance) override EXCLUDES_ThreadBase_Mutex {
                                mMasterBalance.store(balance);
                                if (hasFastMixer()) {
                                    mFastMixer->setMasterBalance(balance);
                                }
                            }
 
-                void       updateHalSupportedLatencyModes_l();
-                void       onHalLatencyModesChanged_l() override;
-                void       setHalLatencyMode_l() override;
+    void updateHalSupportedLatencyModes_l() REQUIRES(mutex());
+    void onHalLatencyModesChanged_l() override REQUIRES(mutex());
+    void setHalLatencyMode_l() override REQUIRES(mutex());
 };
 
 class DirectOutputThread : public PlaybackThread, public virtual IAfDirectOutputThread {
@@ -1599,35 +1678,36 @@
                        const audio_offload_info_t& offloadInfo)
         : DirectOutputThread(afThreadCallback, output, id, DIRECT, systemReady, offloadInfo) { }
 
-    virtual                 ~DirectOutputThread();
+    ~DirectOutputThread() override;
 
     status_t selectPresentation(int presentationId, int programId) final;
 
     // Thread virtuals
 
     virtual     bool        checkForNewParameter_l(const String8& keyValuePair,
-                                                   status_t& status);
+            status_t& status) REQUIRES(mutex());
 
-                void        flushHw_l() override;
+    void flushHw_l() override REQUIRES(mutex(), ThreadBase_ThreadLoop);
 
-                void        setMasterBalance(float balance) override;
+    void setMasterBalance(float balance) override EXCLUDES_ThreadBase_Mutex;
 
 protected:
     virtual     uint32_t    activeSleepTimeUs() const;
     virtual     uint32_t    idleSleepTimeUs() const;
     virtual     uint32_t    suspendSleepTimeUs() const;
-    virtual     void        cacheParameters_l();
+    virtual void cacheParameters_l() REQUIRES(mutex(), ThreadBase_ThreadLoop);
 
-                void        dumpInternals_l(int fd, const Vector<String16>& args) override;
+    void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
 
     // threadLoop snippets
-    virtual     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove);
-    virtual     void        threadLoop_mix();
-    virtual     void        threadLoop_sleepTime();
-    virtual     void        threadLoop_exit();
-    virtual     bool        shouldStandby_l();
+    virtual mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove)
+            REQUIRES(mutex(), ThreadBase_ThreadLoop);
+    virtual void threadLoop_mix() REQUIRES(ThreadBase_ThreadLoop);
+    virtual void threadLoop_sleepTime() REQUIRES(ThreadBase_ThreadLoop);
+    virtual void threadLoop_exit() REQUIRES(ThreadBase_ThreadLoop);
+    virtual bool shouldStandby_l() REQUIRES(mutex());
 
-    virtual     void        onAddNewTrack_l();
+    virtual void onAddNewTrack_l() REQUIRES(mutex());
 
     const       audio_offload_info_t mOffloadInfo;
 
@@ -1637,7 +1717,7 @@
     DirectOutputThread(const sp<IAfThreadCallback>& afThreadCallback, AudioStreamOut* output,
                        audio_io_handle_t id, ThreadBase::type_t type, bool systemReady,
                        const audio_offload_info_t& offloadInfo);
-    void processVolume_l(IAfTrack *track, bool lastTrack);
+    void processVolume_l(IAfTrack *track, bool lastTrack) REQUIRES(mutex());
     bool isTunerStream() const { return (mOffloadInfo.content_id > 0); }
 
     // prepareTracks_l() tells threadLoop_mix() the name of the single active track
@@ -1652,7 +1732,7 @@
 public:
     virtual     bool        hasFastMixer() const { return false; }
 
-    virtual     int64_t     computeWaitTimeNs_l() const override;
+    virtual int64_t computeWaitTimeNs_l() const override REQUIRES(mutex());
 
     status_t    threadloop_getHalTimestamp_l(ExtendedTimestamp *timestamp) const override {
                     // For DIRECT and OFFLOAD threads, query the output sink directly.
@@ -1679,19 +1759,20 @@
                   audio_io_handle_t id, bool systemReady,
                   const audio_offload_info_t& offloadInfo);
     virtual                 ~OffloadThread() {};
-                void        flushHw_l() override;
+    void flushHw_l() final REQUIRES(mutex(), ThreadBase_ThreadLoop);
 
 protected:
     // threadLoop snippets
-    virtual     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove);
-    virtual     void        threadLoop_exit();
+    mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final
+            REQUIRES(mutex(), ThreadBase_ThreadLoop);
+    void threadLoop_exit() final REQUIRES(ThreadBase_ThreadLoop);
 
-    virtual     bool        waitingAsyncCallback();
-    virtual     bool        waitingAsyncCallback_l();
-    virtual     void        invalidateTracks(audio_stream_type_t streamType);
-                void        invalidateTracks(std::set<audio_port_handle_t>& portIds) override;
+    bool waitingAsyncCallback() final;
+    bool waitingAsyncCallback_l() final REQUIRES(mutex());
+    void invalidateTracks(audio_stream_type_t streamType) final EXCLUDES_ThreadBase_Mutex;
+    void invalidateTracks(std::set<audio_port_handle_t>& portIds) final EXCLUDES_ThreadBase_Mutex;
 
-    virtual     bool        keepWakeLock() const { return (mKeepWakeLock || (mDrainSequence & 1)); }
+    bool keepWakeLock() const final { return (mKeepWakeLock || (mDrainSequence & 1)); }
 
 private:
     size_t      mPausedWriteLength;     // length in bytes of write interrupted by pause
@@ -1704,10 +1785,10 @@
     explicit AsyncCallbackThread(const wp<PlaybackThread>& playbackThread);
 
     // Thread virtuals
-    virtual bool        threadLoop();
+    bool threadLoop() final;
 
     // RefBase
-    virtual void        onFirstRef();
+    void onFirstRef() final;
 
             void        exit();
             void        setWriteBlocked(uint32_t sequence);
@@ -1730,7 +1811,9 @@
     mutable audio_utils::mutex mMutex;
     bool                       mAsyncError;
 
-    audio_utils::mutex& mutex() const { return mMutex; }
+    audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::AsyncCallbackThread_Mutex) {
+        return mMutex;
+    }
 };
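// --- Illustrative sketch, not part of the patch: the mutex() accessor pattern used by
// the thread classes. Assuming RETURN_CAPABILITY expands to clang's lock_returned
// attribute (as in common thread-annotation macro sets), the accessor tells the
// analysis which capability the returned reference stands for, so REQUIRES(mutex())
// and GUARDED_BY(mutex()) resolve to the member mMutex. Reuses the macros and the
// Mutex/ScopedLock helpers from the first sketch; the class below is illustrative.

#define RETURN_CAPABILITY(x) __attribute__((lock_returned(x)))

class CallbackLike {
public:
    Mutex& mutex() const RETURN_CAPABILITY(mMutex) { return mMutex; }
    void setAsyncError() EXCLUDES(mutex()) {
        ScopedLock _l(mutex());
        mAsyncError = true;          // ok: the analysis knows mutex() stands for mMutex
    }
private:
    mutable Mutex mMutex;
    bool mAsyncError GUARDED_BY(mutex()) = false;
};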
 
 class DuplicatingThread : public MixerThread, public IAfDuplicatingThread {
@@ -1745,41 +1828,42 @@
     }
 
     // Thread virtuals
-    void addOutputTrack(IAfPlaybackThread* thread) final;
-    void removeOutputTrack(IAfPlaybackThread* thread) final;
+    void addOutputTrack(IAfPlaybackThread* thread) final EXCLUDES_ThreadBase_Mutex;
+    void removeOutputTrack(IAfPlaybackThread* thread) final EXCLUDES_ThreadBase_Mutex;
     uint32_t waitTimeMs() const final { return mWaitTimeMs; }
 
                 void        sendMetadataToBackend_l(
-                        const StreamOutHalInterface::SourceMetadata& metadata) override;
+            const StreamOutHalInterface::SourceMetadata& metadata) final REQUIRES(mutex());
 protected:
     virtual     uint32_t    activeSleepTimeUs() const;
-                void        dumpInternals_l(int fd, const Vector<String16>& args) override;
+    void dumpInternals_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
 
 private:
-                bool        outputsReady();
+    bool outputsReady() REQUIRES(ThreadBase_ThreadLoop);
 protected:
     // threadLoop snippets
-    virtual     void        threadLoop_mix();
-    virtual     void        threadLoop_sleepTime();
-    virtual     ssize_t     threadLoop_write();
-    virtual     void        threadLoop_standby();
-    virtual     void        cacheParameters_l();
+    void threadLoop_mix() final REQUIRES(ThreadBase_ThreadLoop);
+    void threadLoop_sleepTime() final REQUIRES(ThreadBase_ThreadLoop);
+    ssize_t threadLoop_write() final REQUIRES(ThreadBase_ThreadLoop);
+    void threadLoop_standby() final REQUIRES(ThreadBase_ThreadLoop);
+    void cacheParameters_l() final REQUIRES(mutex(), ThreadBase_ThreadLoop);
 
 private:
     // called from threadLoop, addOutputTrack, removeOutputTrack
-    virtual     void        updateWaitTime_l();
+    void updateWaitTime_l() REQUIRES(mutex());
 protected:
-    virtual     void        saveOutputTracks();
-    virtual     void        clearOutputTracks();
+    void saveOutputTracks() final REQUIRES(mutex(), ThreadBase_ThreadLoop);
+    void clearOutputTracks() final REQUIRES(mutex(), ThreadBase_ThreadLoop);
 private:
 
                 uint32_t    mWaitTimeMs;
-    SortedVector <sp<IAfOutputTrack>>  outputTracks;
-    SortedVector <sp<IAfOutputTrack>>  mOutputTracks;
+    // NO_THREAD_SAFETY_ANALYSIS  GUARDED_BY(ThreadBase_ThreadLoop)
+    SortedVector <sp<IAfOutputTrack>> outputTracks;
+    SortedVector <sp<IAfOutputTrack>> mOutputTracks GUARDED_BY(mutex());
 public:
     virtual     bool        hasFastMixer() const { return false; }
                 status_t    threadloop_getHalTimestamp_l(
-                                    ExtendedTimestamp *timestamp) const override {
+            ExtendedTimestamp *timestamp) const override REQUIRES(mutex()) {
         if (mOutputTracks.size() > 0) {
             // forward the first OutputTrack's kernel information for timestamp.
             const ExtendedTimestamp trackTimestamp =
@@ -1806,11 +1890,12 @@
 
     bool hasFastMixer() const final { return false; }
 
-    status_t setRequestedLatencyMode(audio_latency_mode_t mode) final;
+    status_t setRequestedLatencyMode(audio_latency_mode_t mode) final EXCLUDES_ThreadBase_Mutex;
 
 protected:
-    void checkOutputStageEffects() final;
-    void setHalLatencyMode_l() final;
+    void checkOutputStageEffects() final
+            REQUIRES(ThreadBase_ThreadLoop) EXCLUDES_ThreadBase_Mutex;
+    void setHalLatencyMode_l() final REQUIRES(mutex());
 
 private:
             // Do not request a specific mode by default
@@ -1836,15 +1921,15 @@
     ~RecordThread() override;
 
     // no addTrack_l ?
-    void destroyTrack_l(const sp<IAfRecordTrack>& track) final;
-    void removeTrack_l(const sp<IAfRecordTrack>& track) final;
+    void destroyTrack_l(const sp<IAfRecordTrack>& track) final REQUIRES(mutex());
+    void removeTrack_l(const sp<IAfRecordTrack>& track) final REQUIRES(mutex());
 
     // Thread virtuals
-    bool threadLoop() final;
-    void preExit() final;
+    bool threadLoop() final REQUIRES(ThreadBase_ThreadLoop) EXCLUDES_ThreadBase_Mutex;
+    void preExit() final EXCLUDES_ThreadBase_Mutex;
 
     // RefBase
-    void onFirstRef() final;
+    void onFirstRef() final EXCLUDES_ThreadBase_Mutex;
 
     status_t initCheck() const final { return mInput == nullptr ? NO_INIT : NO_ERROR; }
 
@@ -1868,15 +1953,15 @@
                     status_t *status /*non-NULL*/,
                     audio_port_handle_t portId,
                     int32_t maxSharedAudioHistoryMs) final
-            REQUIRES(audio_utils::AudioFlinger_Mutex);
+            REQUIRES(audio_utils::AudioFlinger_Mutex) EXCLUDES_ThreadBase_Mutex;
 
             status_t start(IAfRecordTrack* recordTrack,
                               AudioSystem::sync_event_t event,
-                              audio_session_t triggerSession) final;
+            audio_session_t triggerSession) final EXCLUDES_ThreadBase_Mutex;
 
             // ask the thread to stop the specified track, and
             // return true if the caller should then do its part of the stopping process
-    bool stop(IAfRecordTrack* recordTrack) final;
+    bool stop(IAfRecordTrack* recordTrack) final EXCLUDES_ThreadBase_Mutex;
     AudioStreamIn* getInput() const final { return mInput; }
     AudioStreamIn* clearInput() final;
 
@@ -1884,27 +1969,30 @@
             virtual sp<StreamHalInterface> stream() const;
 
 
-    virtual bool        checkForNewParameter_l(const String8& keyValuePair,
-                                               status_t& status);
-    virtual void        cacheParameters_l() {}
-    virtual String8     getParameters(const String8& keys);
-    void ioConfigChanged(audio_io_config_event_t event, pid_t pid = 0,
+    virtual bool checkForNewParameter_l(const String8& keyValuePair,
+            status_t& status) REQUIRES(mutex());
+    virtual void cacheParameters_l() REQUIRES(mutex(), ThreadBase_ThreadLoop) {}
+    virtual String8 getParameters(const String8& keys) EXCLUDES_ThreadBase_Mutex;
+
+    // Hold either the AudioFlinger::mutex or the ThreadBase::mutex
+    void ioConfigChanged_l(audio_io_config_event_t event, pid_t pid = 0,
             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final;
     virtual status_t    createAudioPatch_l(const struct audio_patch *patch,
-                                           audio_patch_handle_t *handle);
-    virtual status_t    releaseAudioPatch_l(const audio_patch_handle_t handle);
-            void        updateOutDevices(const DeviceDescriptorBaseVector& outDevices) override;
-            void        resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) override;
+            audio_patch_handle_t *handle) REQUIRES(mutex());
+    virtual status_t releaseAudioPatch_l(const audio_patch_handle_t handle) REQUIRES(mutex());
+    void updateOutDevices(const DeviceDescriptorBaseVector& outDevices) override
+            EXCLUDES_ThreadBase_Mutex;
+    void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) override REQUIRES(mutex());
 
-    void addPatchTrack(const sp<IAfPatchRecord>& record) final;
-    void deletePatchTrack(const sp<IAfPatchRecord>& record) final;
+    void addPatchTrack(const sp<IAfPatchRecord>& record) final EXCLUDES_ThreadBase_Mutex;
+    void deletePatchTrack(const sp<IAfPatchRecord>& record) final EXCLUDES_ThreadBase_Mutex;
 
-            void        readInputParameters_l();
-    uint32_t getInputFramesLost() const final;
+    void readInputParameters_l() REQUIRES(mutex());
+    uint32_t getInputFramesLost() const final EXCLUDES_ThreadBase_Mutex;
 
-    virtual status_t addEffectChain_l(const sp<IAfEffectChain>& chain);
-    virtual size_t removeEffectChain_l(const sp<IAfEffectChain>& chain);
-            uint32_t hasAudioSession_l(audio_session_t sessionId) const override {
+    virtual status_t addEffectChain_l(const sp<IAfEffectChain>& chain) REQUIRES(mutex());
+    virtual size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) REQUIRES(mutex());
+    uint32_t hasAudioSession_l(audio_session_t sessionId) const override REQUIRES(mutex()) {
                          return ThreadBase::hasAudioSession_l(sessionId, mTracks);
                      }
 
@@ -1913,7 +2001,8 @@
             // FIXME replace by Set [and implement Bag/Multiset for other uses].
             KeyedVector<audio_session_t, bool> sessionIds() const;
 
-            status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) override;
+    status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) override
+            EXCLUDES_ThreadBase_Mutex;
             bool     isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const override;
 
     static void syncStartEventCallback(const wp<audioflinger::SyncEvent>& event);
@@ -1922,52 +2011,55 @@
     bool hasFastCapture() const final { return mFastCapture != 0; }
     virtual void        toAudioPortConfig(struct audio_port_config *config);
 
-    virtual status_t    checkEffectCompatibility_l(const effect_descriptor_t *desc,
-                                                   audio_session_t sessionId);
+    virtual status_t checkEffectCompatibility_l(const effect_descriptor_t *desc,
+            audio_session_t sessionId) REQUIRES(mutex());
 
-    virtual void        acquireWakeLock_l() {
+    virtual void acquireWakeLock_l() REQUIRES(mutex()) {
                             ThreadBase::acquireWakeLock_l();
-                            mActiveTracks.updatePowerState(this, true /* force */);
+        mActiveTracks.updatePowerState_l(this, true /* force */);
                         }
 
-    void checkBtNrec() final;
+    void checkBtNrec() final EXCLUDES_ThreadBase_Mutex;
 
             // Sets the UID records silence
-    void setRecordSilenced(audio_port_handle_t portId, bool silenced) final;
+    void setRecordSilenced(audio_port_handle_t portId, bool silenced) final
+            EXCLUDES_ThreadBase_Mutex;
 
     status_t getActiveMicrophones(
-            std::vector<media::MicrophoneInfoFw>* activeMicrophones) const final;
-    status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) final;
-    status_t setPreferredMicrophoneFieldDimension(float zoom) final;
+            std::vector<media::MicrophoneInfoFw>* activeMicrophones) const final
+            EXCLUDES_ThreadBase_Mutex;
+    status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) final
+            EXCLUDES_ThreadBase_Mutex;
+    status_t setPreferredMicrophoneFieldDimension(float zoom) final EXCLUDES_ThreadBase_Mutex;
 
-            MetadataUpdate        updateMetadata_l() override;
+    MetadataUpdate updateMetadata_l() override REQUIRES(mutex());
 
     bool fastTrackAvailable() const final { return mFastTrackAvail; }
     void setFastTrackAvailable(bool available) final { mFastTrackAvail = available; }
 
-            bool        isTimestampCorrectionEnabled() const override {
+    bool isTimestampCorrectionEnabled_l() const override REQUIRES(mutex()) {
                             // checks popcount for exactly one device.
                             // Is currently disabled. Before enabling,
                             // verify compressed record timestamps.
                             return audio_is_input_device(mTimestampCorrectedDevice)
-                                    && inDeviceType() == mTimestampCorrectedDevice;
+                && inDeviceType_l() == mTimestampCorrectedDevice;
                         }
 
     status_t shareAudioHistory(const std::string& sharedAudioPackageName,
                                           audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
-            int64_t sharedAudioStartMs = -1) final;
+            int64_t sharedAudioStartMs = -1) final EXCLUDES_ThreadBase_Mutex;
             status_t    shareAudioHistory_l(const std::string& sharedAudioPackageName,
                                           audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
-                                          int64_t sharedAudioStartMs = -1);
-    void resetAudioHistory_l() final;
+            int64_t sharedAudioStartMs = -1) REQUIRES(mutex());
+    void resetAudioHistory_l() final REQUIRES(mutex());
 
     bool isStreamInitialized() const final {
                             return !(mInput == nullptr || mInput->stream == nullptr);
                         }
 
 protected:
-            void        dumpInternals_l(int fd, const Vector<String16>& args) override;
-            void        dumpTracks_l(int fd, const Vector<String16>& args) override;
+    void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
+    void dumpTracks_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
 
 private:
             // Enter standby if not already in standby, and set mStandby flag
@@ -1976,10 +2068,10 @@
             // Call the HAL standby method unconditionally, and don't change mStandby flag
             void    inputStandBy();
 
-            void    checkBtNrec_l();
+    void checkBtNrec_l() REQUIRES(mutex());
 
-            int32_t getOldestFront_l();
-            void    updateFronts_l(int32_t offset);
+    int32_t getOldestFront_l() REQUIRES(mutex());
+    void updateFronts_l(int32_t offset) REQUIRES(mutex());
 
             AudioStreamIn                       *mInput;
             Source                              *mSource;
@@ -2065,88 +2157,112 @@
                                       audio_session_t sessionId,
                                       const sp<MmapStreamCallback>& callback,
                                       audio_port_handle_t deviceId,
-                                      audio_port_handle_t portId) override;
+            audio_port_handle_t portId) override EXCLUDES_ThreadBase_Mutex {
+        audio_utils::lock_guard l(mutex());
+        configure_l(attr, streamType, sessionId, callback, deviceId, portId);
+    }
 
-    void disconnect() final;
+    void configure_l(const audio_attributes_t* attr,
+            audio_stream_type_t streamType,
+            audio_session_t sessionId,
+            const sp<MmapStreamCallback>& callback,
+            audio_port_handle_t deviceId,
+            audio_port_handle_t portId) REQUIRES(mutex());
+
+    void disconnect() final EXCLUDES_ThreadBase_Mutex;
 
     // MmapStreamInterface for adapter.
-    status_t createMmapBuffer(int32_t minSizeFrames, struct audio_mmap_buffer_info* info) final;
-    status_t getMmapPosition(struct audio_mmap_position* position) const override;
+    status_t createMmapBuffer(int32_t minSizeFrames, struct audio_mmap_buffer_info* info) final
+            EXCLUDES_ThreadBase_Mutex;
+    status_t getMmapPosition(struct audio_mmap_position* position) const override
+            EXCLUDES_ThreadBase_Mutex;
     status_t start(const AudioClient& client,
                    const audio_attributes_t *attr,
-            audio_port_handle_t* handle) final;
-    status_t stop(audio_port_handle_t handle) final;
-    status_t standby() final;
-    status_t getExternalPosition(uint64_t* position, int64_t* timeNanos) const = 0;
-    status_t reportData(const void* buffer, size_t frameCount) override;
+            audio_port_handle_t* handle) final EXCLUDES_ThreadBase_Mutex;
+    status_t stop(audio_port_handle_t handle) final EXCLUDES_ThreadBase_Mutex;
+    status_t standby() final EXCLUDES_ThreadBase_Mutex;
+    status_t getExternalPosition(uint64_t* position, int64_t* timeNanos) const
+            EXCLUDES_ThreadBase_Mutex = 0;
+    status_t reportData(const void* buffer, size_t frameCount) override EXCLUDES_ThreadBase_Mutex;
 
     // RefBase
     void onFirstRef() final;
 
     // Thread virtuals
-    bool threadLoop() final;
+    bool threadLoop() final REQUIRES(ThreadBase_ThreadLoop) EXCLUDES_ThreadBase_Mutex;
 
     // Not in ThreadBase
-    virtual void threadLoop_exit() final;
-    virtual void threadLoop_standby() final;
-    virtual bool shouldStandby_l() final { return false; }
+    virtual void threadLoop_exit() final REQUIRES(ThreadBase_ThreadLoop);
+    virtual void threadLoop_standby() final REQUIRES(ThreadBase_ThreadLoop);
+    virtual bool shouldStandby_l() final REQUIRES(mutex()) { return false; }
     virtual status_t exitStandby_l() REQUIRES(mutex());
 
     status_t initCheck() const final { return mHalStream == nullptr ? NO_INIT : NO_ERROR; }
     size_t frameCount() const final { return mFrameCount; }
-    bool checkForNewParameter_l(const String8& keyValuePair, status_t& status) final;
-    String8 getParameters(const String8& keys) final;
-    void ioConfigChanged(audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final;
-                void        readHalParameters_l();
-    void cacheParameters_l() final {}
+    bool checkForNewParameter_l(const String8& keyValuePair, status_t& status)
+            final REQUIRES(mutex());
+    String8 getParameters(const String8& keys) final EXCLUDES_ThreadBase_Mutex;
+    void ioConfigChanged_l(audio_io_config_event_t event, pid_t pid = 0,
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final
+            /* holds either AF::mutex or TB::mutex */;
+    void readHalParameters_l() REQUIRES(mutex());
+    void cacheParameters_l() final REQUIRES(mutex(), ThreadBase_ThreadLoop) {}
     status_t createAudioPatch_l(
-            const struct audio_patch* patch, audio_patch_handle_t* handle) final;
-    status_t releaseAudioPatch_l(const audio_patch_handle_t handle) final;
+            const struct audio_patch* patch, audio_patch_handle_t* handle) final
+            REQUIRES(mutex());
+    status_t releaseAudioPatch_l(const audio_patch_handle_t handle) final
+            REQUIRES(mutex());
+    // NO_THREAD_SAFETY_ANALYSIS
     void toAudioPortConfig(struct audio_port_config* config) override;
 
     sp<StreamHalInterface> stream() const final { return mHalStream; }
-    status_t addEffectChain_l(const sp<IAfEffectChain>& chain) final;
-    size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) final;
+    status_t addEffectChain_l(const sp<IAfEffectChain>& chain) final REQUIRES(mutex());
+    size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) final REQUIRES(mutex());
     status_t checkEffectCompatibility_l(
-            const effect_descriptor_t *desc, audio_session_t sessionId) final;
+            const effect_descriptor_t *desc, audio_session_t sessionId) final REQUIRES(mutex());
 
-    uint32_t hasAudioSession_l(audio_session_t sessionId) const override {
+    uint32_t hasAudioSession_l(audio_session_t sessionId) const override REQUIRES(mutex()) {
                                 // Note: using mActiveTracks as no mTracks here.
                                 return ThreadBase::hasAudioSession_l(sessionId, mActiveTracks);
                             }
     status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) final;
     bool isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const final;
 
-    virtual void checkSilentMode_l() {} // cannot be const (RecordThread)
-    virtual void processVolume_l() {}
-                void        checkInvalidTracks_l();
+    virtual void checkSilentMode_l() REQUIRES(mutex()) {} // cannot be const (RecordThread)
+    virtual void processVolume_l() REQUIRES(mutex()) {}
+    void checkInvalidTracks_l() REQUIRES(mutex());
 
     // Not in ThreadBase
-    virtual audio_stream_type_t streamType() const { return AUDIO_STREAM_DEFAULT; }
-    virtual void invalidateTracks(audio_stream_type_t /* streamType */) {}
-    void invalidateTracks(std::set<audio_port_handle_t>& /* portIds */) override {}
+    virtual audio_stream_type_t streamType_l() const REQUIRES(mutex()) {
+        return AUDIO_STREAM_DEFAULT;
+    }
+    virtual void invalidateTracks(audio_stream_type_t /* streamType */)
+            EXCLUDES_ThreadBase_Mutex {}
+    void invalidateTracks(std::set<audio_port_handle_t>& /* portIds */) override
+            EXCLUDES_ThreadBase_Mutex {}
 
                 // Sets the UID records silence
     void setRecordSilenced(
-            audio_port_handle_t /* portId */, bool /* silenced */) override {}
+            audio_port_handle_t /* portId */, bool /* silenced */) override
+            EXCLUDES_ThreadBase_Mutex {}
 
     bool isStreamInitialized() const override { return false; }
 
-                void        setClientSilencedState_l(audio_port_handle_t portId, bool silenced) {
+    void setClientSilencedState_l(audio_port_handle_t portId, bool silenced) REQUIRES(mutex()) {
                                 mClientSilencedStates[portId] = silenced;
                             }
 
-                size_t      eraseClientSilencedState_l(audio_port_handle_t portId) {
+    size_t eraseClientSilencedState_l(audio_port_handle_t portId) REQUIRES(mutex()) {
                                 return mClientSilencedStates.erase(portId);
                             }
 
-                bool        isClientSilenced_l(audio_port_handle_t portId) const {
+    bool isClientSilenced_l(audio_port_handle_t portId) const REQUIRES(mutex()) {
                                 const auto it = mClientSilencedStates.find(portId);
                                 return it != mClientSilencedStates.end() ? it->second : false;
                             }
 
-                void        setClientSilencedIfExists_l(audio_port_handle_t portId, bool silenced) {
+    void setClientSilencedIfExists_l(audio_port_handle_t portId, bool silenced)
+            REQUIRES(mutex()) {
                                 const auto it = mClientSilencedStates.find(portId);
                                 if (it != mClientSilencedStates.end()) {
                                     it->second = silenced;
@@ -2154,28 +2270,28 @@
                             }
 
  protected:
-    void dumpInternals_l(int fd, const Vector<String16>& args) override;
-    void dumpTracks_l(int fd, const Vector<String16>& args) final;
+    void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
+    void dumpTracks_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
 
                 /**
                  * @brief mDeviceId  current device port unique identifier
                  */
-                audio_port_handle_t     mDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t mDeviceId GUARDED_BY(mutex()) = AUDIO_PORT_HANDLE_NONE;
 
-                audio_attributes_t      mAttr;
-                audio_session_t         mSessionId;
-                audio_port_handle_t     mPortId;
+    audio_attributes_t mAttr GUARDED_BY(mutex());
+    audio_session_t mSessionId GUARDED_BY(mutex());
+    audio_port_handle_t mPortId GUARDED_BY(mutex());
 
-                wp<MmapStreamCallback>  mCallback;
-                sp<StreamHalInterface>  mHalStream;
-                sp<DeviceHalInterface>  mHalDevice;
-                AudioHwDevice* const    mAudioHwDev;
-                ActiveTracks<IAfMmapTrack> mActiveTracks;
-                float                   mHalVolFloat;
-                std::map<audio_port_handle_t, bool> mClientSilencedStates;
+    wp<MmapStreamCallback> mCallback GUARDED_BY(mutex());
+    sp<StreamHalInterface> mHalStream; // NO_THREAD_SAFETY_ANALYSIS
+    sp<DeviceHalInterface> mHalDevice GUARDED_BY(mutex());
+    AudioHwDevice* const mAudioHwDev GUARDED_BY(mutex());
+    ActiveTracks<IAfMmapTrack> mActiveTracks GUARDED_BY(mutex());
+    float mHalVolFloat GUARDED_BY(mutex());
+    std::map<audio_port_handle_t, bool> mClientSilencedStates GUARDED_BY(mutex());
 
-                int32_t                 mNoCallbackWarningCount;
-     static     constexpr int32_t       kMaxNoCallbackWarnings = 5;
+    int32_t mNoCallbackWarningCount GUARDED_BY(mutex());
+    static constexpr int32_t kMaxNoCallbackWarnings = 5;
 };
 
 class MmapPlaybackThread : public MmapThread, public IAfMmapPlaybackThread,
@@ -2193,28 +2309,31 @@
                                       audio_session_t sessionId,
                                       const sp<MmapStreamCallback>& callback,
                                       audio_port_handle_t deviceId,
-                                      audio_port_handle_t portId) final;
+            audio_port_handle_t portId) final EXCLUDES_ThreadBase_Mutex;
 
-    AudioStreamOut* clearOutput() final;
+    AudioStreamOut* clearOutput() final EXCLUDES_ThreadBase_Mutex;
 
                 // VolumeInterface
     void setMasterVolume(float value) final;
-    void setMasterBalance(float /* value */) final {}  // Needs implementation?
-    void setMasterMute(bool muted) final;
-    void setStreamVolume(audio_stream_type_t stream, float value) final;
-    void setStreamMute(audio_stream_type_t stream, bool muted) final;
-    float streamVolume(audio_stream_type_t stream) const final;
+    // Needs implementation?
+    void setMasterBalance(float /* value */) final EXCLUDES_ThreadBase_Mutex {}
+    void setMasterMute(bool muted) final EXCLUDES_ThreadBase_Mutex;
+    void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
+    void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
+    float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
 
-                void        setMasterMute_l(bool muted) { mMasterMute = muted; }
+    void setMasterMute_l(bool muted) REQUIRES(mutex()) { mMasterMute = muted; }
 
-    void invalidateTracks(audio_stream_type_t streamType) final;
-    void invalidateTracks(std::set<audio_port_handle_t>& portIds) final;
+    void invalidateTracks(audio_stream_type_t streamType) final EXCLUDES_ThreadBase_Mutex;
+    void invalidateTracks(std::set<audio_port_handle_t>& portIds) final EXCLUDES_ThreadBase_Mutex;
 
-    audio_stream_type_t streamType() const final { return mStreamType; }
-    void checkSilentMode_l() final;
-    void processVolume_l() final;
+    audio_stream_type_t streamType_l() const final REQUIRES(mutex()) {
+        return mStreamType;
+    }
+    void checkSilentMode_l() final REQUIRES(mutex());
+    void processVolume_l() final REQUIRES(mutex());
 
-    MetadataUpdate updateMetadata_l() final;
+    MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
 
     void toAudioPortConfig(struct audio_port_config* config) final;
 
@@ -2232,16 +2351,21 @@
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
 protected:
-    void dumpInternals_l(int fd, const Vector<String16>& args) final;
+    void dumpInternals_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
+    float streamVolume_l() const REQUIRES(mutex()) {
+        return mStreamTypes[mStreamType].volume;
+    }
+    bool streamMuted_l() const REQUIRES(mutex()) {
+        return mStreamTypes[mStreamType].mute;
+    }
 
-                audio_stream_type_t         mStreamType;
-                float                       mMasterVolume;
-                float                       mStreamVolume;
-                bool                        mMasterMute;
-                bool                        mStreamMute;
-                AudioStreamOut*             mOutput;
+    stream_type_t mStreamTypes[AUDIO_STREAM_CNT] GUARDED_BY(mutex());
+    audio_stream_type_t mStreamType GUARDED_BY(mutex());
+    float mMasterVolume GUARDED_BY(mutex());
+    bool mMasterMute GUARDED_BY(mutex());
+    AudioStreamOut* mOutput;  // NO_THREAD_SAFETY_ANALYSIS
 
-                mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
+    mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;  // locked internally
 };
 
 class MmapCaptureThread : public MmapThread, public IAfMmapCaptureThread
@@ -2254,13 +2378,14 @@
         return sp<IAfMmapCaptureThread>::fromExisting(this);
     }
 
-    AudioStreamIn* clearInput() final;
+    AudioStreamIn* clearInput() final EXCLUDES_ThreadBase_Mutex;
 
     status_t exitStandby_l() REQUIRES(mutex()) final;
 
-    MetadataUpdate updateMetadata_l() final;
-    void processVolume_l() final;
-    void setRecordSilenced(audio_port_handle_t portId, bool silenced) final;
+    MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
+    void processVolume_l() final REQUIRES(mutex());
+    void setRecordSilenced(audio_port_handle_t portId, bool silenced) final
+            EXCLUDES_ThreadBase_Mutex;
 
     void toAudioPortConfig(struct audio_port_config* config) final;
 
@@ -2272,7 +2397,7 @@
 
 protected:
 
-                AudioStreamIn*  mInput;
+    AudioStreamIn* mInput;  // NO_THREAD_SAFETY_ANALYSIS
 };
 
 class BitPerfectThread : public MixerThread {
@@ -2281,13 +2406,15 @@
                      audio_io_handle_t id, bool systemReady);
 
 protected:
-    mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final;
-    void threadLoop_mix() final;
+    mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final
+            REQUIRES(mutex(), ThreadBase_ThreadLoop);
+    void threadLoop_mix() final REQUIRES(ThreadBase_ThreadLoop);
 
 private:
-    bool mIsBitPerfect;
-    float mVolumeLeft = 0.f;
-    float mVolumeRight = 0.f;
+    // These variables are accessed only on the threadLoop; hence they need no mutex.
+    bool mIsBitPerfect GUARDED_BY(ThreadBase_ThreadLoop) = false;
+    float mVolumeLeft GUARDED_BY(ThreadBase_ThreadLoop) = 0.f;
+    float mVolumeRight GUARDED_BY(ThreadBase_ThreadLoop) = 0.f;
 };
 
 } // namespace android
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 4e37953..5708c61 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -236,6 +236,22 @@
     /** Set that a metadata has changed and needs to be notified to backend. Thread safe. */
     void setMetadataHasChanged() final { mChangeNotified.clear(); }
 
+    /**
+     * Called when a track moves to active state to record its contribution to battery usage.
+     * Track state transitions should eventually be handled within the track class.
+     */
+    void beginBatteryAttribution() final {
+        mBatteryStatsHolder.emplace(uid());
+    }
+
+    /**
+     * Called when a track moves out of the active state to record its contribution
+     * to battery usage.
+     */
+    void endBatteryAttribution() final {
+        mBatteryStatsHolder.reset();
+    }
+
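// --- Illustrative sketch, not part of the patch: the RAII battery-attribution pattern
// above, with a hypothetical Handle type standing in for
// mediautils::BatteryStatsAudioHandle. Emplacing the optional starts attribution for
// the track's uid and resetting it (or destroying the track) ends it, so the reported
// active interval is bounded by the holder's lifetime.

#include <cstdio>
#include <optional>
#include <sys/types.h>

struct BatteryHandle {                                   // hypothetical stand-in
    explicit BatteryHandle(uid_t uid) { std::printf("begin attribution, uid=%u\n", (unsigned)uid); }
    ~BatteryHandle() { std::printf("end attribution\n"); }
};

class TrackLike {
public:
    explicit TrackLike(uid_t uid) : mUid(uid) {}
    void beginBatteryAttribution() { mBatteryStatsHolder.emplace(mUid); }
    void endBatteryAttribution() { mBatteryStatsHolder.reset(); }
private:
    const uid_t mUid;
    std::optional<BatteryHandle> mBatteryStatsHolder;    // RAII holder, empty when inactive
};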
 protected:
     DISALLOW_COPY_AND_ASSIGN(TrackBase);
 
@@ -379,6 +395,8 @@
 
     // If the last track change was notified to the client with readAndClearHasChanged
     std::atomic_flag    mChangeNotified = ATOMIC_FLAG_INIT;
+    // RAII object for battery stats book-keeping
+    std::optional<mediautils::BatteryStatsAudioHandle> mBatteryStatsHolder;
 };
 
 class PatchTrackBase : public PatchProxyBufferProvider, public virtual IAfPatchTrackBase
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 31246ec..4fe5b84 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -113,8 +113,7 @@
         mChannelCount(isOut ?
                 audio_channel_count_from_out_mask(channelMask) :
                 audio_channel_count_from_in_mask(channelMask)),
-        mFrameSize(audio_has_proportional_frames(format) ?
-                mChannelCount * audio_bytes_per_sample(format) : sizeof(int8_t)),
+        mFrameSize(audio_bytes_per_frame(mChannelCount, format)),
         mFrameCount(frameCount),
         mSessionId(sessionId),
         mIsOut(isOut),
@@ -451,6 +450,10 @@
     if (*_aidl_return != OK) {
         return Status::ok();
     }
+
+    // restrict position modulo INT_MAX to avoid integer sanitization abort
+    legacy.mPosition &= INT_MAX;
+
     *timestamp = legacy2aidl_AudioTimestamp_AudioTimestampInternal(legacy).value();
     return Status::ok();
 }
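// --- Illustrative sketch, not part of the patch: the effect of the masking above. The
// server position is an unsigned 32-bit counter, and the value is later carried in a
// signed field; masking with INT_MAX keeps it in [0, INT_MAX] so the conversion cannot
// trip the integer sanitizer. Function and parameter names here are assumptions.

#include <climits>
#include <cstdint>

inline int32_t positionForAidl(uint32_t serverPosition) {
    return static_cast<int32_t>(serverPosition & INT_MAX);  // never negative, no overflow
}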
@@ -892,12 +895,12 @@
             audio_utils::lock_guard _l(thread->mutex());
             auto* const playbackThread = thread->asIAfPlaybackThread().get();
             wasActive = playbackThread->destroyTrack_l(this);
+            forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->destroy(); });
         }
         if (isExternalTrack() && !wasActive) {
             AudioSystem::releaseOutput(mPortId);
         }
     }
-    forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
 }
 
 void Track::appendDumpHeader(String8& result) const
@@ -1078,7 +1081,13 @@
         // Additionally PatchProxyBufferProvider::obtainBuffer (called by PathTrack::getNextBuffer)
         // does not allow 0 frame size request contrary to getNextBuffer
     }
-    for (auto& teePatch : mTeePatches) {
+    TeePatches teePatches;
+    if (mTeePatchesRWLock.tryReadLock() == NO_ERROR) {
+        // Cache a copy of tee patches in case it is updated while using.
+        teePatches = mTeePatches;
+        mTeePatchesRWLock.unlock();
+    }
+    for (auto& teePatch : teePatches) {
         IAfPatchRecord* patchRecord = teePatch.patchRecord.get();
         const size_t framesWritten = patchRecord->writeFrames(
                 sourceBuffer.i8, frameCount, mFrameSize);
@@ -1091,7 +1100,7 @@
     using namespace std::chrono_literals;
     // Average is ~20us per track, this should virtually never be logged (Logging takes >200us)
     ALOGD_IF(spent > 500us, "%s: took %lldus to intercept %zu tracks", __func__,
-             spent.count(), mTeePatches.size());
+             spent.count(), teePatches.size());
 }
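// --- Illustrative sketch, not part of the patch: the tee-patch access pattern above,
// using the android::RWLock API from utils/RWLock.h. The interception path holds the
// read lock only long enough to copy the container, and skips the copy entirely if the
// lock is contended; the updater swaps the container in under a scoped write lock.
// Type names other than RWLock are illustrative.

#include <utils/Errors.h>
#include <utils/RWLock.h>
#include <vector>

struct PatchLike { int id; };

class TeePatchHolder {
public:
    void intercept() {
        std::vector<PatchLike> snapshot;
        if (mLock.tryReadLock() == android::NO_ERROR) {
            snapshot = mPatches;                  // short critical section: copy only
            mLock.unlock();
        }
        for (const auto& p : snapshot) {
            (void)p;                              // write frames to each patch here
        }
    }
    void update(std::vector<PatchLike> newPatches) {
        android::RWLock::AutoWLock wl(mLock);
        mPatches = std::move(newPatches);         // publish under the write lock
    }
private:
    mutable android::RWLock mLock;
    std::vector<PatchLike> mPatches;
};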
 
 // ExtendedAudioBufferProvider interface
@@ -1170,9 +1179,11 @@
     if (thread != 0) {
         if (isOffloaded()) {
             audio_utils::lock_guard _laf(thread->afThreadCallback()->mutex());
+            const bool nonOffloadableGlobalEffectEnabled =
+                    thread->afThreadCallback()->isNonOffloadableGlobalEffectEnabled_l();
             audio_utils::lock_guard _lth(thread->mutex());
             sp<IAfEffectChain> ec = thread->getEffectChain_l(mSessionId);
-            if (thread->afThreadCallback()->isNonOffloadableGlobalEffectEnabled_l() ||
+            if (nonOffloadableGlobalEffectEnabled ||
                     (ec != 0 && ec->isNonOffloadableEnabled())) {
                 invalidate();
                 return PERMISSION_DENIED;
@@ -1270,12 +1281,13 @@
             buffer.mFrameCount = 1;
             (void) mAudioTrackServerProxy->obtainBuffer(&buffer, true /*ackFlush*/);
         }
+        if (status == NO_ERROR) {
+            forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->start(); });
+        }
     } else {
         status = BAD_VALUE;
     }
     if (status == NO_ERROR) {
-        forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
-
         // send format to AudioManager for playback activity monitoring
         const sp<IAudioManager> audioManager =
                 thread->afThreadCallback()->getOrCreateAudioManager();
@@ -1326,8 +1338,8 @@
             ALOGV("%s(%d): not stopping/stopped => stopping/stopped on thread %d",
                     __func__, mId, (int)mThreadIoHandle);
         }
+        forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->stop(); });
     }
-    forEachTeePatchTrack([](auto patchTrack) { patchTrack->stop(); });
 }
 
 void Track::pause()
@@ -1362,9 +1374,9 @@
         default:
             break;
         }
+        // Pausing the TeePatch to avoid a glitch on underrun, at the cost of buffered audio loss.
+        forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->pause(); });
     }
-    // Pausing the TeePatch to avoid a glitch on underrun, at the cost of buffered audio loss.
-    forEachTeePatchTrack([](auto patchTrack) { patchTrack->pause(); });
 }
 
 void Track::flush()
@@ -1425,9 +1437,10 @@
         // before mixer thread can run. This is important when offloading
         // because the hardware buffer could hold a large amount of audio
         playbackThread->broadcast_l();
+        // Flush the Tee so that, on resume, stale data is not replayed and the
+        // transition to new data does not glitch.
+        forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->flush(); });
     }
-    // Flush the Tee to avoid on resume playing old data and glitching on the transition to new data
-    forEachTeePatchTrack([](auto patchTrack) { patchTrack->flush(); });
 }
 
 // must be called with thread lock held
@@ -1606,19 +1619,22 @@
     *backInserter++ = metadata;
 }
 
-void Track::updateTeePatches() {
+void Track::updateTeePatches_l() {
     if (mTeePatchesToUpdate.has_value()) {
-        forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
-        mTeePatches = mTeePatchesToUpdate.value();
+        forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->destroy(); });
+        {
+            RWLock::AutoWLock writeLock(mTeePatchesRWLock);
+            mTeePatches = std::move(mTeePatchesToUpdate.value());
+        }
         if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
                 mState == TrackBase::STOPPING_1) {
-            forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+            forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->start(); });
         }
         mTeePatchesToUpdate.reset();
     }
 }
 
-void Track::setTeePatchesToUpdate(TeePatches teePatchesToUpdate) {
+void Track::setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) {
     ALOGW_IF(mTeePatchesToUpdate.has_value(),
              "%s, existing tee patches to update will be ignored", __func__);
     mTeePatchesToUpdate = std::move(teePatchesToUpdate);
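The locking scheme above is worth spelling out: updateTeePatches_l() holds the thread lock and takes the write lock on mTeePatchesRWLock only for the brief swap of mTeePatches, so the mixer-side iteration can proceed under a read lock without contending on the thread lock. A minimal sketch of what the read side could look like, assuming forEachTeePatchTrack_l iterates under a read lock (illustrative only, not the actual helper):

    template <typename F>
    void Track::forEachTeePatchTrack_l(F f) {
        // Readers are concurrent with each other; the writer in updateTeePatches_l()
        // holds the write lock only while swapping the vector.
        RWLock::AutoRLock readLock(mTeePatchesRWLock);
        for (auto& tp : mTeePatches) {
            f(tp.patchTrack);  // assumed TeePatch member name, for illustration
        }
    }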
diff --git a/services/audioflinger/afutils/NBAIO_Tee.cpp b/services/audioflinger/afutils/NBAIO_Tee.cpp
index 49057ce..86fb128 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.cpp
+++ b/services/audioflinger/afutils/NBAIO_Tee.cpp
@@ -43,6 +43,7 @@
  "aftee_Date_ThreadId_C_reason.wav" RecordThread
  "aftee_Date_ThreadId_M_reason.wav" MixerThread (Normal)
  "aftee_Date_ThreadId_F_reason.wav" MixerThread (Fast)
+ "aftee_Date_ThreadId_D_reason.raw" DirectOutputThread (SpdifStreamOut)
  "aftee_Date_ThreadId_TrackId_R_reason.wav" RecordTrack
  "aftee_Date_ThreadId_TrackId_TrackName_T_reason.wav" PlaybackTrack
 
@@ -120,7 +121,7 @@
         return directory.size() > 0 && directory[0] == '/';
     }
 
-    std::string generateFilename(const std::string &suffix) const {
+    std::string generateFilename(const std::string &suffix, audio_format_t format) const {
         char fileTime[sizeof("YYYYmmdd_HHMMSS_\0")];
         struct timeval tv;
         gettimeofday(&tv, nullptr /* struct timezone */);
@@ -130,7 +131,7 @@
             "incorrect fileTime buffer");
         char msec[4];
         (void)snprintf(msec, sizeof(msec), "%03d", (int)(tv.tv_usec / 1000));
-        return mPrefix + fileTime + msec + suffix + ".wav";
+        return mPrefix + fileTime + msec + suffix + (audio_is_linear_pcm(format) ? ".wav" : ".raw");
     }
 
     bool isManagedFilename(const char *name) {
@@ -225,7 +226,7 @@
 NBAIO_Tee::NBAIO_TeeImpl::NBAIO_SinkSource NBAIO_Tee::NBAIO_TeeImpl::makeSinkSource(
         const NBAIO_Format &format, size_t frames, bool *enabled)
 {
-    if (Format_isValid(format) && audio_is_linear_pcm(format.mFormat)) {
+    if (Format_isValid(format) && audio_has_proportional_frames(format.mFormat)) {
         Pipe *pipe = new Pipe(frames, format);
         size_t numCounterOffers = 0;
         const NBAIO_Format offers[1] = {format};
@@ -259,7 +260,7 @@
         audio_format_t format,
         const std::string &suffix)
 {
-    std::string filename = generateFilename(suffix);
+    std::string filename = generateFilename(suffix, format);
 
     if (mThreadPool.launch(std::string("create ") + filename,
             [=]() { return createInternal(reader, sampleRate, channelCount, format, filename); })
@@ -406,6 +407,7 @@
     switch (format) {
     case AUDIO_FORMAT_PCM_8_BIT:
     case AUDIO_FORMAT_PCM_16_BIT:
+    case AUDIO_FORMAT_IEC61937:
         sf_format = SF_FORMAT_PCM_16;
         writeFormat = AUDIO_FORMAT_PCM_16_BIT;
         ALOGV("%s: %s using PCM_16 for format %#x", __func__, filename.c_str(), format);
@@ -424,7 +426,6 @@
         break;
     default:
         // TODO:
-        // handle audio_has_proportional_frames() formats.
         // handle compressed formats as single byte files.
         return BAD_VALUE;
     }
@@ -440,7 +441,7 @@
         .frames = 0,
         .samplerate = (int)sampleRate,
         .channels = (int)channelCount,
-        .format = SF_FORMAT_WAV | sf_format,
+        .format = sf_format | (audio_is_linear_pcm(format) ? SF_FORMAT_WAV : SF_FORMAT_RAW),
     };
     SNDFILE *sf = sf_open(path.c_str(), SFM_WRITE, &info);
     if (sf == nullptr) {
@@ -463,7 +464,7 @@
         }
 
         // Convert input format to writeFormat as needed.
-        if (format != writeFormat) {
+        if (format != writeFormat && audio_is_linear_pcm(format)) {
             memcpy_by_audio_format(
                     buffer, writeFormat, buffer, format, actualRead * info.channels);
         }
diff --git a/services/audioflinger/afutils/NBAIO_Tee.h b/services/audioflinger/afutils/NBAIO_Tee.h
index 17b6175..a5c544e 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.h
+++ b/services/audioflinger/afutils/NBAIO_Tee.h
@@ -24,6 +24,7 @@
 #include <mutex>
 #include <set>
 
+#include <audio_utils/clock.h>
 #include <cutils/properties.h>
 #include <media/nbaio/NBAIO.h>
 
@@ -48,7 +49,7 @@
  *
  * Some AudioFlinger specific notes:
  *
- * 1) Tees capture only linear PCM data.
+ * 1) Tees capture only linear PCM or IEC61937 data.
  * 2) Tees without any data written are considered empty and do not generate
  *    any output files.
  * 2) Once a Tee dumps data, it is considered "emptied" and new data
@@ -58,6 +59,7 @@
  *    WAV integer PCM 32 bit for AUDIO_FORMAT_PCM_8_24_BIT, AUDIO_FORMAT_PCM_24_BIT_PACKED
  *                               AUDIO_FORMAT_PCM_32_BIT.
  *    WAV float PCM 32 bit for AUDIO_FORMAT_PCM_FLOAT.
+ *    RAW for AUDIO_FORMAT_IEC61937.
  *
  * Input_Thread:
  * 1) Capture buffer is teed when read from the HAL, before resampling for the AudioRecord
@@ -68,8 +70,8 @@
  *    NormalMixer output (if no FastMixer).
  * 2) DuplicatingThreads do not tee any mixed data. Apply a tee on the downstream OutputTrack
  *    or on the upstream playback Tracks.
- * 3) DirectThreads and OffloadThreads do not tee any data. The upstream track
- *    (if linear PCM format) may be teed to discover data.
+ * 3) DirectThreads and OffloadThreads with SpdifStreamOut will tee IEC61937 wrapped data.
+ *    Otherwise, the upstream track (if linear PCM format) may be teed to discover data.
  * 4) MmapThreads are not supported.
  *
  * Tracks:
@@ -198,8 +200,8 @@
 
             // determine number of frames for Tee
             if (frames == 0) {
-                // TODO: consider varying frame count based on type.
-                frames = DEFAULT_TEE_FRAMES;
+                frames = (static_cast<long long>(DEFAULT_TEE_DURATION_MS) * format.mSampleRate)
+                            / MILLIS_PER_SECOND;
             }
 
             // TODO: should we check minimum number of frames?
@@ -260,8 +262,7 @@
         static NBAIO_SinkSource makeSinkSource(
                 const NBAIO_Format &format, size_t frames, bool *enabled);
 
-        // 0x200000 stereo 16-bit PCM frames = 47.5 seconds at 44.1 kHz, 8 megabytes
-        static constexpr size_t DEFAULT_TEE_FRAMES = 0x200000;
+        static constexpr size_t DEFAULT_TEE_DURATION_MS = 60'000;
 
         // atomic status checking
         std::atomic<bool> mEnabled{false};
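For scale, the duration-based default above replaces the fixed 0x200000-frame pipe with one sized from the stream sample rate; a quick worked example of the arithmetic (values are illustrative):

    // 60'000 ms at 48 kHz:
    //   frames = 60'000 * 48'000 / MILLIS_PER_SECOND = 2'880'000 frames
    // For stereo 16-bit PCM that is roughly 11.5 MB of pipe memory; an IEC61937
    // data-burst stream at 192 kHz needs 4x the frames for the same duration.
    constexpr size_t kExampleTeeFrames =
            (static_cast<long long>(60'000) * 48'000) / 1000;  // == 2'880'000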
diff --git a/services/audioflinger/datapath/Android.bp b/services/audioflinger/datapath/Android.bp
index 58f0422..4235f14 100644
--- a/services/audioflinger/datapath/Android.bp
+++ b/services/audioflinger/datapath/Android.bp
@@ -43,11 +43,14 @@
 
     srcs: [
         "AudioHwDevice.cpp",
+        "AudioStreamIn.cpp",
         "AudioStreamOut.cpp",
+        "SpdifStreamIn.cpp",
         "SpdifStreamOut.cpp",
     ],
 
     header_libs: [
+        "libaudioclient_headers",
         "libaudiohal_headers",
         "liberror_headers",
     ],
@@ -55,10 +58,18 @@
     shared_libs: [
         "audioclient-types-aidl-cpp",
         "av-types-aidl-cpp",
+        "libaudioflinger_utils", // NBAIO_Tee
+        "libaudioprocessing",
         "libaudiospdif",
         "libaudioutils",
         "libbase",
+        "libcutils",
         "liblog",
+        "libnbaio",
         "libutils", // refbase
     ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger",  // for configuration
+    ],
 }
diff --git a/services/audioflinger/datapath/AudioHwDevice.cpp b/services/audioflinger/datapath/AudioHwDevice.cpp
index 9ff316c..95e9ecc 100644
--- a/services/audioflinger/datapath/AudioHwDevice.cpp
+++ b/services/audioflinger/datapath/AudioHwDevice.cpp
@@ -1,19 +1,19 @@
 /*
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2007, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 #define LOG_TAG "AudioHwDevice"
 //#define LOG_NDEBUG 0
@@ -21,10 +21,13 @@
 #include <system/audio.h>
 #include <utils/Log.h>
 
+#include <audio_utils/spdif/SPDIFDecoder.h>
 #include <audio_utils/spdif/SPDIFEncoder.h>
+#include <media/AudioResamplerPublic.h>
 
 #include "AudioHwDevice.h"
 #include "AudioStreamOut.h"
+#include "SpdifStreamIn.h"
 #include "SpdifStreamOut.h"
 
 namespace android {
@@ -47,12 +50,8 @@
     auto outputStream = new AudioStreamOut(this, flags);
 
     // Try to open the HAL first using the current format.
-    ALOGV("openOutputStream(), try "
-            " sampleRate %d, Format %#x, "
-            "channelMask %#x",
-            config->sample_rate,
-            config->format,
-            config->channel_mask);
+    ALOGV("openOutputStream(), try sampleRate %d, format %#x, channelMask %#x", config->sample_rate,
+            config->format, config->channel_mask);
     status_t status = outputStream->open(handle, deviceType, config, address);
 
     if (status != NO_ERROR) {
@@ -62,13 +61,8 @@
         // FIXME Look at any modification to the config.
         // The HAL might modify the config to suggest a wrapped format.
         // Log this so we can see what the HALs are doing.
-        ALOGI("openOutputStream(), HAL returned"
-            " sampleRate %d, Format %#x, "
-            "channelMask %#x, status %d",
-            config->sample_rate,
-            config->format,
-            config->channel_mask,
-            status);
+        ALOGI("openOutputStream(), HAL returned sampleRate %d, format %#x, channelMask %#x,"
+                " status %d", config->sample_rate, config->format, config->channel_mask, status);
 
         // If the data is encoded then try again using wrapped PCM.
         const bool wrapperNeeded = !audio_has_proportional_frames(originalConfig.format)
@@ -96,6 +90,79 @@
     return status;
 }
 
+status_t AudioHwDevice::openInputStream(
+        AudioStreamIn **ppStreamIn,
+        audio_io_handle_t handle,
+        audio_devices_t deviceType,
+        audio_input_flags_t flags,
+        struct audio_config *config,
+        const char *address,
+        audio_source_t source,
+        audio_devices_t outputDevice,
+        const char *outputDeviceAddress) {
+
+    struct audio_config originalConfig = *config;
+    auto inputStream = new AudioStreamIn(this, flags);
+
+    // Try to open the HAL first using the current format.
+    ALOGV("openInputStream(), try sampleRate %d, format %#x, channelMask %#x", config->sample_rate,
+            config->format, config->channel_mask);
+    status_t status = inputStream->open(handle, deviceType, config, address, source, outputDevice,
+                                        outputDeviceAddress);
+
+    // If the input could not be opened with the requested parameters and we can handle the
+    // conversion internally, try to open again with the proposed parameters.
+    if (status == BAD_VALUE &&
+        audio_is_linear_pcm(originalConfig.format) &&
+        audio_is_linear_pcm(config->format) &&
+        (config->sample_rate <= AUDIO_RESAMPLER_DOWN_RATIO_MAX * originalConfig.sample_rate) &&
+        (audio_channel_count_from_in_mask(config->channel_mask) <= FCC_LIMIT) &&
+        (audio_channel_count_from_in_mask(originalConfig.channel_mask) <= FCC_LIMIT)) {
+        // FIXME describe the change proposed by HAL (save old values so we can log them here)
+        ALOGV("openInputStream() reopening with proposed sampling rate and channel mask");
+        status = inputStream->open(handle, deviceType, config, address, source,
+                outputDevice, outputDeviceAddress);
+        // FIXME log this new status; HAL should not propose any further changes
+        if (status != NO_ERROR) {
+            delete inputStream;
+            inputStream = nullptr;
+        }
+    } else if (status != NO_ERROR) {
+        delete inputStream;
+        inputStream = nullptr;
+
+        // FIXME Look at any modification to the config.
+        // The HAL might modify the config to suggest a wrapped format.
+        // Log this so we can see what the HALs are doing.
+        ALOGI("openInputStream(), HAL returned sampleRate %d, format %#x, channelMask %#x,"
+                " status %d", config->sample_rate, config->format, config->channel_mask, status);
+
+        // If the data is encoded then try again using wrapped PCM.
+        const bool unwrapperNeeded = !audio_has_proportional_frames(originalConfig.format)
+                && ((flags & AUDIO_INPUT_FLAG_DIRECT) != 0);
+
+        if (unwrapperNeeded) {
+            if (SPDIFDecoder::isFormatSupported(originalConfig.format)) {
+                inputStream = new SpdifStreamIn(this, flags, originalConfig.format);
+                status = inputStream->open(handle, deviceType, &originalConfig, address, source,
+                        outputDevice, outputDeviceAddress);
+                if (status != NO_ERROR) {
+                    ALOGE("ERROR - openInputStream(), SPDIF open returned %d",
+                        status);
+                    delete inputStream;
+                    inputStream = nullptr;
+                }
+            } else {
+                ALOGE("ERROR - openInputStream(), SPDIFDecoder does not support format 0x%08x",
+                    originalConfig.format);
+            }
+        }
+    }
+
+    *ppStreamIn = inputStream;
+    return status;
+}
+
 bool AudioHwDevice::supportsAudioPatches() const {
     bool result;
     return mHwDevice->supportsAudioPatches(&result) == OK ? result : false;
@@ -118,5 +185,10 @@
     return mHwDevice->getAAudioHardwareBurstMinUsec();
 }
 
+status_t AudioHwDevice::getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                        struct audio_port_v7 *mixPort) const {
+    return mHwDevice->getAudioMixPort(devicePort, mixPort);
+}
+
 
 }; // namespace android
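A rough sketch of how the new openInputStream() is expected to be driven (the hwDevice and handle names below are hypothetical; the real call site is in AudioFlinger's input-open path):

    // Request an EAC3 capture over a DIRECT input. If the HAL only produces
    // IEC61937 data bursts, AudioHwDevice falls back to a SpdifStreamIn that
    // unwraps the bursts back to the elementary stream.
    AudioStreamIn* streamIn = nullptr;
    audio_config config = AUDIO_CONFIG_INITIALIZER;
    config.sample_rate = 48000;
    config.channel_mask = AUDIO_CHANNEL_IN_STEREO;
    config.format = AUDIO_FORMAT_E_AC3;
    const status_t status = hwDevice->openInputStream(
            &streamIn, handle, AUDIO_DEVICE_IN_HDMI, AUDIO_INPUT_FLAG_DIRECT,
            &config, "" /* address */, AUDIO_SOURCE_DEFAULT,
            AUDIO_DEVICE_NONE, "" /* outputDeviceAddress */);
    if (status == NO_ERROR) {
        // getAudioProperties() reports the application-facing format even when
        // the HAL itself was opened as 16-bit PCM data bursts.
        const audio_config_base_t props = streamIn->getAudioProperties();
    }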
diff --git a/services/audioflinger/datapath/AudioHwDevice.h b/services/audioflinger/datapath/AudioHwDevice.h
index f9cb80e..80c1473 100644
--- a/services/audioflinger/datapath/AudioHwDevice.h
+++ b/services/audioflinger/datapath/AudioHwDevice.h
@@ -1,22 +1,21 @@
 /*
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2007, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
-#ifndef ANDROID_AUDIO_HW_DEVICE_H
-#define ANDROID_AUDIO_HW_DEVICE_H
+#pragma once
 
 #include <stdint.h>
 #include <stdlib.h>
@@ -30,6 +29,7 @@
 
 namespace android {
 
+class AudioStreamIn;
 class AudioStreamOut;
 
 class AudioHwDevice {
@@ -89,6 +89,17 @@
             struct audio_config *config,
             const char *address);
 
+    status_t openInputStream(
+            AudioStreamIn **ppStreamIn,
+            audio_io_handle_t handle,
+            audio_devices_t deviceType,
+            audio_input_flags_t flags,
+            struct audio_config *config,
+            const char *address,
+            audio_source_t source,
+            audio_devices_t outputDevice,
+            const char *outputDeviceAddress);
+
     [[nodiscard]] bool supportsAudioPatches() const;
 
     [[nodiscard]] status_t getAudioPort(struct audio_port_v7 *port) const;
@@ -101,6 +112,9 @@
 
     [[nodiscard]] int32_t getAAudioHardwareBurstMinUsec() const;
 
+    [[nodiscard]] status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                           struct audio_port_v7 *mixPort) const;
+
 private:
     const audio_module_handle_t mHandle;
     const char * const          mModuleName;
@@ -109,5 +123,3 @@
 };
 
 } // namespace android
-
-#endif // ANDROID_AUDIO_HW_DEVICE_H
diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp
new file mode 100644
index 0000000..24f3bb9
--- /dev/null
+++ b/services/audioflinger/datapath/AudioStreamIn.cpp
@@ -0,0 +1,137 @@
+/*
+ *
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioFlinger"
+//#define LOG_NDEBUG 0
+#include "AudioStreamIn.h"
+
+#include <media/audiohal/DeviceHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
+#include <system/audio.h>
+#include <utils/Log.h>
+
+#include "AudioHwDevice.h"
+
+namespace android {
+
+// ----------------------------------------------------------------------------
+AudioStreamIn::AudioStreamIn(AudioHwDevice *dev, audio_input_flags_t flags)
+        : audioHwDev(dev)
+        , flags(flags)
+{
+}
+
+// This must be defined here together with the HAL includes above and
+// not solely in the header.
+AudioStreamIn::~AudioStreamIn() = default;
+
+sp<DeviceHalInterface> AudioStreamIn::hwDev() const
+{
+    return audioHwDev->hwDevice();
+}
+
+status_t AudioStreamIn::getCapturePosition(int64_t* frames, int64_t* time)
+{
+    if (stream == nullptr) {
+        return NO_INIT;
+    }
+
+    int64_t halPosition = 0;
+    const status_t status = stream->getCapturePosition(&halPosition, time);
+    if (status != NO_ERROR) {
+        return status;
+    }
+
+    // Adjust for standby using HAL rate frames.
+    // Only apply this correction if the HAL is getting PCM frames.
+    if (mHalFormatHasProportionalFrames) {
+        const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ?
+                0 : (halPosition - mFramesReadAtStandby);
+        // Scale from HAL sample rate to application rate.
+        *frames = adjustedPosition / mRateMultiplier;
+    } else {
+        // For compressed formats.
+        *frames = halPosition;
+    }
+
+    return status;
+}
+
+status_t AudioStreamIn::open(
+        audio_io_handle_t handle,
+        audio_devices_t deviceType,
+        struct audio_config *config,
+        const char *address,
+        audio_source_t source,
+        audio_devices_t outputDevice,
+        const char *outputDeviceAddress)
+{
+    sp<StreamInHalInterface> inStream;
+
+    int status = hwDev()->openInputStream(
+            handle,
+            deviceType,
+            config,
+            flags,
+            address,
+            source,
+            outputDevice,
+            outputDeviceAddress,
+            &inStream);
+    ALOGV("AudioStreamIn::open(), HAL returned stream %p, sampleRate %d, format %#x,"
+            " channelMask %#x, status %d", inStream.get(), config->sample_rate, config->format,
+            config->channel_mask, status);
+
+    if (status == NO_ERROR) {
+        stream = inStream;
+        mHalFormatHasProportionalFrames = audio_has_proportional_frames(config->format);
+        status = stream->getFrameSize(&mHalFrameSize);
+        LOG_ALWAYS_FATAL_IF(status != OK, "Error retrieving frame size from HAL: %d", status);
+        LOG_ALWAYS_FATAL_IF(mHalFrameSize == 0, "Error frame size was %zu but must be greater than"
+                " zero", mHalFrameSize);
+    }
+
+    return status;
+}
+
+audio_config_base_t AudioStreamIn::getAudioProperties() const
+{
+    audio_config_base_t result = AUDIO_CONFIG_BASE_INITIALIZER;
+    if (stream->getAudioProperties(&result) != OK) {
+        result.sample_rate = 0;
+        result.channel_mask = AUDIO_CHANNEL_INVALID;
+        result.format = AUDIO_FORMAT_INVALID;
+    }
+    return result;
+}
+
+status_t AudioStreamIn::standby()
+{
+    mFramesReadAtStandby = mFramesRead;
+    return stream->standby();
+}
+
+status_t AudioStreamIn::read(void* buffer, size_t bytes, size_t* read)
+{
+    const status_t result = stream->read(buffer, bytes, read);
+    if (result == OK && *read > 0 && mHalFrameSize > 0) {
+        mFramesRead += *read / mHalFrameSize;
+    }
+    return result;
+}
+
+} // namespace android
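To make the standby and rate adjustment in getCapturePosition() concrete, a small worked example (numbers are illustrative only):

    // HAL reports halPosition = 1'000'000 frames, the stream last entered
    // standby at mFramesReadAtStandby = 200'000, and mRateMultiplier = 4
    // (e.g. EAC3 carried as IEC61937 at 4x the application rate):
    //   adjustedPosition = 1'000'000 - 200'000 = 800'000 HAL frames
    //   *frames          = 800'000 / 4         = 200'000 application frames
    // Non-proportional (compressed) formats pass the HAL position through unchanged.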
diff --git a/services/audioflinger/datapath/AudioStreamIn.h b/services/audioflinger/datapath/AudioStreamIn.h
index 604a4e4..6d1c6a7 100644
--- a/services/audioflinger/datapath/AudioStreamIn.h
+++ b/services/audioflinger/datapath/AudioStreamIn.h
@@ -31,30 +31,57 @@
     virtual status_t standby() = 0;
 };
 
-// AudioStreamIn is immutable, so its fields are const.
-// The methods must not be const to match StreamHalInterface signature.
-
-struct AudioStreamIn : public Source {
+/**
+ * Managed access to a HAL input stream.
+ */
+class AudioStreamIn : public Source {
+public:
     const AudioHwDevice* const audioHwDev;
-    const sp<StreamInHalInterface> stream;
+    sp<StreamInHalInterface> stream;
     const audio_input_flags_t flags;
 
-    AudioStreamIn(
-            const AudioHwDevice* dev, const sp<StreamInHalInterface>& in,
-            audio_input_flags_t flags)
-        : audioHwDev(dev), stream(in), flags(flags) {}
+    [[nodiscard]] sp<DeviceHalInterface> hwDev() const;
 
-    status_t read(void* buffer, size_t bytes, size_t* read) final {
-        return stream->read(buffer, bytes, read);
-    }
+    AudioStreamIn(AudioHwDevice *dev, audio_input_flags_t flags);
 
-    status_t getCapturePosition(int64_t* frames, int64_t* time) final {
-        return stream->getCapturePosition(frames, time);
-    }
+    virtual status_t open(
+            audio_io_handle_t handle,
+            audio_devices_t deviceType,
+            struct audio_config *config,
+            const char *address,
+            audio_source_t source,
+            audio_devices_t outputDevice,
+            const char *outputDeviceAddress);
 
-    status_t standby() final { return stream->standby(); }
+    ~AudioStreamIn() override;
 
-    sp<DeviceHalInterface> hwDev() const { return audioHwDev->hwDevice(); }
+    status_t getCapturePosition(int64_t* frames, int64_t* time) override;
+
+    status_t read(void* buffer, size_t bytes, size_t* read) override;
+
+    /**
+     * @return frame size from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] virtual size_t getFrameSize() const { return mHalFrameSize; }
+
+    /**
+     * @return audio stream configuration: channel mask, format, sample rate:
+     *   - channel mask from the perspective of the application and the AudioFlinger,
+     *     The HAL is in stereo mode when playing multi-channel compressed audio over HDMI;
+     *   - format from the perspective of the application and the AudioFlinger;
+     *   - sample rate from the perspective of the application and the AudioFlinger,
+     *     The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
+     */
+    [[nodiscard]] virtual audio_config_base_t getAudioProperties() const;
+
+    status_t standby() override;
+
+protected:
+    uint64_t mFramesRead = 0;
+    int64_t mFramesReadAtStandby = 0;
+    int mRateMultiplier = 1;
+    bool mHalFormatHasProportionalFrames = false;
+    size_t mHalFrameSize = 0;
 };
 
 }  // namespace android
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index 6fa82e5..1830d15 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -1,30 +1,31 @@
 /*
-**
-** Copyright 2015, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 #define LOG_TAG "AudioFlinger"
 //#define LOG_NDEBUG 0
 
+#include "AudioStreamOut.h"
+
 #include <media/audiohal/DeviceHalInterface.h>
 #include <media/audiohal/StreamHalInterface.h>
 #include <system/audio.h>
 #include <utils/Log.h>
 
 #include "AudioHwDevice.h"
-#include "AudioStreamOut.h"
 
 namespace android {
 
@@ -132,14 +133,9 @@
             config,
             address,
             &outStream);
-    ALOGV("AudioStreamOut::open(), HAL returned "
-            " stream %p, sampleRate %d, Format %#x, "
-            "channelMask %#x, status %d",
-            outStream.get(),
-            config->sample_rate,
-            config->format,
-            config->channel_mask,
-            status);
+    ALOGV("AudioStreamOut::open(), HAL returned stream %p, sampleRate %d, format %#x,"
+            " channelMask %#x, status %d", outStream.get(), config->sample_rate, config->format,
+            config->channel_mask, status);
 
     // Some HALs may not recognize AUDIO_FORMAT_IEC61937. But if we declare
     // it as PCM then it will probably work.
@@ -162,7 +158,7 @@
         mHalFormatHasProportionalFrames = audio_has_proportional_frames(config->format);
         status = stream->getFrameSize(&mHalFrameSize);
         LOG_ALWAYS_FATAL_IF(status != OK, "Error retrieving frame size from HAL: %d", status);
-        LOG_ALWAYS_FATAL_IF(mHalFrameSize <= 0, "Error frame size was %zu but must be greater than"
+        LOG_ALWAYS_FATAL_IF(mHalFrameSize == 0, "Error frame size was %zu but must be greater than"
                 " zero", mHalFrameSize);
 
     }
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index ce00f8c..ea41bba 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -1,27 +1,28 @@
 /*
-**
-** Copyright 2015, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
-#ifndef ANDROID_AUDIO_STREAM_OUT_H
-#define ANDROID_AUDIO_STREAM_OUT_H
+#pragma once
 
 #include <stdint.h>
 #include <sys/types.h>
 
 #include <system/audio.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
 
 namespace android {
 
@@ -34,9 +35,6 @@
  */
 class AudioStreamOut {
 public:
-// AudioStreamOut is immutable, so its fields are const.
-// For emphasis, we could also make all pointers to them be "const *",
-// but that would clutter the code unnecessarily.
     AudioHwDevice * const audioHwDev;
     sp<StreamOutHalInterface> stream;
     const audio_output_flags_t flags;
@@ -101,15 +99,13 @@
     virtual void presentationComplete() { mExpectRetrograde = true; }
 
 protected:
-    uint64_t             mFramesWritten = 0; // reset by flush
-    uint64_t             mFramesWrittenAtStandby = 0;
-    uint64_t             mRenderPosition = 0; // reset by flush, standby, or presentation complete
-    int                  mRateMultiplier = 1;
-    bool                 mHalFormatHasProportionalFrames = false;
-    size_t               mHalFrameSize = 0;
-    bool                 mExpectRetrograde = false; // see presentationComplete
+    uint64_t mFramesWritten = 0; // reset by flush
+    uint64_t mFramesWrittenAtStandby = 0;
+    uint64_t mRenderPosition = 0; // reset by flush, standby, or presentation complete
+    int mRateMultiplier = 1;
+    bool mHalFormatHasProportionalFrames = false;
+    size_t mHalFrameSize = 0;
+    bool mExpectRetrograde = false; // see presentationComplete
 };
 
 } // namespace android
-
-#endif // ANDROID_AUDIO_STREAM_OUT_H
diff --git a/services/audioflinger/datapath/SpdifStreamIn.cpp b/services/audioflinger/datapath/SpdifStreamIn.cpp
new file mode 100644
index 0000000..98ce712
--- /dev/null
+++ b/services/audioflinger/datapath/SpdifStreamIn.cpp
@@ -0,0 +1,128 @@
+/*
+ *
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioFlinger"
+//#define LOG_NDEBUG 0
+#include "Configuration.h"
+#include <system/audio.h>
+#include <utils/Log.h>
+
+#include <audio_utils/spdif/SPDIFDecoder.h>
+
+#include "AudioHwDevice.h"
+#include "SpdifStreamIn.h"
+
+namespace android {
+
+/**
+ * If the HAL is generating IEC61937 data and AudioFlinger expects an elementary stream, we need
+ * to extract the data using an SPDIF decoder.
+ */
+SpdifStreamIn::SpdifStreamIn(AudioHwDevice *dev,
+            audio_input_flags_t flags,
+            audio_format_t format)
+        : AudioStreamIn(dev, flags)
+        , mSpdifDecoder(this, format)
+{
+}
+
+status_t SpdifStreamIn::open(
+        audio_io_handle_t handle,
+        audio_devices_t devices,
+        struct audio_config *config,
+        const char *address,
+        audio_source_t source,
+        audio_devices_t outputDevice,
+        const char* outputDeviceAddress)
+{
+    struct audio_config customConfig = *config;
+
+    mApplicationConfig.format = config->format;
+    mApplicationConfig.sample_rate = config->sample_rate;
+    mApplicationConfig.channel_mask = config->channel_mask;
+
+    mRateMultiplier = spdif_rate_multiplier(config->format);
+    if (mRateMultiplier <= 0) {
+        ALOGE("ERROR SpdifStreamIn::open() unrecognized format 0x%08X\n", config->format);
+        return BAD_VALUE;
+    }
+    customConfig.sample_rate = config->sample_rate * mRateMultiplier;
+    customConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    customConfig.channel_mask = AUDIO_CHANNEL_IN_STEREO;
+
+    // Always print this because otherwise it could be very confusing if the
+    // HAL and AudioFlinger are using different formats.
+    // Print before open() because HAL may modify customConfig.
+    ALOGI("SpdifStreamIn::open() AudioFlinger requested sampleRate %d, format %#x, channelMask %#x",
+            config->sample_rate, config->format, config->channel_mask);
+    ALOGI("SpdifStreamIn::open() HAL configured for sampleRate %d, format %#x, channelMask %#x",
+            customConfig.sample_rate, customConfig.format, customConfig.channel_mask);
+
+    const status_t status = AudioStreamIn::open(
+            handle,
+            devices,
+            &customConfig,
+            address,
+            source,
+            outputDevice,
+            outputDeviceAddress);
+
+    ALOGI("SpdifStreamIn::open() status = %d", status);
+
+#ifdef TEE_SINK
+    if (status == OK) {
+        // Don't use PCM 16-bit format to avoid WAV encoding IEC61937 data.
+        mTee.set(customConfig.sample_rate,
+                audio_channel_count_from_in_mask(customConfig.channel_mask),
+                AUDIO_FORMAT_IEC61937, NBAIO_Tee::TEE_FLAG_INPUT_THREAD);
+        mTee.setId(std::string("_") + std::to_string(handle) + "_C");
+    }
+#endif
+
+    return status;
+}
+
+status_t SpdifStreamIn::standby()
+{
+    mSpdifDecoder.reset();
+    return AudioStreamIn::standby();
+}
+
+status_t SpdifStreamIn::readDataBurst(void* buffer, size_t bytes, size_t* read)
+{
+    status_t status = AudioStreamIn::read(buffer, bytes, read);
+
+#ifdef TEE_SINK
+    if (*read > 0) {
+        mTee.write(reinterpret_cast<const char *>(buffer), *read / AudioStreamIn::getFrameSize());
+    }
+#endif
+    return status;
+}
+
+status_t SpdifStreamIn::read(void* buffer, size_t numBytes, size_t* read)
+{
+    // Read from SPDIF extractor. It will call back to readDataBurst().
+    const auto bytesRead = mSpdifDecoder.read(buffer, numBytes);
+    if (bytesRead >= 0) {
+        *read = bytesRead;
+        return OK;
+    }
+    return NOT_ENOUGH_DATA;
+}
+
+} // namespace android
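Because the capture path is driven through a decoder callback, the call chain implied by the code above is easy to lose track of; summarized here as a sketch, not additional implementation:

    // capture thread read
    //   -> SpdifStreamIn::read()            // asks the decoder for elementary data
    //     -> SPDIFDecoder::read()           // scans and strips IEC61937 burst headers
    //       -> MySPDIFDecoder::readInput()  // decoder pull callback
    //         -> SpdifStreamIn::readDataBurst()
    //           -> AudioStreamIn::read()    // raw 16-bit data bursts from the HAL,
    //                                       // optionally teed as AUDIO_FORMAT_IEC61937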
diff --git a/services/audioflinger/datapath/SpdifStreamIn.h b/services/audioflinger/datapath/SpdifStreamIn.h
new file mode 100644
index 0000000..78832ee
--- /dev/null
+++ b/services/audioflinger/datapath/SpdifStreamIn.h
@@ -0,0 +1,134 @@
+/*
+ *
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <system/audio.h>
+
+#include "AudioStreamIn.h"
+
+#include <audio_utils/spdif/SPDIFDecoder.h>
+#include <afutils/NBAIO_Tee.h>
+
+namespace android {
+
+/**
+ * Stream that is a PCM data burst in the HAL but looks like an encoded stream
+ * to the AudioFlinger. Extracts encoded data from the SPDIF wrapper per IEC61937-3.
+ */
+class SpdifStreamIn : public AudioStreamIn {
+public:
+
+    SpdifStreamIn(AudioHwDevice *dev, audio_input_flags_t flags,
+            audio_format_t format);
+
+    status_t open(
+            audio_io_handle_t handle,
+            audio_devices_t devices,
+            struct audio_config *config,
+            const char *address,
+            audio_source_t source,
+            audio_devices_t outputDevice,
+            const char* outputDeviceAddress) override;
+
+    /**
+     * Read an audio buffer from the HAL stream, extracting the elementary stream from the
+     * IEC61937 data bursts. If at least one frame was read successfully prior to an error,
+     * the successful (short) byte count is returned and the error is reported on the
+     * subsequent call.
+     */
+    status_t read(void* buffer, size_t bytes, size_t* read) override;
+
+    /**
+     * @return frame size from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] size_t getFrameSize() const override { return sizeof(int8_t); }
+
+    /**
+     * @return audio_config_base_t from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] audio_config_base_t getAudioProperties() const override {
+        return mApplicationConfig;
+    }
+
+    /**
+     * @return format from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] virtual audio_format_t getFormat() const { return mApplicationConfig.format; }
+
+    /**
+     * The HAL may be running at a higher sample rate if, for example, reading wrapped EAC3.
+     * @return sample rate from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] virtual uint32_t getSampleRate() const { return mApplicationConfig.sample_rate; }
+
+    /**
+     * The HAL is in stereo mode when reading multi-channel compressed audio.
+     * @return channel mask from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] virtual audio_channel_mask_t getChannelMask() const {
+        return mApplicationConfig.channel_mask;
+    }
+
+    status_t standby() override;
+
+private:
+
+    class MySPDIFDecoder : public SPDIFDecoder
+    {
+    public:
+        MySPDIFDecoder(SpdifStreamIn *spdifStreamIn, audio_format_t format)
+          :  SPDIFDecoder(format)
+          , mSpdifStreamIn(spdifStreamIn)
+        {
+        }
+
+        ssize_t readInput(void* buffer, size_t bytes) override
+        {
+            size_t bytesRead = 0;
+            const auto result = mSpdifStreamIn->readDataBurst(buffer, bytes, &bytesRead);
+            if (result < 0) {
+                return result;
+            }
+            return bytesRead;
+        }
+
+    protected:
+        SpdifStreamIn * const mSpdifStreamIn;
+    };
+
+    MySPDIFDecoder mSpdifDecoder;
+    audio_config_base_t mApplicationConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+
+    status_t readDataBurst(void* data, size_t bytes, size_t* read);
+
+#ifdef TEE_SINK
+    NBAIO_Tee mTee;
+#endif
+
+};
+
+} // namespace android
diff --git a/services/audioflinger/datapath/SpdifStreamOut.cpp b/services/audioflinger/datapath/SpdifStreamOut.cpp
index 43e9c0c..65a4eec 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.cpp
+++ b/services/audioflinger/datapath/SpdifStreamOut.cpp
@@ -1,22 +1,23 @@
 /*
-**
-** Copyright 2015, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 #define LOG_TAG "AudioFlinger"
 //#define LOG_NDEBUG 0
+#include "Configuration.h"
 #include <system/audio.h>
 #include <utils/Log.h>
 
@@ -41,33 +42,21 @@
 }
 
 status_t SpdifStreamOut::open(
-                              audio_io_handle_t handle,
-                              audio_devices_t devices,
-                              struct audio_config *config,
-                              const char *address)
+        audio_io_handle_t handle,
+        audio_devices_t devices,
+        struct audio_config *config,
+        const char *address)
 {
     struct audio_config customConfig = *config;
 
-    mApplicationFormat = config->format;
-    mApplicationSampleRate = config->sample_rate;
-    mApplicationChannelMask = config->channel_mask;
+    mApplicationConfig.format = config->format;
+    mApplicationConfig.sample_rate = config->sample_rate;
+    mApplicationConfig.channel_mask = config->channel_mask;
 
-    // Some data bursts run at a higher sample rate.
-    // TODO Move this into the audio_utils as a static method.
-    switch(config->format) {
-        case AUDIO_FORMAT_E_AC3:
-        case AUDIO_FORMAT_E_AC3_JOC:
-            mRateMultiplier = 4;
-            break;
-        case AUDIO_FORMAT_AC3:
-        case AUDIO_FORMAT_DTS:
-        case AUDIO_FORMAT_DTS_HD:
-            mRateMultiplier = 1;
-            break;
-        default:
-            ALOGE("ERROR SpdifStreamOut::open() unrecognized format 0x%08X\n",
-                config->format);
-            return BAD_VALUE;
+    mRateMultiplier = spdif_rate_multiplier(config->format);
+    if (mRateMultiplier <= 0) {
+        ALOGE("ERROR SpdifStreamOut::open() unrecognized format 0x%08X\n", config->format);
+        return BAD_VALUE;
     }
     customConfig.sample_rate = config->sample_rate * mRateMultiplier;
 
@@ -77,16 +66,10 @@
     // Always print this because otherwise it could be very confusing if the
     // HAL and AudioFlinger are using different formats.
     // Print before open() because HAL may modify customConfig.
-    ALOGI("SpdifStreamOut::open() AudioFlinger requested"
-            " sampleRate %d, format %#x, channelMask %#x",
-            config->sample_rate,
-            config->format,
-            config->channel_mask);
-    ALOGI("SpdifStreamOut::open() HAL configured for"
-            " sampleRate %d, format %#x, channelMask %#x",
-            customConfig.sample_rate,
-            customConfig.format,
-            customConfig.channel_mask);
+    ALOGI("SpdifStreamOut::open() AudioFlinger requested sampleRate %d, format %#x,"
+            " channelMask %#x", config->sample_rate, config->format, config->channel_mask);
+    ALOGI("SpdifStreamOut::open() HAL configured for sampleRate %d, format %#x, channelMask %#x",
+            customConfig.sample_rate, customConfig.format, customConfig.channel_mask);
 
     const status_t status = AudioStreamOut::open(
             handle,
@@ -96,6 +79,16 @@
 
     ALOGI("SpdifStreamOut::open() status = %d", status);
 
+#ifdef TEE_SINK
+    if (status == OK) {
+        // Don't use PCM 16-bit format to avoid WAV encoding IEC61937 data.
+        mTee.set(customConfig.sample_rate,
+                audio_channel_count_from_out_mask(customConfig.channel_mask),
+                AUDIO_FORMAT_IEC61937, NBAIO_Tee::TEE_FLAG_OUTPUT_THREAD);
+        mTee.setId(std::string("_") + std::to_string(handle) + "_D");
+    }
+#endif
+
     return status;
 }
 
@@ -113,7 +106,15 @@
 
 ssize_t SpdifStreamOut::writeDataBurst(const void* buffer, size_t bytes)
 {
-    return AudioStreamOut::write(buffer, bytes);
+    const ssize_t written = AudioStreamOut::write(buffer, bytes);
+
+#ifdef TEE_SINK
+    if (written > 0) {
+        mTee.write(reinterpret_cast<const char *>(buffer),
+                written / AudioStreamOut::getFrameSize());
+    }
+#endif
+    return written;
 }
 
 ssize_t SpdifStreamOut::write(const void* buffer, size_t numBytes)
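The spdif_rate_multiplier() helper takes over from the open-coded switch removed above; the mapping it implies for the formats handled here, with an illustrative rate:

    // AUDIO_FORMAT_E_AC3 / E_AC3_JOC: multiplier 4 -> 48 kHz app rate, 192 kHz HAL rate
    // AUDIO_FORMAT_AC3, DTS, DTS_HD:  multiplier 1 -> HAL runs at the app rate
    // Unrecognized formats yield a multiplier <= 0 and open() fails with BAD_VALUE.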
diff --git a/services/audioflinger/datapath/SpdifStreamOut.h b/services/audioflinger/datapath/SpdifStreamOut.h
index c8dc89f..c6d27ba 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.h
+++ b/services/audioflinger/datapath/SpdifStreamOut.h
@@ -1,22 +1,21 @@
 /*
-**
-** Copyright 2015, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
+ *
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
-#ifndef ANDROID_SPDIF_STREAM_OUT_H
-#define ANDROID_SPDIF_STREAM_OUT_H
+#pragma once
 
 #include <stdint.h>
 #include <sys/types.h>
@@ -25,6 +24,7 @@
 
 #include "AudioStreamOut.h"
 
+#include <afutils/NBAIO_Tee.h>
 #include <audio_utils/spdif/SPDIFEncoder.h>
 
 namespace android {
@@ -39,8 +39,6 @@
     SpdifStreamOut(AudioHwDevice *dev, audio_output_flags_t flags,
             audio_format_t format);
 
-    ~SpdifStreamOut() override = default;
-
     status_t open(
             audio_io_handle_t handle,
             audio_devices_t devices,
@@ -68,22 +66,29 @@
     [[nodiscard]] size_t getFrameSize() const override { return sizeof(int8_t); }
 
     /**
+     * @return audio_config_base_t from the perspective of the application and the AudioFlinger.
+     */
+    [[nodiscard]] audio_config_base_t getAudioProperties() const override {
+        return mApplicationConfig;
+    }
+
+    /**
      * @return format from the perspective of the application and the AudioFlinger.
      */
-    [[nodiscard]] virtual audio_format_t getFormat() const { return mApplicationFormat; }
+    [[nodiscard]] virtual audio_format_t getFormat() const { return mApplicationConfig.format; }
 
     /**
      * The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
      * @return sample rate from the perspective of the application and the AudioFlinger.
      */
-    [[nodiscard]] virtual uint32_t getSampleRate() const { return mApplicationSampleRate; }
+    [[nodiscard]] virtual uint32_t getSampleRate() const { return mApplicationConfig.sample_rate; }
 
     /**
      * The HAL is in stereo mode when playing multi-channel compressed audio over HDMI.
      * @return channel mask from the perspective of the application and the AudioFlinger.
      */
     [[nodiscard]] virtual audio_channel_mask_t getChannelMask() const {
-        return mApplicationChannelMask;
+        return mApplicationConfig.channel_mask;
     }
 
     status_t flush() override;
@@ -108,16 +113,15 @@
         SpdifStreamOut * const mSpdifStreamOut;
     };
 
-    MySPDIFEncoder       mSpdifEncoder;
-    audio_format_t       mApplicationFormat = AUDIO_FORMAT_DEFAULT;
-    uint32_t             mApplicationSampleRate = 0;
-    audio_channel_mask_t mApplicationChannelMask = AUDIO_CHANNEL_NONE;
+    MySPDIFEncoder mSpdifEncoder;
+    audio_config_base_t mApplicationConfig = AUDIO_CONFIG_BASE_INITIALIZER;
 
-    ssize_t  writeDataBurst(const void* data, size_t bytes);
-    ssize_t  writeInternal(const void* buffer, size_t bytes);
+    ssize_t writeDataBurst(const void* data, size_t bytes);
+
+#ifdef TEE_SINK
+    NBAIO_Tee mTee;
+#endif
 
 };
 
 } // namespace android
-
-#endif // ANDROID_SPDIF_STREAM_OUT_H
diff --git a/services/audioflinger/datapath/ThreadMetrics.h b/services/audioflinger/datapath/ThreadMetrics.h
index c643a57..4eb8aa0 100644
--- a/services/audioflinger/datapath/ThreadMetrics.h
+++ b/services/audioflinger/datapath/ThreadMetrics.h
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_THREADMETRICS_H
-#define ANDROID_AUDIO_THREADMETRICS_H
+#pragma once
 
 #include <media/MediaMetricsItem.h>
 
@@ -210,5 +209,3 @@
 };
 
 } // namespace android
-
-#endif // ANDROID_AUDIO_THREADMETRICS_H
diff --git a/services/audioflinger/datapath/TrackMetrics.h b/services/audioflinger/datapath/TrackMetrics.h
index 2b44acb..ad5d3db 100644
--- a/services/audioflinger/datapath/TrackMetrics.h
+++ b/services/audioflinger/datapath/TrackMetrics.h
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_AUDIO_TRACKMETRICS_H
-#define ANDROID_AUDIO_TRACKMETRICS_H
+#pragma once
 
 #include <binder/IActivityManager.h>
 #include <binder/IPCThreadState.h>
@@ -306,5 +305,3 @@
 };
 
 } // namespace android
-
-#endif // ANDROID_AUDIO_TRACKMETRICS_H
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 21f346e..39c80d8 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -49,7 +49,7 @@
         size_t channelCount, audio_format_t format) {
     const std::lock_guard _l(mLock);
 
-    if (mHalSoundDose != nullptr && mEnabledCsd) {
+    if (mHalSoundDose.size() > 0 && mEnabledCsd) {
         ALOGD("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
         return nullptr;
     }
@@ -82,20 +82,27 @@
     return melProcessor;
 }
 
-bool SoundDoseManager::setHalSoundDoseInterface(const std::shared_ptr<ISoundDose>& halSoundDose) {
+bool SoundDoseManager::setHalSoundDoseInterface(const std::string &module,
+                                                const std::shared_ptr<ISoundDose> &halSoundDose) {
     ALOGV("%s", __func__);
 
+    if (halSoundDose == nullptr) {
+        ALOGI("%s: passed ISoundDose object is null", __func__);
+        return false;
+    }
+
     std::shared_ptr<HalSoundDoseCallback> halSoundDoseCallback;
     {
         const std::lock_guard _l(mLock);
 
-        mHalSoundDose = halSoundDose;
-        if (halSoundDose == nullptr) {
-            ALOGI("%s: passed ISoundDose object is null, switching to internal CSD", __func__);
+        if (mHalSoundDose.find(module) != mHalSoundDose.end()) {
+            ALOGW("%s: Module %s already has a sound dose HAL assigned, skipping", __func__,
+                  module.c_str());
             return false;
         }
+        mHalSoundDose[module] = halSoundDose;
 
-        if (!mHalSoundDose->setOutputRs2UpperBound(mRs2UpperBound).isOk()) {
+        if (!halSoundDose->setOutputRs2UpperBound(mRs2UpperBound).isOk()) {
             ALOGW("%s: Cannot set RS2 value for momentary exposure %f",
                   __func__,
                   mRs2UpperBound);
@@ -121,16 +128,26 @@
     return true;
 }
 
+void SoundDoseManager::resetHalSoundDoseInterfaces() {
+    ALOGV("%s", __func__);
+
+    const std::lock_guard _l(mLock);
+    mHalSoundDose.clear();
+}
+
 void SoundDoseManager::setOutputRs2UpperBound(float rs2Value) {
     ALOGV("%s", __func__);
     const std::lock_guard _l(mLock);
 
-    if (mHalSoundDose != nullptr) {
-        // using the HAL sound dose interface
-        if (!mHalSoundDose->setOutputRs2UpperBound(rs2Value).isOk()) {
-            ALOGE("%s: Cannot set RS2 value for momentary exposure %f", __func__, rs2Value);
-            return;
+    if (mHalSoundDose.size() > 0) {
+        for (auto& halSoundDose : mHalSoundDose) {
+            // using the HAL sound dose interface
+            if (!halSoundDose.second->setOutputRs2UpperBound(rs2Value).isOk()) {
+                ALOGE("%s: Cannot set RS2 value for momentary exposure %f", __func__, rs2Value);
+                continue;
+            }
         }
+
         mRs2UpperBound = rs2Value;
         return;
     }
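A brief sketch of how the per-module map above is intended to be used (module names and the surrounding setup are examples only):

    // Each audio HAL module registers its own ISoundDose instance; RS2 updates
    // now fan out to every registered module instead of a single interface.
    soundDoseManager->setHalSoundDoseInterface("primary", primaryHalSoundDose);
    soundDoseManager->setHalSoundDoseInterface("usb", usbHalSoundDose);
    soundDoseManager->setOutputRs2UpperBound(90.f);   // applied to both modules
    soundDoseManager->resetHalSoundDoseInterfaces();  // e.g. when a HAL restarts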
@@ -202,14 +219,16 @@
 
 ndk::ScopedAStatus SoundDoseManager::HalSoundDoseCallback::onMomentaryExposureWarning(
         float in_currentDbA, const AudioDevice& in_audioDevice) {
-    auto soundDoseManager = mSoundDoseManager.promote();
-    if (soundDoseManager == nullptr) {
-        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+    sp<SoundDoseManager> soundDoseManager;
+    {
+        const std::lock_guard _l(mCbLock);
+        soundDoseManager = mSoundDoseManager.promote();
+        if (soundDoseManager == nullptr) {
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+        }
     }
 
-    std::shared_ptr<ISoundDose> halSoundDose;
-    soundDoseManager->getHalSoundDose(&halSoundDose);
-    if(halSoundDose == nullptr) {
+    if (!soundDoseManager->useHalSoundDose()) {
         ALOGW("%s: HAL sound dose interface deactivated. Ignoring", __func__);
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
     }
@@ -229,14 +248,16 @@
 ndk::ScopedAStatus SoundDoseManager::HalSoundDoseCallback::onNewMelValues(
         const ISoundDose::IHalSoundDoseCallback::MelRecord& in_melRecord,
         const AudioDevice& in_audioDevice) {
-    auto soundDoseManager = mSoundDoseManager.promote();
-    if (soundDoseManager == nullptr) {
-        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+    sp<SoundDoseManager> soundDoseManager;
+    {
+        const std::lock_guard _l(mCbLock);
+        soundDoseManager = mSoundDoseManager.promote();
+        if (soundDoseManager == nullptr) {
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+        }
     }
 
-    std::shared_ptr<ISoundDose> halSoundDose;
-    soundDoseManager->getHalSoundDose(&halSoundDose);
-    if(halSoundDose == nullptr) {
+    if (!soundDoseManager->useHalSoundDose()) {
         ALOGW("%s: HAL sound dose interface deactivated. Ignoring", __func__);
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
     }
@@ -301,6 +322,25 @@
     return binder::Status::ok();
 }
 
+binder::Status SoundDoseManager::SoundDose::initCachedAudioDeviceCategories(
+        const std::vector<media::ISoundDose::AudioDeviceCategory>& btDeviceCategories) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->initCachedAudioDeviceCategories(btDeviceCategories);
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::setAudioDeviceCategory(
+        const media::ISoundDose::AudioDeviceCategory& btAudioDevice) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->setAudioDeviceCategory(btAudioDevice);
+    }
+    return binder::Status::ok();
+}
+
 binder::Status SoundDoseManager::SoundDose::getOutputRs2UpperBound(float* value) {
     ALOGV("%s", __func__);
     auto soundDoseManager = mSoundDoseManager.promote();
@@ -358,7 +398,9 @@
         auto melProcessor = mp.second.promote();
         if (melProcessor != nullptr) {
             auto deviceId = melProcessor->getDeviceId();
-            if (mActiveDeviceTypes[deviceId] == deviceType) {
+            const auto deviceTypeIt = mActiveDeviceTypes.find(deviceId);
+            if (deviceTypeIt != mActiveDeviceTypes.end() &&
+                deviceTypeIt->second == deviceType) {
                 ALOGV("%s: set attenuation for deviceId %d to %f",
                         __func__, deviceId, attenuationDB);
                 melProcessor->setAttenuation(attenuationDB);
@@ -390,9 +432,105 @@
     return mEnabledCsd;
 }
 
+void SoundDoseManager::initCachedAudioDeviceCategories(
+        const std::vector<media::ISoundDose::AudioDeviceCategory>& deviceCategories) {
+    ALOGV("%s", __func__);
+    {
+        const std::lock_guard _l(mLock);
+        mBluetoothDevicesWithCsd.clear();
+    }
+    for (const auto& btDeviceCategory : deviceCategories) {
+        setAudioDeviceCategory(btDeviceCategory);
+    }
+}
+
+void SoundDoseManager::setAudioDeviceCategory(
+        const media::ISoundDose::AudioDeviceCategory& audioDevice) {
+    ALOGV("%s: set BT audio device type with address %s to headphone %d", __func__,
+          audioDevice.address.c_str(), audioDevice.csdCompatible);
+
+    std::vector<audio_port_handle_t> devicesToStart;
+    std::vector<audio_port_handle_t> devicesToStop;
+    {
+        const std::lock_guard _l(mLock);
+        const auto deviceIt = mBluetoothDevicesWithCsd.find(
+                std::make_pair(audioDevice.address,
+                               static_cast<audio_devices_t>(audioDevice.internalAudioType)));
+        if (deviceIt != mBluetoothDevicesWithCsd.end()) {
+            deviceIt->second = audioDevice.csdCompatible;
+        } else {
+            mBluetoothDevicesWithCsd.emplace(
+                    std::make_pair(audioDevice.address,
+                                   static_cast<audio_devices_t>(audioDevice.internalAudioType)),
+                    audioDevice.csdCompatible);
+        }
+
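+        // For devices that are currently active, collect the port ids whose MEL computation
+        // has to be started or stopped; the callbacks are invoked after mLock is released.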
+        for (const auto &activeDevice: mActiveDevices) {
+            if (activeDevice.first.address() == audioDevice.address &&
+                activeDevice.first.mType ==
+                static_cast<audio_devices_t>(audioDevice.internalAudioType)) {
+                if (audioDevice.csdCompatible) {
+                    devicesToStart.push_back(activeDevice.second);
+                } else {
+                    devicesToStop.push_back(activeDevice.second);
+                }
+            }
+        }
+    }
+
+    for (const auto& deviceToStart : devicesToStart) {
+        mMelReporterCallback->startMelComputationForDeviceId(deviceToStart);
+    }
+    for (const auto& deviceToStop : devicesToStop) {
+        mMelReporterCallback->stopMelComputationForDeviceId(deviceToStop);
+    }
+}
+
+bool SoundDoseManager::shouldComputeCsdForDeviceType(audio_devices_t device) {
+    if (!isCsdEnabled()) {
+        ALOGV("%s csd is disabled", __func__);
+        return false;
+    }
+    if (forceComputeCsdOnAllDevices()) {
+        return true;
+    }
+
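+    // Only wired/USB headsets and headphones and Bluetooth (A2DP, BLE) outputs contribute to CSD.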
+    switch (device) {
+        case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+        case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+        case AUDIO_DEVICE_OUT_USB_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_BROADCAST:
+            return true;
+        default:
+            return false;
+    }
+}
+
+bool SoundDoseManager::shouldComputeCsdForDeviceWithAddress(const audio_devices_t type,
+                                                            const std::string& deviceAddress) {
+    if (!isCsdEnabled()) {
+        ALOGV("%s csd is disabled", __func__);
+        return false;
+    }
+    if (forceComputeCsdOnAllDevices()) {
+        return true;
+    }
+
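+    // Non-Bluetooth (non-BLE/A2DP) devices are decided by type alone; Bluetooth devices must
+    // additionally have been categorized as CSD-compatible for this address.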
+    if (!audio_is_ble_out_device(type) && !audio_is_a2dp_device(type)) {
+        return shouldComputeCsdForDeviceType(type);
+    }
+
+    const std::lock_guard _l(mLock);
+    const auto deviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(deviceAddress, type));
+    return deviceIt != mBluetoothDevicesWithCsd.end() && deviceIt->second;
+}
+
 void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
     // invalidate any HAL sound dose interface used
-    setHalSoundDoseInterface(nullptr);
+    resetHalSoundDoseInterfaces();
 
     const std::lock_guard _l(mLock);
     mUseFrameworkMel = useFrameworkMel;
@@ -419,14 +557,12 @@
         if (!mEnabledCsd) return false;
     }
 
-    std::shared_ptr<ISoundDose> halSoundDose;
-    getHalSoundDose(&halSoundDose);
-    return halSoundDose != nullptr;
+    return useHalSoundDose();
 }
 
-void SoundDoseManager::getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const {
+bool SoundDoseManager::useHalSoundDose() const {
     const std::lock_guard _l(mLock);
-    *halSoundDose = mHalSoundDose;
+    return mHalSoundDose.size() > 0;
 }
 
 void SoundDoseManager::resetSoundDose() {
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 9ed0661..6e0bc34 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -32,6 +32,15 @@
 
 using aidl::android::hardware::audio::core::sounddose::ISoundDose;
 
+class IMelReporterCallback : public virtual RefBase {
+public:
+    IMelReporterCallback() {};
+    virtual ~IMelReporterCallback() {};
+
+    virtual void stopMelComputationForDeviceId(audio_port_handle_t deviceId) = 0;
+    virtual void startMelComputationForDeviceId(audio_port_handle_t deviceId) = 0;
+};
+
 class SoundDoseManager : public audio_utils::MelProcessor::MelCallback {
 public:
     /** CSD is computed with a rolling window of 7 days. */
@@ -39,8 +48,9 @@
     /** Default RS2 upper bound in dBA as defined in IEC 62368-1 3rd edition. */
     static constexpr float kDefaultRs2UpperBound = 100.f;
 
-    SoundDoseManager()
-        : mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
+    explicit SoundDoseManager(const sp<IMelReporterCallback>& melReporterCallback)
+        : mMelReporterCallback(melReporterCallback),
+          mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
           mRs2UpperBound(kDefaultRs2UpperBound) {};
 
     /**
@@ -84,12 +94,15 @@
     sp<media::ISoundDose> getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback);
 
     /**
-     * Sets the HAL sound dose interface to use for the MEL computation. Use nullptr
-     * for using the internal MEL computation.
+     * Sets the HAL sound dose interface for a specific module to use for the MEL computation.
      *
      * @return true if setting the HAL sound dose interface was successful, false otherwise.
      */
-    bool setHalSoundDoseInterface(const std::shared_ptr<ISoundDose>& halSoundDose);
+    bool setHalSoundDoseInterface(const std::string &module,
+                                  const std::shared_ptr<ISoundDose> &halSoundDose);
+
+    /** Resets all the stored HAL sound dose interfaces. */
+    void resetHalSoundDoseInterfaces();
 
     /** Returns the cached audio port id from the active devices. */
     audio_port_handle_t getIdForAudioDevice(
@@ -104,6 +117,21 @@
     /** Returns true if CSD is enabled. */
     bool isCsdEnabled();
 
+    void initCachedAudioDeviceCategories(
+            const std::vector<media::ISoundDose::AudioDeviceCategory>& deviceCategories);
+
+    void setAudioDeviceCategory(
+            const media::ISoundDose::AudioDeviceCategory& audioDevice);
+
+    /**
+     * Returns true if CSD can be computed for the given device type. For Bluetooth devices this
+     * relies on whether the address was categorized as headphones/headset; only in that case is
+     * true returned.
+     */
+    bool shouldComputeCsdForDeviceWithAddress(const audio_devices_t type,
+                                              const std::string& deviceAddress);
+    /** Returns true for all device types which could support CSD computation. */
+    bool shouldComputeCsdForDeviceType(audio_devices_t device);
+
     std::string dump() const;
 
     // used for testing only
@@ -139,6 +167,13 @@
         binder::Status getOutputRs2UpperBound(float* value) override;
         binder::Status setCsdEnabled(bool enabled) override;
 
+        binder::Status initCachedAudioDeviceCategories(
+                const std::vector<media::ISoundDose::AudioDeviceCategory> &btDeviceCategories)
+                override;
+
+        binder::Status setAudioDeviceCategory(
+                const media::ISoundDose::AudioDeviceCategory& btAudioDevice) override;
+
         binder::Status getCsd(float* value) override;
         binder::Status forceUseFrameworkMel(bool useFrameworkMel) override;
         binder::Status forceComputeCsdOnAllDevices(bool computeCsdOnAllDevices) override;
@@ -161,6 +196,7 @@
                 const aidl::android::media::audio::common::AudioDevice& in_audioDevice) override;
 
         wp<SoundDoseManager> mSoundDoseManager;
+        std::mutex mCbLock;
     };
 
     void resetSoundDose();
@@ -174,11 +210,16 @@
     void setUseFrameworkMel(bool useFrameworkMel);
     void setComputeCsdOnAllDevices(bool computeCsdOnAllDevices);
     bool isSoundDoseHalSupported() const;
-    /** Returns the HAL sound dose interface or null if internal MEL computation is used. */
-    void getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const;
+    /**
+     * Returns true if there is at least one active HAL sound dose interface, false if the
+     * internal MEL computation is used.
+     */
+    bool useHalSoundDose() const;
 
     mutable std::mutex mLock;
 
+    const sp<IMelReporterCallback> mMelReporterCallback;
+
     // no need for lock since MelAggregator is thread-safe
     const sp<audio_utils::MelAggregator> mMelAggregator;
 
@@ -191,15 +232,26 @@
     std::map<AudioDeviceTypeAddr, audio_port_handle_t> mActiveDevices GUARDED_BY(mLock);
     std::unordered_map<audio_port_handle_t, audio_devices_t> mActiveDeviceTypes GUARDED_BY(mLock);
 
+    struct bt_device_type_hash {
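+        // Hashes the (BT address, audio device type) pair used as key in mBluetoothDevicesWithCsd.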
+        std::size_t operator() (const std::pair<std::string, audio_devices_t> &deviceType) const {
+            return std::hash<std::string>()(deviceType.first) ^
+                   std::hash<audio_devices_t>()(deviceType.second);
+        }
+    };
+    // Stores the cached BT device information as received from the Java side,
+    // see SoundDoseManager::initCachedAudioDeviceCategories.
+    std::unordered_map<std::pair<std::string, audio_devices_t>, bool, bt_device_type_hash>
+            mBluetoothDevicesWithCsd GUARDED_BY(mLock);
+
     float mRs2UpperBound GUARDED_BY(mLock);
     std::unordered_map<audio_devices_t, float> mMelAttenuationDB GUARDED_BY(mLock);
 
     sp<SoundDose> mSoundDose GUARDED_BY(mLock);
 
-    std::shared_ptr<ISoundDose> mHalSoundDose GUARDED_BY(mLock);
+    std::unordered_map<std::string, std::shared_ptr<ISoundDose>> mHalSoundDose GUARDED_BY(mLock);
     std::shared_ptr<HalSoundDoseCallback> mHalSoundDoseCallback GUARDED_BY(mLock);
 
-    bool mUseFrameworkMel GUARDED_BY(mLock) = true;
+    bool mUseFrameworkMel GUARDED_BY(mLock) = false;
     bool mComputeCsdOnAllDevices GUARDED_BY(mLock) = false;
 
     bool mEnabledCsd GUARDED_BY(mLock) = true;
diff --git a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
index 9fab77d..5f6dcb9 100644
--- a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
+++ b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
@@ -39,21 +39,39 @@
                 (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>&), (override));
 };
 
+class MelReporterCallback : public IMelReporterCallback {
+public:
+    MOCK_METHOD(void, startMelComputationForDeviceId, (audio_port_handle_t), (override));
+    MOCK_METHOD(void, stopMelComputationForDeviceId, (audio_port_handle_t), (override));
+};
+
+constexpr char kPrimaryModule[] = "primary";
+constexpr char kSecondaryModule[] = "secondary";
+
 class SoundDoseManagerTest : public ::testing::Test {
 protected:
     void SetUp() override {
-        mSoundDoseManager = sp<SoundDoseManager>::make();
+        mMelReporterCallback = sp<MelReporterCallback>::make();
+        mSoundDoseManager = sp<SoundDoseManager>::make(mMelReporterCallback);
         mHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
+        mSecondaryHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
 
         ON_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound)
             .WillByDefault([] (float rs2) {
                 EXPECT_EQ(rs2, ISoundDose::DEFAULT_MAX_RS2);
                 return ndk::ScopedAStatus::ok();
             });
+        ON_CALL(*mSecondaryHalSoundDose.get(), setOutputRs2UpperBound)
+            .WillByDefault([] (float rs2) {
+                EXPECT_EQ(rs2, ISoundDose::DEFAULT_MAX_RS2);
+                return ndk::ScopedAStatus::ok();
+            });
     }
 
+    sp<MelReporterCallback> mMelReporterCallback;
     sp<SoundDoseManager> mSoundDoseManager;
     std::shared_ptr<HalSoundDoseMock> mHalSoundDose;
+    std::shared_ptr<HalSoundDoseMock> mSecondaryHalSoundDose;
 };
 
 TEST_F(SoundDoseManagerTest, GetProcessorForExistingStream) {
@@ -101,7 +119,7 @@
 }
 
 TEST_F(SoundDoseManagerTest, InvalidHalInterfaceIsNotSet) {
-    EXPECT_FALSE(mSoundDoseManager->setHalSoundDoseInterface(nullptr));
+    EXPECT_FALSE(mSoundDoseManager->setHalSoundDoseInterface(kPrimaryModule, nullptr));
 }
 
 TEST_F(SoundDoseManagerTest, SetHalSoundDoseDisablesNewMelProcessorCallbacks) {
@@ -113,7 +131,7 @@
             return ndk::ScopedAStatus::ok();
         });
 
-    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(kPrimaryModule, mHalSoundDose));
 
     EXPECT_EQ(nullptr, mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/2,
             /*streamHandle=*/1,
@@ -130,8 +148,17 @@
             EXPECT_NE(nullptr, callback);
             return ndk::ScopedAStatus::ok();
         });
+    EXPECT_CALL(*mSecondaryHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+    EXPECT_CALL(*mSecondaryHalSoundDose.get(), registerSoundDoseCallback)
+            .Times(1)
+            .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+                EXPECT_NE(nullptr, callback);
+                return ndk::ScopedAStatus::ok();
+            });
 
-    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(kPrimaryModule, mHalSoundDose));
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(kSecondaryModule,
+                                                            mSecondaryHalSoundDose));
 }
 
 TEST_F(SoundDoseManagerTest, MomentaryExposureFromHalWithNoAddressIllegalArgument) {
@@ -145,7 +172,7 @@
            return ndk::ScopedAStatus::ok();
        });
 
-    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(kPrimaryModule, mHalSoundDose));
 
     EXPECT_NE(nullptr, halCallback);
     AudioDevice audioDevice = {};
@@ -166,9 +193,9 @@
            return ndk::ScopedAStatus::ok();
        });
 
-    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(kPrimaryModule, mHalSoundDose));
     EXPECT_NE(nullptr, halCallback);
-    EXPECT_FALSE(mSoundDoseManager->setHalSoundDoseInterface(nullptr));
+    mSoundDoseManager->resetHalSoundDoseInterfaces();
 
     AudioDevice audioDevice = {};
     audioDevice.address.set<AudioDeviceAddress::id>("test");
@@ -188,7 +215,7 @@
            return ndk::ScopedAStatus::ok();
        });
 
-    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(kPrimaryModule, mHalSoundDose));
 
     EXPECT_NE(nullptr, halCallback);
     AudioDevice audioDevice = {};
@@ -239,9 +266,56 @@
 }
 
 TEST_F(SoundDoseManagerTest, GetDefaultForceUseFrameworkMel) {
-    // TODO: for now dogfooding with internal MEL. Revert to false when using the HAL MELs
-    EXPECT_TRUE(mSoundDoseManager->forceUseFrameworkMel());
+    EXPECT_FALSE(mSoundDoseManager->forceUseFrameworkMel());
 }
 
+TEST_F(SoundDoseManagerTest, SetAudioDeviceCategoryStopsNonHeadphone) {
+    media::ISoundDose::AudioDeviceCategory device1;
+    device1.address = "dev1";
+    device1.csdCompatible = false;
+    device1.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    const AudioDeviceTypeAddr dev1Adt{AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, device1.address};
+
+    // this will mark the device as active
+    mSoundDoseManager->mapAddressToDeviceId(dev1Adt, /*deviceId=*/1);
+    EXPECT_CALL(*mMelReporterCallback.get(), stopMelComputationForDeviceId).Times(1);
+
+    mSoundDoseManager->setAudioDeviceCategory(device1);
+}
+
+TEST_F(SoundDoseManagerTest, SetAudioDeviceCategoryStartsHeadphone) {
+    media::ISoundDose::AudioDeviceCategory device1;
+    device1.address = "dev1";
+    device1.csdCompatible = true;
+    device1.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    const AudioDeviceTypeAddr dev1Adt{AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, device1.address};
+
+    // this will mark the device as active
+    mSoundDoseManager->mapAddressToDeviceId(dev1Adt, /*deviceId=*/1);
+    EXPECT_CALL(*mMelReporterCallback.get(), startMelComputationForDeviceId).Times(1);
+
+    mSoundDoseManager->setAudioDeviceCategory(device1);
+}
+
+TEST_F(SoundDoseManagerTest, InitCachedAudioDevicesStartsOnlyActiveDevices) {
+    media::ISoundDose::AudioDeviceCategory device1;
+    media::ISoundDose::AudioDeviceCategory device2;
+    device1.address = "dev1";
+    device1.csdCompatible = true;
+    device1.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    device2.address = "dev2";
+    device2.csdCompatible = true;
+    device2.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    const AudioDeviceTypeAddr dev1Adt{AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, device1.address};
+    std::vector<media::ISoundDose::AudioDeviceCategory> btDevices = {device1, device2};
+
+    // this will mark the device as active
+    mSoundDoseManager->mapAddressToDeviceId(dev1Adt, /*deviceId=*/1);
+    EXPECT_CALL(*mMelReporterCallback.get(), startMelComputationForDeviceId).Times(1);
+
+    mSoundDoseManager->initCachedAudioDeviceCategories(btDevices);
+}
+
 }  // namespace
 }  // namespace android
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index da0df5f..d49a002 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -574,6 +574,10 @@
                                              media::DeviceConnectedState state) = 0;
 
     virtual status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) = 0;
+
+    // Get the attributes of the mix port when connecting to the given device port.
+    virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                     struct audio_port_v7 *mixPort) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/OWNERS b/services/audiopolicy/OWNERS
index da9d32f..50ceadf 100644
--- a/services/audiopolicy/OWNERS
+++ b/services/audiopolicy/OWNERS
@@ -1,2 +1,6 @@
+# Bug component: 48436
+elaurent@google.com
+jiabin@google.com
 jmtrivi@google.com
 mnaganov@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 876911d..1e57edd 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -102,9 +102,13 @@
     void setVolume(float volumeDb) { mCurVolumeDb = volumeDb; }
     float getVolume() const { return mCurVolumeDb; }
 
+    void setIsVoice(bool isVoice) { mIsVoice = isVoice; }
+    bool isVoice() const { return mIsVoice; }
+
 private:
     int mMuteCount = 0; /**< mute request counter */
     float mCurVolumeDb = NAN; /**< current volume in dB. */
+    bool mIsVoice = false; /**< true if this volume source is used for voice call volume */
 };
 /**
  * Note: volume activities shall be indexed by CurvesId if we want to allow multiple
@@ -162,7 +166,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     /**
      * @brief setStopTime set the stop time due to the client stoppage or a re routing of this
@@ -222,17 +227,25 @@
     {
         return mVolumeActivities[vs].decMuteCount();
     }
-    void setCurVolume(VolumeSource vs, float volumeDb)
+    void setCurVolume(VolumeSource vs, float volumeDb, bool isVoiceVolSrc)
     {
         // Even if no activity is registered for this source, it needs to be created anyway
         mVolumeActivities[vs].setVolume(volumeDb);
+        mVolumeActivities[vs].setIsVoice(isVoiceVolSrc);
     }
     float getCurVolume(VolumeSource vs) const
     {
         return mVolumeActivities.find(vs) != std::end(mVolumeActivities) ?
                     mVolumeActivities.at(vs).getVolume() : NAN;
     }
-
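+
+    /** Returns the volume source marked as the voice call volume source,
+     *  or VOLUME_SOURCE_NONE if there is none. */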
+    VolumeSource getVoiceSource() {
+        for (const auto &iter : mVolumeActivities) {
+            if (iter.second.isVoice()) {
+                return iter.first;
+            }
+        }
+        return VOLUME_SOURCE_NONE;
+    }
+
     bool isStrategyActive(product_strategy_t ps, uint32_t inPastMs = 0, nsecs_t sysTime = 0) const
     {
         return mRoutingActivities.find(ps) != std::end(mRoutingActivities)?
@@ -381,7 +394,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& device,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
                            const struct audio_port_config *srcConfig = NULL) const;
@@ -424,6 +438,15 @@
     bool supportsAllDevices(const DeviceVector &devices) const;
 
     /**
+     * @brief supportsAtLeastOne checks if any device in devices is currently supported
+     * @param devices to be checked against
+     * @return true if a device is weakly supported by type (e.g. for non-bus/remote-submix
+     *         devices), true if a device is supported by both type and address for bus/remote
+     *         submix devices, false otherwise
+     */
+    bool supportsAtLeastOne(const DeviceVector &devices) const;
+
+    /**
      * @brief supportsDevicesForPlayback
      * @param devices to be checked against
      * @return true if the devices is a supported combo for playback
@@ -475,7 +498,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
                            const struct audio_port_config *srcConfig = NULL) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 92292e1..7e29e10 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -138,7 +138,7 @@
      */
     status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices);
     status_t removeUserIdDeviceAffinities(int userId);
-    status_t getDevicesForUserId(int userId, Vector<AudioDeviceTypeAddr>& devices) const;
+    status_t getDevicesForUserId(int userId, AudioDeviceTypeAddrVector& devices) const;
 
     void dump(String8 *dst) const;
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 80e098b..6c130fd 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -89,6 +89,8 @@
     void importAudioPortAndPickAudioProfile(const sp<PolicyAudioPort>& policyPort,
                                             bool force = false);
 
+    status_t readFromParcelable(const media::AudioPortFw& parcelable) override;
+
     void setEncapsulationInfoFromHal(AudioPolicyClientInterface *clientInterface);
 
     void dump(String8 *dst, int spaces, bool verbose = true) const;
@@ -104,7 +106,7 @@
     std::string mTagName; // Unique human readable identifier for a device port found in conf file.
     audio_format_t      mCurrentEncodedFormat;
     bool                mIsDynamic = false;
-    const std::string   mDeclaredAddress; // Original device address
+    std::string         mDeclaredAddress; // Original device address
 };
 
 class DeviceVector : public SortedVector<sp<DeviceDescriptor> >
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index cf20260..d206637 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -89,10 +89,12 @@
     status_t addProfile(const sp<IOProfile> &profile);
 
     status_t addOutputProfile(const std::string& name, const audio_config_t *config,
-            audio_devices_t device, const String8& address);
+            audio_devices_t device, const String8& address,
+            audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE);
     status_t removeOutputProfile(const std::string& name);
     status_t addInputProfile(const std::string& name, const audio_config_t *config,
-            audio_devices_t device, const String8& address);
+            audio_devices_t device, const String8& address,
+            audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE);
     status_t removeInputProfile(const std::string& name);
 
     audio_module_handle_t getHandle() const { return mHandle; }
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index c489eed..f3a9518 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -63,13 +63,7 @@
         if (getRole() == AUDIO_PORT_ROLE_SINK && (flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
             maxActiveCount = 0;
         }
-        if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
-            mMixerBehaviors.clear();
-            mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_DEFAULT);
-            if (mFlags.output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
-                mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_BIT_PERFECT);
-            }
-        }
+        refreshMixerBehaviors();
     }
 
     const MixerBehaviorSet& getMixerBehaviors() const {
@@ -222,6 +216,10 @@
 
     void toSupportedMixerAttributes(std::vector<audio_mixer_attributes_t>* mixerAttributes) const;
 
+    status_t readFromParcelable(const media::AudioPortFw& parcelable);
+
+    void importAudioPort(const audio_port_v7& port) override;
+
     // Number of streams currently opened for this profile.
     uint32_t     curOpenCount;
     // Number of streams currently active for this profile. This is not the number of active clients
@@ -229,6 +227,8 @@
     uint32_t     curActiveCount;
 
 private:
+    void refreshMixerBehaviors();
+
     DeviceVector mSupportedDevices; // supported devices: this input/output can be routed from/to
 
     MixerBehaviorSet mMixerBehaviors;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 4877166..37cbbc4 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -163,7 +163,8 @@
                                       const StreamTypeVector &/*streams*/,
                                       const DeviceTypeSet& deviceTypes,
                                       uint32_t delayMs,
-                                      bool force)
+                                      bool force,
+                                      bool isVoiceVolSrc)
 {
 
     if (!supportedDevices().containsDeviceAmongTypes(deviceTypes)) {
@@ -176,7 +177,7 @@
     // - the force flag is set
     if (volumeDb != getCurVolume(volumeSource) || force) {
         ALOGV("%s for volumeSrc %d, volume %f, delay %d", __func__, volumeSource, volumeDb, delayMs);
-        setCurVolume(volumeSource, volumeDb);
+        setCurVolume(volumeSource, volumeDb, isVoiceVolSrc);
         return true;
     }
     return false;
@@ -322,7 +323,7 @@
     mOutput1(0), mOutput2(0), mDirectOpenCount(0),
     mDirectClientSession(AUDIO_SESSION_NONE)
 {
-    if (profile != NULL) {
+    if (profile != nullptr) {
         // By default, opening the output without immutable flags, the bit-perfect flags should be
         // applied when the apps explicitly request.
         mFlags = (audio_output_flags_t)(profile->getFlags() & (~AUDIO_OUTPUT_FLAG_BIT_PERFECT));
@@ -376,7 +377,10 @@
         supportedDevices.merge(mOutput2->supportedDevices());
         return supportedDevices;
     }
-    return mProfile->getSupportedDevices();
+    if (mProfile != nullptr) {
+        return mProfile->getSupportedDevices();
+    }
+    return DeviceVector();
 }
 
 bool SwAudioOutputDescriptor::supportsDevice(const sp<DeviceDescriptor> &device) const
@@ -389,6 +393,11 @@
     return supportedDevices().containsAllDevices(devices);
 }
 
+bool SwAudioOutputDescriptor::supportsAtLeastOne(const DeviceVector &devices) const
+{
+    return filterSupportedDevices(devices).size() > 0;
+}
+
 bool SwAudioOutputDescriptor::supportsDevicesForPlayback(const DeviceVector &devices) const
 {
     // No considering duplicated output
@@ -407,9 +416,10 @@
     if (isDuplicated()) {
         return (mOutput1->devicesSupportEncodedFormats(deviceTypes)
                     || mOutput2->devicesSupportEncodedFormats(deviceTypes));
-    } else {
+    } else if (mProfile != nullptr) {
        return mProfile->devicesSupportEncodedFormats(deviceTypes);
     }
+    return false;
 }
 
 bool SwAudioOutputDescriptor::containsSingleDeviceSupportingEncodedFormats(
@@ -419,7 +429,10 @@
         return (mOutput1->containsSingleDeviceSupportingEncodedFormats(device) &&
                 mOutput2->containsSingleDeviceSupportingEncodedFormats(device));
     }
-    return mProfile->containsSingleDeviceSupportingEncodedFormats(device);
+    if (mProfile != nullptr) {
+        return mProfile->containsSingleDeviceSupportingEncodedFormats(device);
+    }
+    return false;
 }
 
 uint32_t SwAudioOutputDescriptor::latency()
@@ -505,11 +518,12 @@
                                         VolumeSource vs, const StreamTypeVector &streamTypes,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
-                                        bool force)
+                                        bool force,
+                                        bool isVoiceVolSrc)
 {
     StreamTypeVector streams = streamTypes;
     if (!AudioOutputDescriptor::setVolume(
-            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force)) {
+            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
         return false;
     }
     if (streams.empty()) {
@@ -555,6 +569,10 @@
     float volumeAmpl = Volume::DbToAmpl(getCurVolume(vs));
     if (hasStream(streams, AUDIO_STREAM_BLUETOOTH_SCO)) {
         mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle, delayMs);
+        VolumeSource callVolSrc = getVoiceSource();
+        if (callVolSrc != VOLUME_SOURCE_NONE) {
+            setCurVolume(callVolSrc, getCurVolume(vs), true);
+        }
     }
     for (const auto &stream : streams) {
         ALOGV("%s output %d for volumeSource %d, volume %f, delay %d stream=%s", __func__,
@@ -578,6 +596,11 @@
                         "with the requested devices, all device types: %s",
                         __func__, dumpDeviceTypes(devices.types()).c_str());
 
+    if (mProfile == nullptr) {
+        ALOGE("%s: Cannot open descriptor without a profile", __func__);
+        return INVALID_OPERATION;
+    }
+
     audio_config_t lHalConfig;
     if (halConfig == nullptr) {
         lHalConfig = AUDIO_CONFIG_INITIALIZER;
@@ -662,7 +685,7 @@
         }
         return NO_ERROR;
     }
-    if (!isActive()) {
+    if (mProfile != nullptr && !isActive()) {
         if (!mProfile->canStartNewIo()) {
             return INVALID_OPERATION;
         }
@@ -679,7 +702,7 @@
         return;
     }
 
-    if (!isActive()) {
+    if (mProfile != nullptr && !isActive()) {
         LOG_ALWAYS_FATAL_IF(mProfile->curActiveCount < 1,
                             "%s invalid profile active count %u",
                             __func__, mProfile->curActiveCount);
@@ -702,10 +725,11 @@
         }
 
         mClientInterface->closeOutput(mIoHandle);
-
-        LOG_ALWAYS_FATAL_IF(mProfile->curOpenCount < 1, "%s profile open count %u",
-                            __FUNCTION__, mProfile->curOpenCount);
-        mProfile->curOpenCount--;
+        if (mProfile != nullptr) {
+            LOG_ALWAYS_FATAL_IF(mProfile->curOpenCount < 1, "%s profile open count %u",
+                                __FUNCTION__, mProfile->curOpenCount);
+            mProfile->curOpenCount--;
+        }
         mIoHandle = AUDIO_IO_HANDLE_NONE;
     }
 }
@@ -740,7 +764,10 @@
         return std::max(mOutput1->getRecommendedMuteDurationMs(),
                 mOutput2->getRecommendedMuteDurationMs());
     }
-    return mProfile->recommendedMuteDurationMs;
+    if (mProfile != nullptr) {
+        return mProfile->recommendedMuteDurationMs;
+    }
+    return 0;
 }
 
 void SwAudioOutputDescriptor::setTracksInvalidatedStatusByStrategy(product_strategy_t strategy) {
@@ -783,10 +810,11 @@
                                         VolumeSource volumeSource, const StreamTypeVector &streams,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
-                                        bool force)
+                                        bool force,
+                                        bool isVoiceVolSrc)
 {
     bool changed = AudioOutputDescriptor::setVolume(
-            volumeDb, muted, volumeSource, streams, deviceTypes, delayMs, force);
+            volumeDb, muted, volumeSource, streams, deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (changed) {
       // TODO: use gain controller on source device if any to adjust volume
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
index 8c7a7de..4edd11f 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
@@ -29,6 +29,8 @@
 
 namespace android {
 
+using media::audio::common::AudioDeviceAddress;
+using media::audio::common::AudioDeviceType;
 using media::audio::common::AudioIoFlags;
 using media::audio::common::AudioPortDeviceExt;
 using media::audio::common::AudioPortExt;
@@ -113,12 +115,17 @@
             ports.emplace(aidlPort.id, devicePort);
 
             if (const auto& deviceExt = aidlPort.ext.get<AudioPortExt::device>();
-                    deviceExt.device.type.connection.empty()) {  // Attached device
+                    deviceExt.device.type.connection.empty() ||
+                    // DeviceHalAidl connects remote submix input with an address.
+                    (deviceExt.device.type.type == AudioDeviceType::IN_SUBMIX &&
+                            deviceExt.device.address != AudioDeviceAddress())) {
+                // Attached device.
                 if (isInput) {
                     attachedInputDevices->add(devicePort);
                 } else {
                     attachedOutputDevices->add(devicePort);
-                    if ((deviceExt.flags & defaultDeviceFlag) != 0) {
+                    if (*defaultOutputDevice == nullptr &&
+                            (deviceExt.flags & defaultDeviceFlag) != 0) {
                         *defaultOutputDevice = devicePort;
                     }
                 }
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index b41f86d..f870b4f 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -642,7 +642,7 @@
 }
 
 status_t AudioPolicyMixCollection::getDevicesForUserId(int userId,
-        Vector<AudioDeviceTypeAddr>& devices) const {
+        AudioDeviceTypeAddrVector& devices) const {
     // for each player mix:
     // find rules that don't exclude this userId, and add the device to the list
     for (size_t i = 0; i < size(); i++) {
@@ -660,7 +660,7 @@
             }
         }
         if (ruleAllowsUserId) {
-            devices.add(AudioDeviceTypeAddr(mix->mDeviceType, mix->mDeviceAddress.c_str()));
+            devices.push_back(AudioDeviceTypeAddr(mix->mDeviceType, mix->mDeviceAddress.c_str()));
         }
     }
     return NO_ERROR;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
index 8ccb8b9..82f51ad 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
@@ -115,12 +115,22 @@
         profile->setDynamicFormat(true);
         profile->setDynamicChannels(dynamicFormatProfile->isDynamicChannels());
         profile->setDynamicRate(dynamicFormatProfile->isDynamicRate());
-        addAudioProfileAndSort(audioProfileVector, profile);
+        size_t profileIndex = 0;
+        for (; profileIndex < audioProfileVector.size(); profileIndex++) {
+            if (profile->equals(audioProfileVector.at(profileIndex))) {
+                // The dynamic profile is already there
+                break;
+            }
+        }
+        if (profileIndex >= audioProfileVector.size()) {
+            // Only add when the dynamic profile is not there
+            addAudioProfileAndSort(audioProfileVector, profile);
+        }
     }
 }
 
 void addDynamicAudioProfileAndSort(AudioProfileVector &audioProfileVector,
-                                      const sp<AudioProfile> &profileToAdd)
+                                   const sp<AudioProfile> &profileToAdd)
 {
     // Check valid profile to add:
     if (!profileToAdd->hasValidFormat()) {
@@ -143,11 +153,15 @@
                 audioProfileVector, profileToAdd->getChannels(), profileToAdd->getFormat());
         return;
     }
+    const bool originalIsDynamicFormat = profileToAdd->isDynamicFormat();
+    profileToAdd->setDynamicFormat(true); // set the format as dynamic to allow removal
     // Go through the list of profile to avoid duplicates
     for (size_t profileIndex = 0; profileIndex < audioProfileVector.size(); profileIndex++) {
         const sp<AudioProfile> &profile = audioProfileVector.at(profileIndex);
-        if (profile->isValid() && profile == profileToAdd) {
-            // Nothing to do
+        if (profile->isValid() && profile->equals(profileToAdd)) {
+            // The same profile is already there, no need to add.
+            // Reset the dynamic format flag to its original value.
+            profileToAdd->setDynamicFormat(originalIsDynamicFormat);
             return;
         }
     }
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 62e5bd4..fe25693 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -154,6 +154,12 @@
     policyPort->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
 }
 
+status_t DeviceDescriptor::readFromParcelable(const media::AudioPortFw& parcelable) {
+    RETURN_STATUS_IF_ERROR(DeviceDescriptorBase::readFromParcelable(parcelable));
+    mDeclaredAddress = DeviceDescriptorBase::address();
+    return OK;
+}
+
 void DeviceDescriptor::setEncapsulationInfoFromHal(
         AudioPolicyClientInterface *clientInterface) {
     AudioParameter param(String8(mDeviceTypeAddr.getAddress()));
@@ -225,8 +231,7 @@
 {
     bool added = false;
     for (const auto& device : devices) {
-        ALOG_ASSERT(device != nullptr, "Null pointer found when adding DeviceVector");
-        if (indexOf(device) < 0 && SortedVector::add(device) >= 0) {
+        if (device && indexOf(device) < 0 && SortedVector::add(device) >= 0) {
             added = true;
         }
     }
@@ -238,7 +243,10 @@
 
 ssize_t DeviceVector::add(const sp<DeviceDescriptor>& item)
 {
-    ALOG_ASSERT(item != nullptr, "Adding null pointer to DeviceVector");
+    if (!item) {
+        ALOGW("DeviceVector::%s() null device", __func__);
+        return -1;
+    }
     ssize_t ret = indexOf(item);
 
     if (ret < 0) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index 2c8e50b..6696b45 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -59,12 +59,13 @@
 }
 
 status_t HwModule::addOutputProfile(const std::string& name, const audio_config_t *config,
-                                    audio_devices_t device, const String8& address)
+                                    audio_devices_t device, const String8& address,
+                                    audio_output_flags_t flags)
 {
     sp<IOProfile> profile = new OutputProfile(name);
-
     profile->addAudioProfile(new AudioProfile(config->format, config->channel_mask,
                                               config->sample_rate));
+    profile->setFlags(flags);
 
     sp<DeviceDescriptor> devDesc =
             new DeviceDescriptor(device, getTagForDevice(device), address.c_str());
@@ -128,11 +129,13 @@
 }
 
 status_t HwModule::addInputProfile(const std::string& name, const audio_config_t *config,
-                                   audio_devices_t device, const String8& address)
+                                   audio_devices_t device, const String8& address,
+                                   audio_input_flags_t flags)
 {
     sp<IOProfile> profile = new InputProfile(name);
     profile->addAudioProfile(new AudioProfile(config->format, config->channel_mask,
                                               config->sample_rate));
+    profile->setFlags(flags);
 
     sp<DeviceDescriptor> devDesc =
             new DeviceDescriptor(device, getTagForDevice(device), address.c_str());
@@ -361,7 +364,7 @@
         DeviceVector moduleDevices = hwModule->getAllDevices();
         auto moduleDevice = moduleDevices.getDevice(deviceType, devAddress, encodedFormat);
 
-        // Prevent overwritting moduleDevice address if connected device does not have the same
+        // Prevent overwriting moduleDevice address if connected device does not have the same
         // address (since getDevice with empty address ignores match on address), use dynamic device
         if (moduleDevice && allowToCreate &&
                 (!moduleDevice->address().empty() &&
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 03ab3f8..dd222de 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -171,6 +171,49 @@
     }
 }
 
+void IOProfile::refreshMixerBehaviors() {
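+    // Output (source role) profiles always support the default mixer behavior; bit-perfect
+    // mixing is advertised only when AUDIO_OUTPUT_FLAG_BIT_PERFECT is set on the profile.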
+    if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
+        mMixerBehaviors.clear();
+        mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_DEFAULT);
+        if (mFlags.output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+            mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_BIT_PERFECT);
+        }
+    }
+}
+
+status_t IOProfile::readFromParcelable(const media::AudioPortFw &parcelable) {
+    status_t status = AudioPort::readFromParcelable(parcelable);
+    if (status == OK) {
+        refreshMixerBehaviors();
+    }
+    return status;
+}
+
+void IOProfile::importAudioPort(const audio_port_v7 &port) {
+    if (mProfiles.hasDynamicFormat()) {
+        std::set<audio_format_t> formats;
+        for (size_t i = 0; i < port.num_audio_profiles; ++i) {
+            formats.insert(port.audio_profiles[i].format);
+        }
+        addProfilesForFormats(mProfiles, FormatVector(formats.begin(), formats.end()));
+    }
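+    // For each supported format, import the channel masks and sample rates reported by the HAL
+    // as dynamic audio profiles.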
+    for (audio_format_t format : mProfiles.getSupportedFormats()) {
+        for (size_t i = 0; i < port.num_audio_profiles; ++i) {
+            if (port.audio_profiles[i].format == format) {
+                ChannelMaskSet channelMasks(port.audio_profiles[i].channel_masks,
+                        port.audio_profiles[i].channel_masks +
+                                port.audio_profiles[i].num_channel_masks);
+                SampleRateSet sampleRates(port.audio_profiles[i].sample_rates,
+                        port.audio_profiles[i].sample_rates +
+                                port.audio_profiles[i].num_sample_rates);
+                addDynamicAudioProfileAndSort(
+                        mProfiles, sp<AudioProfile>::make(
+                                format, channelMasks, sampleRates));
+            }
+        }
+    }
+}
+
 void IOProfile::dump(String8 *dst, int spaces) const
 {
     String8 extraInfo;
@@ -195,6 +238,10 @@
             spaces - 2, "", maxActiveCount, curActiveCount);
     dst->appendFormat("%*s- recommendedMuteDurationMs: %u ms\n",
             spaces - 2, "", recommendedMuteDurationMs);
+    if (hasDynamicAudioProfile() && !mMixerBehaviors.empty()) {
+        dst->appendFormat("%*s- mixerBehaviors: %s\n",
+                spaces - 2, "", dumpMixerBehaviors(mMixerBehaviors).c_str());
+    }
 }
 
 void IOProfile::log()
diff --git a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
index 93122e0..fcf410b 100644
--- a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
+++ b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
@@ -79,6 +79,7 @@
     case AUDIO_DEVICE_OUT_USB_ACCESSORY:
     case AUDIO_DEVICE_OUT_USB_DEVICE:
     case AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET:
+    case AUDIO_DEVICE_OUT_AUX_DIGITAL:
         return GROUP_WIRED;
     case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
     case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 15f7842..e06bbb3 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -296,7 +296,8 @@
                             // excluding HEARING_AID and BLE_HEADSET because Dialer uses
                             // setCommunicationDevice to select them explicitly
                             AUDIO_DEVICE_OUT_HEARING_AID,
-                            AUDIO_DEVICE_OUT_BLE_HEADSET
+                            AUDIO_DEVICE_OUT_BLE_HEADSET,
+                            AUDIO_DEVICE_OUT_AUX_DIGITAL
                             }));
         if (!devices.isEmpty()) break;
         devices = availableOutputDevices.getFirstDevicesFromTypes({
@@ -399,20 +400,21 @@
         }
 
         if (devices2.isEmpty() && (getLastRemovableMediaDevices().size() > 0)) {
+            std::vector<audio_devices_t> excludedDevices;
+            // no sonification on aux digital (e.g. HDMI)
+            if (strategy == STRATEGY_SONIFICATION) {
+                excludedDevices.push_back(AUDIO_DEVICE_OUT_AUX_DIGITAL);
+            }
             if ((getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
                 // Get the last connected device of wired and bluetooth a2dp
                 devices2 = availableOutputDevices.getFirstDevicesFromTypes(
-                        getLastRemovableMediaDevices());
+                        getLastRemovableMediaDevices(GROUP_NONE, excludedDevices));
             } else {
                 // Get the last connected device of wired except bluetooth a2dp
                 devices2 = availableOutputDevices.getFirstDevicesFromTypes(
-                        getLastRemovableMediaDevices(GROUP_WIRED));
+                        getLastRemovableMediaDevices(GROUP_WIRED, excludedDevices));
             }
         }
-        if ((devices2.isEmpty()) && (strategy != STRATEGY_SONIFICATION)) {
-            // no sonification on aux digital (e.g. HDMI)
-            devices2 = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_AUX_DIGITAL);
-        }
         if ((devices2.isEmpty()) &&
                 (getForceUse(AUDIO_POLICY_FORCE_FOR_DOCK) == AUDIO_POLICY_FORCE_ANALOG_DOCK)) {
             devices2 = availableOutputDevices.getDevicesFromType(
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
new file mode 100644
index 0000000..38a2cde
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -0,0 +1,74 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ ******************************************************************************/
+
+cc_defaults {
+    name: "audiopolicy_aidl_fuzzer_defaults",
+    shared_libs: [
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudiopolicy",
+        "libaudiopolicymanagerdefault",
+        "libactivitymanager_aidl",
+        "libaudiohal",
+        "libaudiopolicyservice",
+        "libaudioflinger",
+        "libaudioclient",
+        "libaudioprocessing",
+        "libhidlbase",
+        "liblog",
+        "libmediautils",
+        "libnblog",
+        "libnbaio",
+        "libpowermanager",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
+    ],
+    static_libs: [
+        "libfakeservicemanager",
+        "libmediaplayerservice",
+    ],
+    header_libs: [
+        "libaudiohal_headers",
+        "libaudioflinger_headers",
+        "libaudiopolicymanager_interface_headers",
+        "libbinder_headers",
+        "libmedia_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libaudiopolicy",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
+}
+
+cc_fuzz {
+    name: "audiopolicy_aidl_fuzzer",
+    srcs: ["audiopolicy_aidl_fuzzer.cpp"],
+    defaults: [
+        "audiopolicy_aidl_fuzzer_defaults",
+        "service_fuzzer_defaults",
+    ],
+}
diff --git a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
new file mode 100644
index 0000000..ca79c49
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <AudioFlinger.h>
+#include <android-base/logging.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_process.h>
+#include <android/media/IAudioPolicyService.h>
+#include <fakeservicemanager/FakeServiceManager.h>
+#include <fuzzbinder/libbinder_driver.h>
+#include <fuzzbinder/random_binder.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/IAudioFlinger.h>
+#include <service/AudioPolicyService.h>
+
+using namespace android;
+using namespace android::binder;
+using namespace android::hardware;
+using android::fuzzService;
+
+[[clang::no_destroy]] static std::once_flag gSmOnce;
+sp<FakeServiceManager> gFakeServiceManager;
+
+bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
+                FuzzedDataProvider& fdp) {
+    sp<IBinder> binder = getRandomBinder(&fdp);
+    if (binder == nullptr) {
+        return false;
+    }
+    CHECK_EQ(NO_ERROR, fakeServiceManager->addService(serviceName, binder));
+    return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+
+    std::call_once(gSmOnce, [&] {
+        /* Create a FakeServiceManager instance and add required services */
+        gFakeServiceManager = sp<FakeServiceManager>::make();
+        setDefaultServiceManager(gFakeServiceManager);
+    });
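+    // Clear any services registered during a previous fuzzer iteration before re-adding them.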
+    gFakeServiceManager->clear();
+
+    for (const char* service :
+         {"activity", "sensor_privacy", "permission", "scheduling_policy",
+          "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+        if (!addService(String16(service), gFakeServiceManager, fdp)) {
+            return 0;
+        }
+    }
+
+    const auto audioFlinger = sp<AudioFlinger>::make();
+    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+
+    CHECK_EQ(NO_ERROR,
+             gFakeServiceManager->addService(
+                     String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
+                     false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+
+    AudioSystem::get_audio_flinger_for_fuzzer();
+    const auto audioPolicyService = sp<AudioPolicyService>::make();
+
+    CHECK_EQ(NO_ERROR,
+             gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+                                             false /* allowIsolated */,
+                                             IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+
+    fuzzService(media::IAudioPolicyService::asBinder(audioPolicyService),
+                FuzzedDataProvider(data, size));
+
+    return 0;
+}
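
The new fuzzer follows the common libbinder service-fuzzer recipe: register stand-in
dependencies with a FakeServiceManager, construct the real service, and let fuzzService()
replay the fuzz input as binder transactions against it. A minimal sketch of that recipe,
assuming a hypothetical FooService binder implementation (only fuzzService(),
FuzzedDataProvider, and sp<>::make are real APIs used above):

    #include <binder/IInterface.h>
    #include <fuzzbinder/libbinder_driver.h>
    #include <fuzzer/FuzzedDataProvider.h>

    // Sketch only: FooService stands in for any BnXyz service under test.
    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        auto service = android::sp<FooService>::make();
        // Drives randomly generated binder transactions against the service.
        android::fuzzService(android::IInterface::asBinder(service),
                             FuzzedDataProvider(data, size));
        return 0;
    }
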
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 8793085..58fcb5c 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -661,7 +661,9 @@
 }
 
 AudioPolicyManagerFuzzerDPPlaybackReRouting::~AudioPolicyManagerFuzzerDPPlaybackReRouting() {
-    mManager->stopInput(mPortId);
+    if (mManager) {
+        mManager->stopInput(mPortId);
+    }
 }
 
 bool AudioPolicyManagerFuzzerDPPlaybackReRouting::initialize() {
@@ -773,7 +775,9 @@
 }
 
 AudioPolicyManagerFuzzerDPMixRecordInjection::~AudioPolicyManagerFuzzerDPMixRecordInjection() {
-    mManager->stopOutput(mPortId);
+    if (mManager) {
+        mManager->stopOutput(mPortId);
+    }
 }
 
 bool AudioPolicyManagerFuzzerDPMixRecordInjection::initialize() {
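
The two destructor fixes above guard against a null mManager when a fuzzer iteration's
initialize() fails before the manager is constructed. A minimal sketch of the pattern
(ManagerUnderTest and the member names are placeholders, not types from this change):

    #include <memory>

    struct FuzzerScaffold {
        ~FuzzerScaffold() {
            if (mManager) {  // initialize() may have bailed out before creating mManager
                mManager->stopInput(mPortId);
            }
        }
        std::unique_ptr<ManagerUnderTest> mManager;  // hypothetical manager type
        int mPortId = 0;
    };
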
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index cc32aec..2fd908f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1298,7 +1298,8 @@
         if (outputDevices.size() == 1) {
             info = getPreferredMixerAttributesInfo(
                     outputDevices.itemAt(0)->getId(),
-                    mEngine->getProductStrategyForAttributes(*resultAttr));
+                    mEngine->getProductStrategyForAttributes(*resultAttr),
+                    true /*activeBitPerfectPreferred*/);
             // Only use preferred mixer if the uid matches or the preferred mixer is bit-perfect
             // and it is currently active.
             if (info != nullptr && info->getUid() != uid &&
@@ -1736,7 +1737,8 @@
     // Compressed formats for MSD module, ordered from most preferred to least preferred.
     static const std::vector<audio_format_t> formatsOrder = {{
             AUDIO_FORMAT_IEC60958, AUDIO_FORMAT_MAT_2_1, AUDIO_FORMAT_MAT_2_0, AUDIO_FORMAT_E_AC3,
-            AUDIO_FORMAT_AC3, AUDIO_FORMAT_PCM_16_BIT }};
+            AUDIO_FORMAT_AC3, AUDIO_FORMAT_PCM_FLOAT, AUDIO_FORMAT_PCM_32_BIT,
+            AUDIO_FORMAT_PCM_8_24_BIT, AUDIO_FORMAT_PCM_24_BIT_PACKED, AUDIO_FORMAT_PCM_16_BIT }};
     static const std::vector<audio_channel_mask_t> channelMasksOrder = [](){
         // Channel position masks for MSD module, 3D > 2D > 1D ordering (most preferred to least
         // preferred).
@@ -2152,6 +2154,26 @@
                 return DEAD_OBJECT;
             }
             info->increaseActiveClient();
+            if (info->getActiveClientCount() == 1 &&
+                (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+                // If this is the first bit-perfect client, reroute all clients that will be
+                // routed to the bit-perfect sink so that only the bit-perfect stream is active.
+                PortHandleVector clientsToInvalidate;
+                for (size_t i = 0; i < mOutputs.size(); i++) {
+                    if (mOutputs[i] == outputDesc ||
+                        mOutputs[i]->devices().filter(outputDesc->devices()).isEmpty()) {
+                        continue;
+                    }
+                    for (const auto& c : mOutputs[i]->getClientIterable()) {
+                        clientsToInvalidate.push_back(c->portId());
+                    }
+                }
+                if (!clientsToInvalidate.empty()) {
+                    ALOGD("%s Invalidate clients due to first bit-perfect client started",
+                          __func__);
+                    mpClientInterface->invalidateTracks(clientsToInvalidate);
+                }
+            }
         }
     }
 
@@ -3203,7 +3225,8 @@
         ALOGD("%s: no group matching with %s", __FUNCTION__, toString(attributes).c_str());
         return BAD_VALUE;
     }
-    ALOGV("%s: group %d matching with %s", __FUNCTION__, group, toString(attributes).c_str());
+    ALOGV("%s: group %d matching with %s index %d",
+            __FUNCTION__, group, toString(attributes).c_str(), index);
     status_t status = NO_ERROR;
     IVolumeCurves &curves = getVolumeCurves(attributes);
     VolumeSource vs = toVolumeSource(group);
@@ -3320,6 +3343,21 @@
             status = volStatus;
         }
     }
+
+    // update the voice volume if an active call route exists
+    if (mCallRxSourceClient != nullptr && mCallRxSourceClient->isConnected()
+            && (curSrcDevices.find(
+                Volume::getDeviceForVolume({mCallRxSourceClient->sinkDevice()->type()}))
+                != curSrcDevices.end())) {
+        bool isVoiceVolSrc;
+        bool isBtScoVolSrc;
+        if (isVolumeConsistentForCalls(vs, {mCallRxSourceClient->sinkDevice()->type()},
+                isVoiceVolSrc, isBtScoVolSrc, __func__)
+                && (isVoiceVolSrc || isBtScoVolSrc)) {
+            setVoiceVolume(index, curves, isVoiceVolSrc, 0);
+        }
+    }
+
     mpClientInterface->onAudioVolumeGroupChanged(group, 0 /*flags*/);
     return status;
 }
@@ -3616,9 +3654,13 @@
             outputConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
             inputConfig.channel_mask = AUDIO_CHANNEL_IN_STEREO;
             rSubmixModule->addOutputProfile(address.c_str(), &outputConfig,
-                    AUDIO_DEVICE_OUT_REMOTE_SUBMIX, address);
+                    AUDIO_DEVICE_OUT_REMOTE_SUBMIX, address,
+                    audio_is_linear_pcm(outputConfig.format)
+                        ? AUDIO_OUTPUT_FLAG_NONE : AUDIO_OUTPUT_FLAG_DIRECT);
             rSubmixModule->addInputProfile(address.c_str(), &inputConfig,
-                    AUDIO_DEVICE_IN_REMOTE_SUBMIX, address);
+                    AUDIO_DEVICE_IN_REMOTE_SUBMIX, address,
+                    audio_is_linear_pcm(inputConfig.format)
+                        ? AUDIO_INPUT_FLAG_NONE : AUDIO_INPUT_FLAG_DIRECT);
 
             if ((res = setDeviceConnectionStateInt(deviceTypeToMakeAvailable,
                     AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
@@ -3787,6 +3829,44 @@
     return true;
 }
 
+void AudioPolicyManager::changeOutputDevicesMuteState(
+        const AudioDeviceTypeAddrVector& devices) {
+    ALOGVV("%s() num devices %zu", __func__, devices.size());
+
+    std::vector<sp<SwAudioOutputDescriptor>> outputs =
+            getSoftwareOutputsForDevices(devices);
+
+    for (size_t i = 0; i < outputs.size(); i++) {
+        sp<SwAudioOutputDescriptor> outputDesc = outputs[i];
+        DeviceVector prevDevices = outputDesc->devices();
+        checkDeviceMuteStrategies(outputDesc, prevDevices, 0 /* delayMs */);
+    }
+}
+
+std::vector<sp<SwAudioOutputDescriptor>> AudioPolicyManager::getSoftwareOutputsForDevices(
+        const AudioDeviceTypeAddrVector& devices) const
+{
+    std::vector<sp<SwAudioOutputDescriptor>> outputs;
+    DeviceVector deviceDescriptors;
+    for (size_t j = 0; j < devices.size(); j++) {
+        sp<DeviceDescriptor> desc = mHwModules.getDeviceDescriptor(
+                devices[j].mType, devices[j].getAddress(), String8(), AUDIO_FORMAT_DEFAULT);
+        if (desc == nullptr || !audio_is_output_device(devices[j].mType)) {
+            ALOGE("%s: device type %#x address %s not supported or not an output device",
+                __func__, devices[j].mType, devices[j].getAddress());
+            continue;
+        }
+        deviceDescriptors.add(desc);
+    }
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        if (!mOutputs.valueAt(i)->supportsAtLeastOne(deviceDescriptors)) {
+            continue;
+        }
+        outputs.push_back(mOutputs.valueAt(i));
+    }
+    return outputs;
+}
+
 status_t AudioPolicyManager::setUidDeviceAffinities(uid_t uid,
         const AudioDeviceTypeAddrVector& devices) {
     ALOGV("%s() uid=%d num devices %zu", __FUNCTION__, uid, devices.size());
@@ -3853,7 +3933,8 @@
     return NO_ERROR;
 }
 
-void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs)
+void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs,
+    bool skipDelays)
 {
     uint32_t waitMs = 0;
     bool wasLeUnicastActive = isLeUnicastActive();
@@ -3879,8 +3960,8 @@
                 continue;
             }
             waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
-                                      true /*requiresMuteCheck*/,
-                                      !forceRouting /*requiresVolumeCheck*/);
+                                      !skipDelays /*requiresMuteCheck*/,
+                                      !forceRouting /*requiresVolumeCheck*/, skipDelays);
             // Only apply special touch sound delay once
             delayMs = 0;
         }
@@ -4065,13 +4146,18 @@
 
     // reevaluate outputs for all devices
     checkForDeviceAndOutputChanges();
-    updateCallAndOutputRouting();
+    changeOutputDevicesMuteState(devices);
+    updateCallAndOutputRouting(false /* forceVolumeReeval */, 0 /* delayMs */,
+        true /* skipDelays */);
+    changeOutputDevicesMuteState(devices);
 
     return NO_ERROR;
 }
 
 status_t AudioPolicyManager::removeUserIdDeviceAffinities(int userId) {
     ALOGV("%s() userId=%d", __FUNCTION__, userId);
+    AudioDeviceTypeAddrVector devices;
+    mPolicyMixes.getDevicesForUserId(userId, devices);
     status_t status = mPolicyMixes.removeUserIdDeviceAffinities(userId);
     if (status != NO_ERROR) {
         ALOGE("%s() Could not remove all device affinities fo userId = %d",
@@ -4081,7 +4167,10 @@
 
     // reevaluate outputs for all devices
     checkForDeviceAndOutputChanges();
-    updateCallAndOutputRouting();
+    changeOutputDevicesMuteState(devices);
+    updateCallAndOutputRouting(false /* forceVolumeReeval */, 0 /* delayMs */,
+        true /* skipDelays */);
+    changeOutputDevicesMuteState(devices);
 
     return NO_ERROR;
 }
@@ -4328,8 +4417,8 @@
             if (!mAvailableOutputDevices.containsAtLeastOne(curProfile->getSupportedDevices())) {
                 continue;
             }
-            if ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)
-                        != AUDIO_OUTPUT_FLAG_NONE) {
+            if (offloadPossible && ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)
+                        != AUDIO_OUTPUT_FLAG_NONE)) {
                 if ((directMode & AUDIO_DIRECT_OFFLOAD_GAPLESS_SUPPORTED)
                         != AUDIO_DIRECT_NOT_SUPPORTED) {
                     // Already reports offload gapless supported. No need to report offload support.
@@ -4490,16 +4579,24 @@
 }
 
 sp<PreferredMixerAttributesInfo> AudioPolicyManager::getPreferredMixerAttributesInfo(
-        audio_port_handle_t devicePortId, product_strategy_t strategy) {
+        audio_port_handle_t devicePortId,
+        product_strategy_t strategy,
+        bool activeBitPerfectPreferred) {
     auto it = mPreferredMixerAttrInfos.find(devicePortId);
     if (it == mPreferredMixerAttrInfos.end()) {
         return nullptr;
     }
-    auto mixerAttrInfoIt = it->second.find(strategy);
-    if (mixerAttrInfoIt == it->second.end()) {
-        return nullptr;
+    if (activeBitPerfectPreferred) {
+        for (auto [strategy, info] : it->second) {
+            if ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+                && info->getActiveClientCount() != 0) {
+                return info;
+            }
+        }
     }
-    return mixerAttrInfoIt->second;
+    auto strategyMatchedMixerAttrInfoIt = it->second.find(strategy);
+    return strategyMatchedMixerAttrInfoIt == it->second.end()
+            ? nullptr : strategyMatchedMixerAttrInfoIt->second;
 }
 
 status_t AudioPolicyManager::getPreferredMixerAttributes(
@@ -5841,22 +5938,26 @@
         }
     }
 
+    // The caller can have the audio config criteria ignored by either passing a null ptr or
+    // the AUDIO_CONFIG_INITIALIZER value.
+    // If an audio config is specified, the current policy is to only allow spatialization for
+    // some positional channel masks and linear PCM formats.
+
+    if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
+        if (!audio_is_channel_mask_spatialized(config->channel_mask)) {
+            return false;
+        }
+        if (!audio_is_linear_pcm(config->format)) {
+            return false;
+        }
+    }
+
     sp<IOProfile> profile =
             getSpatializerOutputProfile(config, devices);
     if (profile == nullptr) {
         return false;
     }
 
-    // The caller can have the audio config criteria ignored by either passing a null ptr or
-    // the AUDIO_CONFIG_INITIALIZER value.
-    // If an audio config is specified, current policy is to only allow spatialization for
-    // some positional channel masks.
-
-    if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
-        if (!audio_is_channel_mask_spatialized(config->channel_mask)) {
-            return false;
-        }
-    }
     return true;
 }
 
@@ -6519,7 +6620,7 @@
                     mpClientInterface->setParameters(input, String8(param));
                     free(param);
                 }
-                updateAudioProfiles(device, input, profile->getAudioProfiles());
+                updateAudioProfiles(device, input, profile);
                 if (!profile->hasValidAudioProfile()) {
                     ALOGW("checkInputsForDevice() direct input missing param");
                     desc->close();
@@ -7320,7 +7421,8 @@
                                               bool force,
                                               int delayMs,
                                               audio_patch_handle_t *patchHandle,
-                                              bool requiresMuteCheck, bool requiresVolumeCheck)
+                                              bool requiresMuteCheck, bool requiresVolumeCheck,
+                                              bool skipMuteDelay)
 {
     // TODO(b/262404095): Consider if the output need to be reopened.
     ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
@@ -7328,9 +7430,9 @@
 
     if (outputDesc->isDuplicated()) {
         muteWaitMs = setOutputDevices(outputDesc->subOutput1(), devices, force, delayMs,
-                nullptr /* patchHandle */, requiresMuteCheck);
+                nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
         muteWaitMs += setOutputDevices(outputDesc->subOutput2(), devices, force, delayMs,
-                nullptr /* patchHandle */, requiresMuteCheck);
+                nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
         return muteWaitMs;
     }
 
@@ -7396,12 +7498,16 @@
 
         // Add half reported latency to delayMs when muteWaitMs is null in order
         // to avoid disordered sequence of muting volume and changing devices.
-        installPatch(__func__, patchHandle, outputDesc.get(), patchBuilder.patch(),
-                muteWaitMs == 0 ? (delayMs + (outputDesc->latency() / 2)) : delayMs);
+        int actualDelayMs = !skipMuteDelay && muteWaitMs == 0
+                ? (delayMs + (outputDesc->latency() / 2)) : delayMs;
+        installPatch(__func__, patchHandle, outputDesc.get(), patchBuilder.patch(), actualDelayMs);
     }
 
-    // update stream volumes according to new device
-    applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs);
+    // Since the mute is skipped, also skip applying stream volumes as they are applied externally
+    if (!skipMuteDelay) {
+        // update stream volumes according to new device
+        applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs);
+    }
 
     return muteWaitMs;
 }
@@ -7576,8 +7682,10 @@
     const auto musicVolumeSrc = toVolumeSource(AUDIO_STREAM_MUSIC, false);
     const auto alarmVolumeSrc = toVolumeSource(AUDIO_STREAM_ALARM, false);
     const auto a11yVolumeSrc = toVolumeSource(AUDIO_STREAM_ACCESSIBILITY, false);
-
-    if (volumeSource == a11yVolumeSrc
+    // Verify that the current volume source is not the ringer volume to prevent recursive
+    // volume computation. This could happen when a11y and ringer sounds belong to the same
+    // volume group.
+    if (volumeSource != ringVolumeSrc && volumeSource == a11yVolumeSrc
             && (AUDIO_MODE_RINGTONE == mEngine->getPhoneState()) &&
             mOutputs.isActive(ringVolumeSrc, 0)) {
         auto &ringCurves = getVolumeCurves(AUDIO_STREAM_RING);
@@ -7640,8 +7748,12 @@
         // when the phone is ringing we must consider that music could have been paused just before
         // by the music application and behave as if music was active if the last music track was
         // just stopped
-        if (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY) ||
-                mLimitRingtoneVolume) {
+        // Verify that the current volume source is not the music volume to prevent recursive
+        // volume computation. This could happen when music and (alarm, ring, notification,
+        // system, etc.) sounds belong to the same volume group.
+        if (volumeSource != musicVolumeSrc &&
+            (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY)
+                || mLimitRingtoneVolume)) {
             volumeDb += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
             DeviceTypeSet musicDevice =
                     mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
@@ -7719,26 +7831,16 @@
                outputDesc->getMuteCount(volumeSource), outputDesc->isActive(volumeSource));
         return NO_ERROR;
     }
-    VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
-    VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
-    bool isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
-    bool isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (btScoVolSrc == volumeSource);
 
-    bool isScoRequested = isScoRequestedForComm();
-    bool isHAUsed = isHearingAidUsedForComm();
-
-    // do not change in call volume if bluetooth is connected and vice versa
-    // if sco and call follow same curves, bypass forceUseForComm
-    if ((callVolSrc != btScoVolSrc) &&
-            ((isVoiceVolSrc && isScoRequested) ||
-             (isBtScoVolSrc && !(isScoRequested || isHAUsed))) &&
-            !isSingleDeviceType(deviceTypes, AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
-        ALOGV("%s cannot set volume group %d volume when is%srequested for comm", __func__,
-             volumeSource, isScoRequested ? " " : " not ");
+    bool isVoiceVolSrc;
+    bool isBtScoVolSrc;
+    if (!isVolumeConsistentForCalls(
+            volumeSource, deviceTypes, isVoiceVolSrc, isBtScoVolSrc, __func__)) {
         // Do not return an error here as AudioService will always set both voice call
-        // and bluetooth SCO volumes due to stream aliasing.
+        // and Bluetooth SCO volumes due to stream aliasing.
         return NO_ERROR;
     }
+
     if (deviceTypes.empty()) {
         deviceTypes = outputDesc->devices().types();
         index = curves.getVolumeIndex(deviceTypes);
@@ -7759,25 +7861,55 @@
         volumeDb = 0.0f;
     }
     const bool muted = (index == 0) && (volumeDb != 0.0f);
-    outputDesc->setVolume(
-            volumeDb, muted, volumeSource, curves.getStreamTypes(), deviceTypes, delayMs, force);
+    outputDesc->setVolume(volumeDb, muted, volumeSource, curves.getStreamTypes(),
+            deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
-        float voiceVolume;
-        // Force voice volume to max or mute for Bluetooth SCO as other attenuations are managed by the headset
-        if (isVoiceVolSrc) {
-            voiceVolume = (float)index/(float)curves.getVolumeIndexMax();
-        } else {
-            voiceVolume = index == 0 ? 0.0 : 1.0;
-        }
-        if (voiceVolume != mLastVoiceVolume) {
-            mpClientInterface->setVoiceVolume(voiceVolume, delayMs);
-            mLastVoiceVolume = voiceVolume;
-        }
+        setVoiceVolume(index, curves, isVoiceVolSrc, delayMs);
     }
     return NO_ERROR;
 }
 
+void AudioPolicyManager::setVoiceVolume(
+        int index, IVolumeCurves &curves, bool isVoiceVolSrc, int delayMs) {
+    float voiceVolume;
+    // Force voice volume to max or mute for Bluetooth SCO as other attenuations are managed
+    // by the headset
+    if (isVoiceVolSrc) {
+        voiceVolume = (float)index/(float)curves.getVolumeIndexMax();
+    } else {
+        voiceVolume = index == 0 ? 0.0 : 1.0;
+    }
+    if (voiceVolume != mLastVoiceVolume) {
+        mpClientInterface->setVoiceVolume(voiceVolume, delayMs);
+        mLastVoiceVolume = voiceVolume;
+    }
+}
+
+bool AudioPolicyManager::isVolumeConsistentForCalls(VolumeSource volumeSource,
+                                                   const DeviceTypeSet& deviceTypes,
+                                                   bool& isVoiceVolSrc,
+                                                   bool& isBtScoVolSrc,
+                                                   const char* caller) {
+    const VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
+    const VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
+    const bool isScoRequested = isScoRequestedForComm();
+    const bool isHAUsed = isHearingAidUsedForComm();
+
+    isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
+    isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (btScoVolSrc == volumeSource);
+
+    if ((callVolSrc != btScoVolSrc) &&
+            ((isVoiceVolSrc && isScoRequested) ||
+             (isBtScoVolSrc && !(isScoRequested || isHAUsed))) &&
+            !isSingleDeviceType(deviceTypes, AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
+        ALOGV("%s cannot set volume group %d volume when is%srequested for comm", caller,
+             volumeSource, isScoRequested ? " " : " not ");
+        return false;
+    }
+    return true;
+}
+
 void AudioPolicyManager::applyStreamVolumes(const sp<AudioOutputDescriptor>& outputDesc,
                                             const DeviceTypeSet& deviceTypes,
                                             int delayMs,
@@ -8049,77 +8181,54 @@
 
 void AudioPolicyManager::updateAudioProfiles(const sp<DeviceDescriptor>& devDesc,
                                              audio_io_handle_t ioHandle,
-                                             AudioProfileVector &profiles)
-{
-    String8 reply;
-    audio_devices_t device = devDesc->type();
-
-    // Format MUST be checked first to update the list of AudioProfile
-    if (profiles.hasDynamicFormat()) {
-        reply = mpClientInterface->getParameters(
-                ioHandle, String8(AudioParameter::keyStreamSupportedFormats));
-        ALOGV("%s: supported formats %d, %s", __FUNCTION__, ioHandle, reply.c_str());
-        AudioParameter repliedParameters(reply);
-        FormatVector formats;
-        if (repliedParameters.get(
-                String8(AudioParameter::keyStreamSupportedFormats), reply) == NO_ERROR) {
-            formats = formatsFromString(reply.c_str());
-        } else if (devDesc->hasValidAudioProfile()) {
-            ALOGD("%s: using the device profiles", __func__);
-            formats = devDesc->getAudioProfiles().getSupportedFormats();
-        } else {
-            ALOGE("%s: failed to retrieve format, bailing out", __func__);
-            return;
-        }
-        mReportedFormatsMap[devDesc] = formats;
-        if (device == AUDIO_DEVICE_OUT_HDMI
-                || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD)) {
-            modifySurroundFormats(devDesc, &formats);
-        }
-        addProfilesForFormats(profiles, formats);
+                                             const sp<IOProfile>& profile) {
+    if (!profile->hasDynamicAudioProfile()) {
+        return;
     }
 
-    for (audio_format_t format : profiles.getSupportedFormats()) {
-        std::optional<ChannelMaskSet> channelMasks;
-        SampleRateSet samplingRates;
-        AudioParameter requestedParameters;
-        requestedParameters.addInt(String8(AudioParameter::keyFormat), format);
+    audio_port_v7 devicePort;
+    devDesc->toAudioPort(&devicePort);
 
-        if (profiles.hasDynamicRateFor(format)) {
-            reply = mpClientInterface->getParameters(
-                    ioHandle,
-                    requestedParameters.toString() + ";" +
-                    AudioParameter::keyStreamSupportedSamplingRates);
-            ALOGV("%s: supported sampling rates %s", __FUNCTION__, reply.c_str());
-            AudioParameter repliedParameters(reply);
-            if (repliedParameters.get(
-                    String8(AudioParameter::keyStreamSupportedSamplingRates), reply) == NO_ERROR) {
-                samplingRates = samplingRatesFromString(reply.c_str());
-            } else {
-                samplingRates = devDesc->getAudioProfiles().getSampleRatesFor(format);
-            }
-        }
-        if (profiles.hasDynamicChannelsFor(format)) {
-            reply = mpClientInterface->getParameters(ioHandle,
-                                                     requestedParameters.toString() + ";" +
-                                                     AudioParameter::keyStreamSupportedChannels);
-            ALOGV("%s: supported channel masks %s", __FUNCTION__, reply.c_str());
-            AudioParameter repliedParameters(reply);
-            if (repliedParameters.get(
-                    String8(AudioParameter::keyStreamSupportedChannels), reply) == NO_ERROR) {
-                channelMasks = channelMasksFromString(reply.c_str());
-            } else {
-                channelMasks = devDesc->getAudioProfiles().getChannelMasksFor(format);
-            }
-            if (channelMasks.has_value() && (device == AUDIO_DEVICE_OUT_HDMI
-                    || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD))) {
-                modifySurroundChannelMasks(&channelMasks.value());
-            }
-        }
-        addDynamicAudioProfileAndSort(
-                profiles, new AudioProfile(
-                        format, channelMasks.value_or(ChannelMaskSet()), samplingRates));
+    audio_port_v7 mixPort;
+    profile->toAudioPort(&mixPort);
+    mixPort.ext.mix.handle = ioHandle;
+
+    status_t status = mpClientInterface->getAudioMixPort(&devicePort, &mixPort);
+    if (status != NO_ERROR) {
+        ALOGE("%s failed to query the attributes of the mix port", __func__);
+        return;
     }
+
+    std::set<audio_format_t> supportedFormats;
+    for (size_t i = 0; i < mixPort.num_audio_profiles; ++i) {
+        supportedFormats.insert(mixPort.audio_profiles[i].format);
+    }
+    FormatVector formats(supportedFormats.begin(), supportedFormats.end());
+    mReportedFormatsMap[devDesc] = formats;
+
+    if (devDesc->type() == AUDIO_DEVICE_OUT_HDMI ||
+        isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD)) {
+        modifySurroundFormats(devDesc, &formats);
+        size_t modifiedNumProfiles = 0;
+        for (size_t i = 0; i < mixPort.num_audio_profiles; ++i) {
+            if (std::find(formats.begin(), formats.end(), mixPort.audio_profiles[i].format) ==
+                formats.end()) {
+                // Skip the format that is not present after modifying surround formats.
+                continue;
+            }
+            memcpy(&mixPort.audio_profiles[modifiedNumProfiles], &mixPort.audio_profiles[i],
+                   sizeof(struct audio_profile));
+            ChannelMaskSet channels(mixPort.audio_profiles[modifiedNumProfiles].channel_masks,
+                    mixPort.audio_profiles[modifiedNumProfiles].channel_masks +
+                            mixPort.audio_profiles[modifiedNumProfiles].num_channel_masks);
+            modifySurroundChannelMasks(&channels);
+            std::copy(channels.begin(), channels.end(),
+                      std::begin(mixPort.audio_profiles[modifiedNumProfiles].channel_masks));
+            mixPort.audio_profiles[modifiedNumProfiles++].num_channel_masks = channels.size();
+        }
+        mixPort.num_audio_profiles = modifiedNumProfiles;
+    }
+    profile->importAudioPort(mixPort);
 }
 
 status_t AudioPolicyManager::installPatch(const char *caller,
@@ -8246,7 +8355,7 @@
         mpClientInterface->setParameters(output, String8(param));
         free(param);
     }
-    updateAudioProfiles(device, output, profile->getAudioProfiles());
+    updateAudioProfiles(device, output, profile);
     if (!profile->hasValidAudioProfile()) {
         ALOGW("%s() missing param", __func__);
         desc->close();
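
The updateAudioProfiles() rewrite above replaces the old getParameters() string parsing
(keyStreamSupportedFormats/SamplingRates/Channels) with a structured query through the new
getAudioMixPort() client API: the device and mix ports are described as audio_port_v7
structures, AudioFlinger fills in the mix port's dynamic profiles, and the result is imported
into the IOProfile after the usual HDMI/MSD surround-format adjustments. A small sketch of
reading the returned capabilities, using only audio_port_v7 fields that appear above (the
helper name is illustrative, not part of the change):

    #include <set>
    #include <system/audio.h>

    // Sketch only: collect the formats reported in a resolved mix port, mirroring
    // what updateAudioProfiles() does before its surround-format post-processing.
    static std::set<audio_format_t> collectSupportedFormats(const struct audio_port_v7& mixPort) {
        std::set<audio_format_t> formats;
        for (size_t i = 0; i < mixPort.num_audio_profiles; ++i) {
            formats.insert(mixPort.audio_profiles[i].format);
        }
        return formats;
    }
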
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 88bafef..8f8550c 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -535,8 +535,9 @@
          *        and currently active, allow to have proper drain and avoid pops
          * @param requiresVolumeCheck true if called requires to reapply volume if the routing did
          * not change (but the output is still routed).
+         * @param skipMuteDelay if true, skip the mute delay when installing the audio patch
          * @return the number of ms we have slept to allow new routing to take effect in certain
-         * cases.
+         *        cases.
          */
         uint32_t setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
                                   const DeviceVector &device,
@@ -544,7 +545,8 @@
                                   int delayMs = 0,
                                   audio_patch_handle_t *patchHandle = NULL,
                                   bool requiresMuteCheck = true,
-                                  bool requiresVolumeCheck = false);
+                                  bool requiresVolumeCheck = false,
+                                  bool skipMuteDelay = false);
         status_t resetOutputDevice(const sp<AudioOutputDescriptor>& outputDesc,
                                    int delayMs = 0,
                                    audio_patch_handle_t *patchHandle = NULL);
@@ -573,6 +575,20 @@
                                            DeviceTypeSet deviceTypes,
                                            int delayMs = 0, bool force = false);
 
+        void setVoiceVolume(int index, IVolumeCurves &curves, bool isVoiceVolSrc, int delayMs);
+
+        // Returns true if the supplied volume source and devices are consistent with
+        // call volume rules:
+        // if Bluetooth SCO and voice call use different volume curves:
+        // - do not apply voice call volume if Bluetooth SCO is used for call
+        // - do not apply Bluetooth SCO volume if SCO or Hearing Aid is not used for call.
+        // Also updates the booleans isVoiceVolSrc and isBtScoVolSrc according to the
+        // volume source supplied.
+        bool isVolumeConsistentForCalls(VolumeSource volumeSource,
+                                       const DeviceTypeSet& deviceTypes,
+                                       bool& isVoiceVolSrc,
+                                       bool& isBtScoVolSrc,
+                                       const char* caller);
         // apply all stream volumes to the specified output and device
         void applyStreamVolumes(const sp<AudioOutputDescriptor>& outputDesc,
                                 const DeviceTypeSet& deviceTypes,
@@ -647,8 +663,10 @@
         /**
          * @brief updates routing for all outputs (including call if call in progress).
          * @param delayMs delay for unmuting if required
+         * @param skipDelays if true, all delays will be skipped while updating routing
          */
-        void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0);
+        void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0,
+                bool skipDelays = false);
 
         bool isCallRxAudioSource(const sp<SourceClientDescriptor> &source) {
             return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
@@ -1040,9 +1058,9 @@
         void modifySurroundFormats(const sp<DeviceDescriptor>& devDesc, FormatVector *formatsPtr);
         void modifySurroundChannelMasks(ChannelMaskSet *channelMasksPtr);
 
-        // If any, resolve any "dynamic" fields of an Audio Profiles collection
+        // If any, resolve any "dynamic" fields of the Audio Profiles collection of an IOProfile
         void updateAudioProfiles(const sp<DeviceDescriptor>& devDesc, audio_io_handle_t ioHandle,
-                AudioProfileVector &profiles);
+                const sp<IOProfile> &profile);
 
         // Notify the policy client to prepare for disconnecting external device.
         void prepareToDisconnectExternalDevice(const sp<DeviceDescriptor> &device);
@@ -1241,6 +1259,21 @@
                 const char* context,
                 bool matchAddress = true);
 
+        /**
+         * @brief changeOutputDevicesMuteState mutes/unmutes devices using checkDeviceMuteStrategies
+         * @param devices devices to mute/unmute
+         */
+        void changeOutputDevicesMuteState(const AudioDeviceTypeAddrVector& devices);
+
+        /**
+         * @brief Returns a vector of software output descriptors that support the queried devices
+         * @param devices devices to query
+         * @return open outputs where the devices are supported, as determined by
+         *      SwAudioOutputDescriptor::supportsAtLeastOne
+         */
+        std::vector<sp<SwAudioOutputDescriptor>> getSoftwareOutputsForDevices(
+                const AudioDeviceTypeAddrVector& devices) const;
+
         bool isScoRequestedForComm() const;
 
         bool isHearingAidUsedForComm() const;
@@ -1298,8 +1331,15 @@
                                        uint32_t flags,
                                        bool isInput);
 
+        /**
+         * Returns the preferred mixer attributes info for the given device port id and strategy.
+         * If `activeBitPerfectPreferred` is true, an active bit-perfect mixer attributes
+         * info is returned in preference to the strategy match.
+         */
         sp<PreferredMixerAttributesInfo> getPreferredMixerAttributesInfo(
-                audio_port_handle_t devicePortId, product_strategy_t strategy);
+                audio_port_handle_t devicePortId,
+                product_strategy_t strategy,
+                bool activeBitPerfectPreferred = false);
 
         sp<SwAudioOutputDescriptor> reopenOutput(
                 sp<SwAudioOutputDescriptor> outputDesc,
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 2874824..6de71a3 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -164,6 +164,8 @@
     status_t status = af->openInput(request, &response);
     if (status == OK) {
         *input = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_module_handle_t(response.input));
+        *config = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioConfig_audio_config_t(response.config, true /*isInput*/));
     }
     return status;
 }
@@ -340,4 +342,14 @@
     return af->invalidateTracks(portIds);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::getAudioMixPort(
+        const struct audio_port_v7 *devicePort,
+        struct audio_port_v7 *port) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == 0) {
+        return PERMISSION_DENIED;
+    }
+    return af->getAudioMixPort(devicePort, port);
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index d7aa5c9..5fbbc30 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -25,6 +25,7 @@
 #include <sys/time.h>
 #include <dlfcn.h>
 
+#include <android/content/pm/IPackageManagerNative.h>
 #include <audio_utils/clock.h>
 #include <binder/IServiceManager.h>
 #include <utils/Log.h>
@@ -51,6 +52,7 @@
 
 namespace android {
 using binder::Status;
+using media::audio::common::Spatialization;
 
 static const char kDeadlockedString[] = "AudioPolicyService may be deadlocked\n";
 static const char kCmdDeadlockedString[] = "AudioPolicyService command thread may be deadlocked\n";
@@ -215,6 +217,27 @@
 {
     delete interface;
 }
+
+namespace {
+int getTargetSdkForPackageName(std::string_view packageName) {
+    const auto binder = defaultServiceManager()->checkService(String16{"package_native"});
+    int targetSdk = -1;
+    if (binder != nullptr) {
+        const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
+        if (pm != nullptr) {
+            const auto status = pm->getTargetSdkVersionForPackage(
+                    String16{packageName.data(), packageName.size()}, &targetSdk);
+            ALOGI("Capy check package %s, sdk %d", packageName.data(), targetSdk);
+            return status.isOk() ? targetSdk : -1;
+        }
+    }
+    return targetSdk;
+}
+
+bool doesPackageTargetAtLeastU(std::string_view packageName) {
+    return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
+}
+} // anonymous namespace
 // ----------------------------------------------------------------------------
 
 AudioPolicyService::AudioPolicyService()
@@ -552,7 +575,7 @@
 
     if (mSpatializer != nullptr) {
         // Note: mSpatializer != nullptr =>  mAudioPolicyManager != nullptr
-        if (mSpatializer->getLevel() != media::SpatializationLevel::NONE) {
+        if (mSpatializer->getLevel() != Spatialization::Level::NONE) {
             audio_io_handle_t currentOutput = mSpatializer->getOutput();
             audio_io_handle_t newOutput;
             const audio_attributes_t attr = attributes_initializer(AUDIO_USAGE_MEDIA);
@@ -577,8 +600,8 @@
             if (status != NO_ERROR) {
                 mAudioPolicyManager->releaseSpatializerOutput(newOutput);
             }
-        } else if (mSpatializer->getLevel() == media::SpatializationLevel::NONE
-                               && mSpatializer->getOutput() != AUDIO_IO_HANDLE_NONE) {
+        } else if (mSpatializer->getLevel() == Spatialization::Level::NONE &&
+                   mSpatializer->getOutput() != AUDIO_IO_HANDLE_NONE) {
             audio_io_handle_t output = mSpatializer->detachOutput();
 
             if (output != AUDIO_IO_HANDLE_NONE) {
@@ -1926,10 +1949,14 @@
     checkOp();
     mOpCallback = new RecordAudioOpCallback(this);
     ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+    int flags = doesPackageTargetAtLeastU(
+            mAttributionSource.packageName.value_or("")) ?
+            AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
     // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
     // since it controls the mic permission for legacy apps.
     mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
         mAttributionSource.packageName.value_or(""))),
+        flags,
         mOpCallback);
 }
 
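
The helpers added in the anonymous namespace above query the native package manager
("package_native") for a package's target SDK so that AppOps foreground-change watching is
only requested for apps targeting Android U or later. A hedged usage sketch of that gating
(fragment only: appOpsManager, op, packageName, and callback are assumed to exist in the
surrounding code; doesPackageTargetAtLeastU() and WATCH_FOREGROUND_CHANGES come from the
change above):

    // Sketch only: request foreground-change notifications only for apps targeting U+.
    const int32_t flags = doesPackageTargetAtLeastU(packageName)
            ? AppOpsManager::WATCH_FOREGROUND_CHANGES
            : 0;
    appOpsManager.startWatchingMode(op, String16(packageName.c_str()), flags, callback);
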
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index d0cde64..8d5628f 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -856,6 +856,9 @@
 
         status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
 
+        status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
+                                 struct audio_port_v7 *port) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 1245b1e..7859c2c 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -43,16 +43,16 @@
 
 namespace android {
 
-using aidl_utils::statusTFromBinderStatus;
 using aidl_utils::binderStatusFromStatusT;
+using aidl_utils::statusTFromBinderStatus;
 using android::content::AttributionSourceState;
 using binder::Status;
 using media::HeadTrackingMode;
 using media::Pose3f;
-using media::SpatializationLevel;
-using media::SpatializationMode;
-using media::SpatializerHeadTrackingMode;
 using media::SensorPoseProvider;
+using media::audio::common::HeadTracking;
+using media::audio::common::Spatialization;
+using ::android::internal::ToString;
 
 using namespace std::chrono_literals;
 
@@ -302,7 +302,7 @@
     }
     mSupportsHeadTracking = supportsHeadTracking[0];
 
-    std::vector<media::SpatializationLevel> spatializationLevels;
+    std::vector<Spatialization::Level> spatializationLevels;
     status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_LEVELS,
             &spatializationLevels);
     if (status != NO_ERROR) {
@@ -316,7 +316,7 @@
             ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
             continue;
         }
-        if (spatializationLevel == media::SpatializationLevel::NONE) {
+        if (spatializationLevel == Spatialization::Level::NONE) {
             noneLevelFound = true;
         } else {
             activeLevelFound = true;
@@ -330,7 +330,7 @@
         return BAD_VALUE;
     }
 
-    std::vector<media::SpatializationMode> spatializationModes;
+    std::vector<Spatialization::Mode> spatializationModes;
     status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES,
             &spatializationModes);
     if (status != NO_ERROR) {
@@ -373,9 +373,9 @@
 
     // Currently we expose only RELATIVE_WORLD.
     // This is a limitation of the head tracking library based on a UX choice.
-    mHeadTrackingModes.push_back(SpatializerHeadTrackingMode::DISABLED);
+    mHeadTrackingModes.push_back(HeadTracking::Mode::DISABLED);
     if (mSupportsHeadTracking) {
-        mHeadTrackingModes.push_back(SpatializerHeadTrackingMode::RELATIVE_WORLD);
+        mHeadTrackingModes.push_back(HeadTracking::Mode::RELATIVE_WORLD);
     }
     mediametrics::LogItem(mMetricsId)
         .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
@@ -440,7 +440,7 @@
 void Spatializer::binderDied(__unused const wp<IBinder> &who) {
     {
         std::lock_guard lock(mLock);
-        mLevel = SpatializationLevel::NONE;
+        mLevel = Spatialization::Level::NONE;
         mSpatializerCallback.clear();
     }
     ALOGV("%s", __func__);
@@ -448,20 +448,20 @@
 }
 
 // ISpatializer
-Status Spatializer::getSupportedLevels(std::vector<SpatializationLevel> *levels) {
+Status Spatializer::getSupportedLevels(std::vector<Spatialization::Level> *levels) {
     ALOGV("%s", __func__);
     if (levels == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    // SpatializationLevel::NONE is already required from the effect or we don't load it.
+    // Spatialization::Level::NONE is already required from the effect or we don't load it.
     levels->insert(levels->end(), mLevels.begin(), mLevels.end());
     return Status::ok();
 }
 
-Status Spatializer::setLevel(SpatializationLevel level) {
-    ALOGV("%s level %s", __func__, media::toString(level).c_str());
-    mLocalLog.log("%s with %s", __func__, media::toString(level).c_str());
-    if (level != SpatializationLevel::NONE
+Status Spatializer::setLevel(Spatialization::Level level) {
+    ALOGV("%s level %s", __func__,  ToString(level).c_str());
+    mLocalLog.log("%s with %s", __func__, ToString(level).c_str());
+    if (level != Spatialization::Level::NONE
             && std::find(mLevels.begin(), mLevels.end(), level) == mLevels.end()) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
@@ -488,7 +488,7 @@
     return Status::ok();
 }
 
-Status Spatializer::getLevel(SpatializationLevel *level) {
+Status Spatializer::getLevel(Spatialization::Level *level) {
     if (level == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
@@ -509,7 +509,7 @@
 }
 
 Status Spatializer::getSupportedHeadTrackingModes(
-        std::vector<SpatializerHeadTrackingMode>* modes) {
+        std::vector<HeadTracking::Mode>* modes) {
     std::lock_guard lock(mLock);
     ALOGV("%s", __func__);
     if (modes == nullptr) {
@@ -519,24 +519,24 @@
     return Status::ok();
 }
 
-Status Spatializer::setDesiredHeadTrackingMode(SpatializerHeadTrackingMode mode) {
-    ALOGV("%s mode %s", __func__, media::toString(mode).c_str());
+Status Spatializer::setDesiredHeadTrackingMode(HeadTracking::Mode mode) {
+    ALOGV("%s mode %s", __func__, ToString(mode).c_str());
 
     if (!mSupportsHeadTracking) {
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
-    mLocalLog.log("%s with %s", __func__, media::toString(mode).c_str());
+    mLocalLog.log("%s with %s", __func__, ToString(mode).c_str());
     std::lock_guard lock(mLock);
     switch (mode) {
-        case SpatializerHeadTrackingMode::OTHER:
+        case HeadTracking::Mode::OTHER:
             return binderStatusFromStatusT(BAD_VALUE);
-        case SpatializerHeadTrackingMode::DISABLED:
+        case HeadTracking::Mode::DISABLED:
             mDesiredHeadTrackingMode = HeadTrackingMode::STATIC;
             break;
-        case SpatializerHeadTrackingMode::RELATIVE_WORLD:
+        case HeadTracking::Mode::RELATIVE_WORLD:
             mDesiredHeadTrackingMode = HeadTrackingMode::WORLD_RELATIVE;
             break;
-        case SpatializerHeadTrackingMode::RELATIVE_SCREEN:
+        case HeadTracking::Mode::RELATIVE_SCREEN:
             mDesiredHeadTrackingMode = HeadTrackingMode::SCREEN_RELATIVE;
             break;
     }
@@ -547,7 +547,7 @@
     return Status::ok();
 }
 
-Status Spatializer::getActualHeadTrackingMode(SpatializerHeadTrackingMode *mode) {
+Status Spatializer::getActualHeadTrackingMode(HeadTracking::Mode *mode) {
     if (mode == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
@@ -600,8 +600,8 @@
         binder->unlinkToDeath(this);
         mSpatializerCallback.clear();
 
-        levelChanged = mLevel != SpatializationLevel::NONE;
-        mLevel = SpatializationLevel::NONE;
+        levelChanged = mLevel != Spatialization::Level::NONE;
+        mLevel = Spatialization::Level::NONE;
     }
 
     if (levelChanged) {
@@ -690,7 +690,7 @@
     return Status::ok();
 }
 
-Status Spatializer::getSupportedModes(std::vector<SpatializationMode> *modes) {
+Status Spatializer::getSupportedModes(std::vector<Spatialization::Mode> *modes) {
     ALOGV("%s", __func__);
     if (modes == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
@@ -771,7 +771,7 @@
     const std::vector<float> headToStage(6, 0.0);
     setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
     setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
-            std::vector<SpatializerHeadTrackingMode>{SpatializerHeadTrackingMode::DISABLED});
+            std::vector<HeadTracking::Mode>{HeadTracking::Mode::DISABLED});
 }
 
 void Spatializer::onHeadToStagePoseMsg(const std::vector<float>& headToStage) {
@@ -804,21 +804,21 @@
 void Spatializer::onActualModeChangeMsg(HeadTrackingMode mode) {
     ALOGV("%s(%d)", __func__, (int) mode);
     sp<media::ISpatializerHeadTrackingCallback> callback;
-    SpatializerHeadTrackingMode spatializerMode;
+    HeadTracking::Mode spatializerMode;
     {
         std::lock_guard lock(mLock);
         if (!mSupportsHeadTracking) {
-            spatializerMode = SpatializerHeadTrackingMode::DISABLED;
+            spatializerMode = HeadTracking::Mode::DISABLED;
         } else {
             switch (mode) {
                 case HeadTrackingMode::STATIC:
-                    spatializerMode = SpatializerHeadTrackingMode::DISABLED;
+                    spatializerMode = HeadTracking::Mode::DISABLED;
                     break;
                 case HeadTrackingMode::WORLD_RELATIVE:
-                    spatializerMode = SpatializerHeadTrackingMode::RELATIVE_WORLD;
+                    spatializerMode = HeadTracking::Mode::RELATIVE_WORLD;
                     break;
                 case HeadTrackingMode::SCREEN_RELATIVE:
-                    spatializerMode = SpatializerHeadTrackingMode::RELATIVE_SCREEN;
+                    spatializerMode = HeadTracking::Mode::RELATIVE_SCREEN;
                     break;
                 default:
                     LOG_ALWAYS_FATAL("Unknown mode: %d", mode);
@@ -826,11 +826,11 @@
         }
         mActualHeadTrackingMode = spatializerMode;
         if (mEngine != nullptr) {
-            if (spatializerMode == SpatializerHeadTrackingMode::DISABLED) {
+            if (spatializerMode == HeadTracking::Mode::DISABLED) {
                 resetEngineHeadPose_l();
             } else {
                 setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
-                                     std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+                                     std::vector<HeadTracking::Mode>{spatializerMode});
             }
         }
         callback = mHeadTrackingCallback;
@@ -974,12 +974,12 @@
         if (mPoseController != nullptr) {
             // TODO(b/253297301, b/255433067) reenable low latency condition check
             // for Head Tracking after Bluetooth HAL supports it correctly.
-            if (mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+            if (mNumActiveTracks > 0 && mLevel != Spatialization::Level::NONE
                 && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
                 && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
                 if (mEngine != nullptr) {
                     setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
-                            std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
+                            std::vector<HeadTracking::Mode>{mActualHeadTrackingMode});
                 }
                 mPoseController->setHeadSensor(mHeadSensor);
                 mPoseController->setScreenSensor(mScreenSensor);
@@ -996,20 +996,20 @@
     if (mOutput != AUDIO_IO_HANDLE_NONE && supportsSetLatencyMode) {
         const status_t status =
                 AudioSystem::setRequestedLatencyMode(mOutput, requestedLatencyMode);
-        ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d",
-                __func__, mOutput, toString(requestedLatencyMode).c_str(), status);
+        ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d", __func__,
+              mOutput, toString(requestedLatencyMode).c_str(), status);
     }
 }
 
 void Spatializer::checkEngineState_l() {
     if (mEngine != nullptr) {
-        if (mLevel != SpatializationLevel::NONE && mNumActiveTracks > 0) {
+        if (mLevel != Spatialization::Level::NONE && mNumActiveTracks > 0) {
             mEngine->setEnabled(true);
             setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
-                    std::vector<SpatializationLevel>{mLevel});
+                    std::vector<Spatialization::Level>{mLevel});
         } else {
             setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
-                    std::vector<SpatializationLevel>{SpatializationLevel::NONE});
+                    std::vector<Spatialization::Level>{Spatialization::Level::NONE});
             mEngine->setEnabled(false);
         }
     }
@@ -1067,21 +1067,21 @@
     // 1. Capabilities (mLevels, mHeadTrackingModes, mSpatializationModes, mChannelMasks, etc)
     ss.append(prefixSpace).append("Supported levels: [");
     for (auto& level : mLevels) {
-        base::StringAppendF(&ss, " %s", media::toString(level).c_str());
+        base::StringAppendF(&ss, " %s", ToString(level).c_str());
     }
-    base::StringAppendF(&ss, "], mLevel: %s", media::toString(mLevel).c_str());
+    base::StringAppendF(&ss, "], mLevel: %s", ToString(mLevel).c_str());
 
     base::StringAppendF(&ss, "\n%smHeadTrackingModes: [", prefixSpace.c_str());
     for (auto& mode : mHeadTrackingModes) {
-        base::StringAppendF(&ss, " %s", media::toString(mode).c_str());
+        base::StringAppendF(&ss, " %s", ToString(mode).c_str());
     }
     base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
                         media::toString(mDesiredHeadTrackingMode).c_str(),
-                        media::toString(mActualHeadTrackingMode).c_str());
+                        ToString(mActualHeadTrackingMode).c_str());
 
     base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
     for (auto& mode : mSpatializationModes) {
-        base::StringAppendF(&ss, " %s", media::toString(mode).c_str());
+        base::StringAppendF(&ss, " %s", ToString(mode).c_str());
     }
     ss += "]\n";
 
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 0d4d3f6..4ef07ce 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -20,10 +20,9 @@
 #include <android-base/stringprintf.h>
 #include <android/media/BnEffect.h>
 #include <android/media/BnSpatializer.h>
-#include <android/media/SpatializationLevel.h>
-#include <android/media/SpatializationMode.h>
-#include <android/media/SpatializerHeadTrackingMode.h>
 #include <android/media/audio/common/AudioLatencyMode.h>
+#include <android/media/audio/common/HeadTracking.h>
+#include <android/media/audio/common/Spatialization.h>
 #include <audio_utils/SimpleLog.h>
 #include <math.h>
 #include <media/AudioEffect.h>
@@ -106,16 +105,17 @@
 
     /** ISpatializer, see ISpatializer.aidl */
     binder::Status release() override;
-    binder::Status getSupportedLevels(std::vector<media::SpatializationLevel>* levels) override;
-    binder::Status setLevel(media::SpatializationLevel level) override;
-    binder::Status getLevel(media::SpatializationLevel *level) override;
+    binder::Status getSupportedLevels(
+            std::vector<media::audio::common::Spatialization::Level>* levels) override;
+    binder::Status setLevel(media::audio::common::Spatialization::Level level) override;
+    binder::Status getLevel(media::audio::common::Spatialization::Level *level) override;
     binder::Status isHeadTrackingSupported(bool *supports);
     binder::Status getSupportedHeadTrackingModes(
-            std::vector<media::SpatializerHeadTrackingMode>* modes) override;
+            std::vector<media::audio::common::HeadTracking::Mode>* modes) override;
     binder::Status setDesiredHeadTrackingMode(
-            media::SpatializerHeadTrackingMode mode) override;
+            media::audio::common::HeadTracking::Mode mode) override;
     binder::Status getActualHeadTrackingMode(
-            media::SpatializerHeadTrackingMode* mode) override;
+            media::audio::common::HeadTracking::Mode* mode) override;
     binder::Status recenterHeadTracker() override;
     binder::Status setGlobalTransform(const std::vector<float>& screenToStage) override;
     binder::Status setHeadSensor(int sensorHandle) override;
@@ -123,7 +123,8 @@
     binder::Status setDisplayOrientation(float physicalToLogicalAngle) override;
     binder::Status setHingeAngle(float hingeAngle) override;
     binder::Status setFoldState(bool folded) override;
-    binder::Status getSupportedModes(std::vector<media::SpatializationMode>* modes) override;
+    binder::Status getSupportedModes(
+            std::vector<media::audio::common::Spatialization::Mode>* modes) override;
     binder::Status registerHeadTrackingCallback(
         const sp<media::ISpatializerHeadTrackingCallback>& callback) override;
     binder::Status setParameter(int key, const std::vector<unsigned char>& value) override;
@@ -145,7 +146,10 @@
     status_t loadEngineConfiguration(sp<EffectHalInterface> effect);
 
     /** Level getter for use by local classes. */
-    media::SpatializationLevel getLevel() const { std::lock_guard lock(mLock); return mLevel; }
+    media::audio::common::Spatialization::Level getLevel() const {
+        std::lock_guard lock(mLock);
+        return mLevel;
+    }
 
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is opened and the spatializer engine must be created.
@@ -360,7 +364,8 @@
     sp<media::ISpatializerHeadTrackingCallback> mHeadTrackingCallback GUARDED_BY(mLock);
 
     /** Requested spatialization level */
-    media::SpatializationLevel mLevel GUARDED_BY(mLock) = media::SpatializationLevel::NONE;
+    media::audio::common::Spatialization::Level mLevel GUARDED_BY(mLock) =
+            media::audio::common::Spatialization::Level::NONE;
 
     /** Control logic for head-tracking, etc. */
     std::shared_ptr<SpatializerPoseController> mPoseController GUARDED_BY(mLock);
@@ -370,8 +375,8 @@
             = media::HeadTrackingMode::STATIC;
 
     /** Last-reported actual head-tracking mode. */
-    media::SpatializerHeadTrackingMode mActualHeadTrackingMode GUARDED_BY(mLock)
-            = media::SpatializerHeadTrackingMode::DISABLED;
+    media::audio::common::HeadTracking::Mode mActualHeadTrackingMode GUARDED_BY(mLock)
+            = media::audio::common::HeadTracking::Mode::DISABLED;
 
     /** Selected Head pose sensor */
     int32_t mHeadSensor GUARDED_BY(mLock) = SpatializerPoseController::INVALID_SENSOR;
@@ -388,9 +393,9 @@
     /** Last hinge angle */
     float mHingeAngle GUARDED_BY(mLock) = 0.f;  // foldable: 0.f is closed, M_PI flat open.
 
-    std::vector<media::SpatializationLevel> mLevels;
-    std::vector<media::SpatializerHeadTrackingMode> mHeadTrackingModes;
-    std::vector<media::SpatializationMode> mSpatializationModes;
+    std::vector<media::audio::common::Spatialization::Level> mLevels;
+    std::vector<media::audio::common::HeadTracking::Mode> mHeadTrackingModes;
+    std::vector<media::audio::common::Spatialization::Mode> mSpatializationModes;
     std::vector<audio_channel_mask_t> mChannelMasks;
     bool mSupportsHeadTracking;
 
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 3629c16..7ef0266 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -201,6 +201,26 @@
         return mAudioParameters.toString();
     }
 
+    status_t getAudioMixPort(const struct audio_port_v7 *devicePort __unused,
+                             struct audio_port_v7 *mixPort) override {
+        mixPort->num_audio_profiles = 0;
+        for (auto format : mSupportedFormats) {
+            const int i = mixPort->num_audio_profiles;
+            mixPort->audio_profiles[i].format = format;
+            mixPort->audio_profiles[i].num_sample_rates = 1;
+            mixPort->audio_profiles[i].sample_rates[0] = 48000;
+            mixPort->audio_profiles[i].num_channel_masks = 0;
+            for (const auto& cm : mSupportedChannelMasks) {
+                if (audio_channel_mask_is_valid(cm)) {
+                    mixPort->audio_profiles[i].channel_masks[
+                            mixPort->audio_profiles[i].num_channel_masks++] = cm;
+                }
+            }
+            mixPort->num_audio_profiles++;
+        }
+        return NO_ERROR;
+    }
+
     void addSupportedFormat(audio_format_t format) {
         mSupportedFormats.insert(format);
     }
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 2ae0e97..e55e935 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -106,6 +106,10 @@
     status_t invalidateTracks(const std::vector<audio_port_handle_t>& /*portIds*/) override {
         return NO_INIT;
     }
+    status_t getAudioMixPort(const struct audio_port_v7 *devicePort __unused,
+                             struct audio_port_v7 *mixPort __unused) override {
+        return INVALID_OPERATION;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 5e58dbb..7c5ab08 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -1232,6 +1232,19 @@
     EXPECT_FALSE(isBitPerfect);
     EXPECT_EQ(bitPerfectOutput, output);
 
+    const audio_attributes_t dtmfAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    };
+    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
+            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(bitPerfectOutput, dtmfOutput);
+
     // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
     // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
     getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
@@ -1423,6 +1436,7 @@
     ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTest::SetUp());
     mClient->addSupportedFormat(AUDIO_FORMAT_AC3);
     mClient->addSupportedFormat(AUDIO_FORMAT_E_AC3);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
     mManager->setDeviceConnectionState(
             AUDIO_DEVICE_OUT_HDMI, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
             "" /*address*/, "" /*name*/, AUDIO_FORMAT_DEFAULT);
@@ -1548,13 +1562,13 @@
     mManager->setForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
 
-    ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(GetParam(), false /*enabled*/));
-    auto formats = getFormatsFromPorts();
-    ASSERT_EQ(0, formats.count(GetParam()));
-
     ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(GetParam(), true /*enabled*/));
-    formats = getFormatsFromPorts();
+    auto formats = getFormatsFromPorts();
     ASSERT_EQ(1, formats.count(GetParam()));
+
+    ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(GetParam(), false /*enabled*/));
+    formats = getFormatsFromPorts();
+    ASSERT_EQ(0, formats.count(GetParam()));
 }
 
 TEST_P(AudioPolicyManagerTestForHdmi,
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3e7af3d..ae5e9a1 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -117,6 +117,10 @@
     android_atomic_write(level, &gLogLevel);
 }
 
+int32_t format_as(CameraService::StatusInternal s) {
+  return fmt::underlying(s);
+}
+
 // ----------------------------------------------------------------------------
 
 static const std::string sDumpPermission("android.permission.DUMP");
@@ -305,14 +309,23 @@
     for (auto& i : mListenerList) {
         if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
                 i->getListenerUid())) {
-            ALOGV("Skipping torch callback for system-only camera device %s",
-                    cameraId.c_str());
+            ALOGV("%s: Skipping torch callback for system-only camera device %s",
+                    __FUNCTION__, cameraId.c_str());
             continue;
         }
         auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
                 cameraId);
         i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
                 __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
+        // Also trigger the torch callbacks for cameras that were remapped to the current cameraId
+        // for the specific package that this listener belongs to.
+        std::vector<std::string> remappedCameraIds =
+                findOriginalIdsForRemappedCameraId(cameraId, i->getListenerUid());
+        for (auto& remappedCameraId : remappedCameraIds) {
+            ret = i->getListener()->onTorchStatusChanged(mapToInterface(status), remappedCameraId);
+            i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
+                    __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
+        }
     }
 }
 
@@ -493,8 +506,8 @@
     }
 
     if (newStatus == StatusInternal::NOT_PRESENT) {
-        logDeviceRemoved(cameraId, fmt::sprintf("Device status changed from %d to %d", oldStatus,
-                newStatus));
+        logDeviceRemoved(cameraId, fmt::format("Device status changed from {} to {}",
+                oldStatus, newStatus));
 
         // Set the device status to NOT_PRESENT, clients will no longer be able to connect
         // to this device until the status changes
@@ -520,8 +533,8 @@
         removeStates(cameraId);
     } else {
         if (oldStatus == StatusInternal::NOT_PRESENT) {
-            logDeviceAdded(cameraId, fmt::sprintf("Device status changed from %d to %d", oldStatus,
-                    newStatus));
+            logDeviceAdded(cameraId, fmt::format("Device status changed from {} to {}",
+                    oldStatus, newStatus));
         }
         updateStatus(newStatus, cameraId);
     }
@@ -561,9 +574,9 @@
     if (updated) {
         std::string idCombo = id + " : " + physicalId;
         if (newStatus == StatusInternal::PRESENT) {
-            logDeviceAdded(idCombo, fmt::sprintf("Device status changed to %d", newStatus));
+            logDeviceAdded(idCombo, fmt::format("Device status changed to {}", newStatus));
         } else {
-            logDeviceRemoved(idCombo, fmt::sprintf("Device status changed to %d", newStatus));
+            logDeviceRemoved(idCombo, fmt::format("Device status changed to {}", newStatus));
         }
         // Avoid calling getSystemCameraKind() with mStatusListenerLock held (b/141756275)
         SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
@@ -729,11 +742,176 @@
     return Status::ok();
 }
 
+Status CameraService::remapCameraIds(
+        const hardware::CameraIdRemapping& cameraIdRemapping) {
+    if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
+        const int pid = CameraThreadState::getCallingPid();
+        const int uid = CameraThreadState::getCallingUid();
+        ALOGE("%s: Permission Denial: can't configure camera ID mapping pid=%d, uid=%d",
+                __FUNCTION__, pid, uid);
+        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
+                "Permission Denial: no permission to configure camera id mapping");
+    }
+    TCameraIdRemapping cameraIdRemappingMap{};
+    binder::Status parseStatus = parseCameraIdRemapping(cameraIdRemapping, &cameraIdRemappingMap);
+    if (!parseStatus.isOk()) {
+        return parseStatus;
+    }
+    remapCameraIds(cameraIdRemappingMap);
+    return Status::ok();
+}
+
+Status CameraService::parseCameraIdRemapping(
+        const hardware::CameraIdRemapping& cameraIdRemapping,
+        /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
+    std::string packageName;
+    std::string cameraIdToReplace, updatedCameraId;
+    for (const auto& packageIdRemapping : cameraIdRemapping.packageIdRemappings) {
+        packageName = packageIdRemapping.packageName;
+        if (packageName == "") {
+            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
+                    "CameraIdRemapping: Package name cannot be empty");
+        }
+
+        if (packageIdRemapping.cameraIdsToReplace.size()
+            != packageIdRemapping.updatedCameraIds.size()) {
+            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                    "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
+                     packageName.c_str());
+        }
+        for (size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
+            cameraIdToReplace = packageIdRemapping.cameraIdsToReplace[i];
+            updatedCameraId = packageIdRemapping.updatedCameraIds[i];
+            if (cameraIdToReplace == "" || updatedCameraId == "") {
+                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                        "CameraIdRemapping: Camera Id cannot be empty for package %s",
+                        packageName.c_str());
+            }
+            if (cameraIdToReplace == updatedCameraId) {
+                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                        "CameraIdRemapping: CameraIdToReplace cannot be the same"
+                        " as updatedCameraId for %s",
+                        packageName.c_str());
+            }
+            (*cameraIdRemappingMap)[packageName][cameraIdToReplace] = updatedCameraId;
+        }
+    }
+    return Status::ok();
+}
+
+void CameraService::remapCameraIds(const TCameraIdRemapping& cameraIdRemapping) {
+    // Acquire mServiceLock and prevent other clients from connecting
+    std::unique_ptr<AutoConditionLock> serviceLockWrapper =
+            AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
+
+    // Collect all existing clients for camera Ids that are being
+    // remapped in the new cameraIdRemapping, but only if they were being used by a
+    // targeted packageName.
+    std::vector<sp<BasicClient>> clientsToDisconnect;
+    std::vector<std::string> cameraIdsToUpdate;
+    for (const auto& [packageName, injectionMap] : cameraIdRemapping) {
+        for (auto& [id0, id1] : injectionMap) {
+            ALOGI("%s: UPDATE:= %s: %s: %s", __FUNCTION__, packageName.c_str(),
+                    id0.c_str(), id1.c_str());
+            auto clientDescriptor = mActiveClientManager.get(id0);
+            if (clientDescriptor != nullptr) {
+                sp<BasicClient> clientSp = clientDescriptor->getValue();
+                if (clientSp->getPackageName() == packageName) {
+                    // This camera is being used by a targeted packageName and
+                    // being remapped to a new camera Id. We should disconnect it.
+                    clientsToDisconnect.push_back(clientSp);
+                    cameraIdsToUpdate.push_back(id0);
+                }
+            }
+        }
+    }
+
+    for (auto& clientSp : clientsToDisconnect) {
+        // Notify the clients about the disconnection.
+        clientSp->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
+                CaptureResultExtras{});
+    }
+
+    // Do not hold mServiceLock while disconnecting clients, but retain the condition
+    // blocking other clients from connecting in mServiceLockWrapper if held.
+    mServiceLock.unlock();
+
+    // Clear calling identity for disconnect() PID checks.
+    int64_t token = CameraThreadState::clearCallingIdentity();
+
+    // Disconnect clients.
+    for (auto& clientSp : clientsToDisconnect) {
+        // This also triggers a call to updateStatus(), which reads mCameraIdRemapping
+        // and requires mCameraIdRemappingLock.
+        clientSp->disconnect();
+    }
+
+    // Invoke destructors (which call disconnect()) now while we don't hold the mServiceLock.
+    clientsToDisconnect.clear();
+
+    CameraThreadState::restoreCallingIdentity(token);
+    mServiceLock.lock();
+
+    {
+        Mutex::Autolock lock(mCameraIdRemappingLock);
+        // Update mCameraIdRemapping.
+        mCameraIdRemapping.clear();
+        mCameraIdRemapping.insert(cameraIdRemapping.begin(), cameraIdRemapping.end());
+    }
+}
+
+std::vector<std::string> CameraService::findOriginalIdsForRemappedCameraId(
+        const std::string& inputCameraId, int clientUid) {
+    std::string packageName = getPackageNameFromUid(clientUid);
+    std::vector<std::string> cameraIds;
+    Mutex::Autolock lock(mCameraIdRemappingLock);
+    if (auto packageMapIter = mCameraIdRemapping.find(packageName);
+        packageMapIter != mCameraIdRemapping.end()) {
+        for (auto& [id0, id1] : packageMapIter->second) {
+            if (id1 == inputCameraId) {
+                cameraIds.push_back(id0);
+            }
+        }
+    }
+    return cameraIds;
+}
+
+std::string CameraService::resolveCameraId(
+        const std::string& inputCameraId,
+        int clientUid,
+        const std::string& packageName) {
+    std::string packageNameVal = packageName;
+    if (packageName.empty()) {
+        packageNameVal = getPackageNameFromUid(clientUid);
+    }
+    if (clientUid < AID_APP_START || packageNameVal.empty()) {
+        // We shouldn't remap cameras for processes with system/vendor UIDs.
+        return inputCameraId;
+    }
+    Mutex::Autolock lock(mCameraIdRemappingLock);
+    if (auto packageMapIter = mCameraIdRemapping.find(packageNameVal);
+        packageMapIter != mCameraIdRemapping.end()) {
+        auto packageMap = packageMapIter->second;
+        if (auto replacementIdIter = packageMap.find(inputCameraId);
+            replacementIdIter != packageMap.end()) {
+            ALOGI("%s: resolveCameraId: remapping cameraId %s for %s to %s",
+                    __FUNCTION__, inputCameraId.c_str(),
+                    packageNameVal.c_str(),
+                    replacementIdIter->second.c_str());
+            return replacementIdIter->second;
+        }
+    }
+    return inputCameraId;
+}
+
 Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait,
         CameraInfo* cameraInfo) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
-    std::string cameraIdStr = cameraIdIntToStrLocked(cameraId);
+    std::string unresolvedCameraId = cameraIdIntToStrLocked(cameraId);
+    std::string cameraIdStr = resolveCameraId(
+            unresolvedCameraId, CameraThreadState::getCallingUid());
+
     if (shouldRejectSystemCameraConnection(cameraIdStr)) {
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera"
                 "characteristics for system only device %s: ", cameraIdStr.c_str());
@@ -799,9 +977,13 @@
     return cameraIdIntToStrLocked(cameraIdInt);
 }
 
-Status CameraService::getCameraCharacteristics(const std::string& cameraId,
+Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
         int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
+
+    const std::string cameraId = resolveCameraId(unresolvedCameraId,
+            CameraThreadState::getCallingUid());
+
     if (!cameraInfo) {
         ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "cameraInfo is NULL");
@@ -886,10 +1068,12 @@
     return ret;
 }
 
-Status CameraService::getTorchStrengthLevel(const std::string& cameraId,
+Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId,
         int32_t* torchStrength) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
+    const std::string cameraId = resolveCameraId(
+            unresolvedCameraId, CameraThreadState::getCallingUid());
     if (!mInitialized) {
         ALOGE("%s: Camera HAL couldn't be initialized.", __FUNCTION__);
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera HAL couldn't be initialized.");
@@ -1008,7 +1192,7 @@
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
         apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
-        bool forceSlowJpegMode, /*out*/sp<BasicClient>* client) {
+        bool forceSlowJpegMode, const std::string& originalCameraId, /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         // Create CameraClient based on device version reported by the HAL.
@@ -1052,7 +1236,7 @@
         *client = new CameraDeviceClient(cameraService, tmp,
                 cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
                 featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
-                overrideForPerfClass, overrideToPortrait);
+                overrideForPerfClass, overrideToPortrait, originalCameraId);
         ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
     }
     return Status::ok();
@@ -1143,7 +1327,7 @@
             kServiceName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
             /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
-            /*forceSlowJpegMode*/false, /*out*/ tmp)
+            /*forceSlowJpegMode*/false, cameraIdStr, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
     }
@@ -1163,7 +1347,9 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Parameters must not be null");
     }
 
-    std::string cameraIdStr = std::to_string(cameraId);
+    std::string unresolvedCameraId = std::to_string(cameraId);
+    std::string cameraIdStr = resolveCameraId(unresolvedCameraId,
+            CameraThreadState::getCallingUid());
 
     // Check if we already have parameters
     {
@@ -1348,8 +1534,8 @@
     attributionSource.uid = clientUid;
     attributionSource.packageName = clientName;
     bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-            toString16(sCameraPermission), attributionSource, String16(),
-            AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+            toString16(sCameraPermission), attributionSource, String16(), AppOpsManager::OP_NONE)
+            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
     if (callingPid != getpid() &&
                 (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
@@ -1676,12 +1862,15 @@
     ATRACE_CALL();
     Status ret = Status::ok();
 
-    std::string cameraIdStr = cameraIdIntToStr(api1CameraId);
+    std::string unresolvedCameraId = cameraIdIntToStr(api1CameraId);
+    std::string cameraIdStr = resolveCameraId(unresolvedCameraId,
+            CameraThreadState::getCallingUid());
+
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
             clientPackageName, /*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
-            overrideToPortrait, forceSlowJpegMode, /*out*/client);
+            overrideToPortrait, forceSlowJpegMode, cameraIdStr, /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(cameraIdStr, CameraThreadState::getCallingPid(), clientPackageName,
@@ -1761,7 +1950,7 @@
 
 Status CameraService::connectDevice(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
-        const std::string& cameraId,
+        const std::string& unresolvedCameraId,
         const std::string& clientPackageName,
         const std::optional<std::string>& clientFeatureId,
         int clientUid, int oomScoreOffset, int targetSdkVersion,
@@ -1781,6 +1970,10 @@
         clientPackageNameAdj = systemClient;
         systemNativeClient = true;
     }
+    const std::string cameraId = resolveCameraId(
+            unresolvedCameraId,
+            CameraThreadState::getCallingUid(),
+            clientPackageNameAdj);
 
     if (oomScoreOffset < 0) {
         std::string msg =
@@ -1818,9 +2011,9 @@
             cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
             targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false,
-            /*out*/client);
+            unresolvedCameraId, /*out*/client);
 
-    if(!ret.isOk()) {
+    if (!ret.isOk()) {
         logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
         return ret;
     }
@@ -1887,7 +2080,7 @@
         int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait, bool forceSlowJpegMode,
+        bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
         /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
@@ -2002,7 +2195,7 @@
                 clientFeatureId, cameraId, api1CameraId, facing,
                 orientation, clientPid, clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
-                overrideToPortrait, forceSlowJpegMode,
+                overrideToPortrait, forceSlowJpegMode, originalCameraId,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -2016,6 +2209,9 @@
         if (err != OK) {
             ALOGE("%s: Could not initialize client from HAL.", __FUNCTION__);
             // Errors could be from the HAL module open call or from AppOpsManager
+            mServiceLock.unlock();
+            client->disconnect();
+            mServiceLock.lock();
             switch(err) {
                 case BAD_VALUE:
                     return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
@@ -2256,8 +2452,9 @@
     return OK;
 }
 
-Status CameraService::turnOnTorchWithStrengthLevel(const std::string& cameraId,
-        int32_t torchStrength, const sp<IBinder>& clientBinder) {
+Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
+        int32_t torchStrength,
+        const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2268,7 +2465,7 @@
     }
 
     int uid = CameraThreadState::getCallingUid();
-
+    const std::string cameraId = resolveCameraId(unresolvedCameraId, uid);
     if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to change the strength level"
                 "for system only device %s: ", cameraId.c_str());
@@ -2384,7 +2581,8 @@
     return Status::ok();
 }
 
-Status CameraService::setTorchMode(const std::string& cameraId, bool enabled,
+Status CameraService::setTorchMode(const std::string& unresolvedCameraId,
+        bool enabled,
         const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
 
@@ -2396,6 +2594,7 @@
     }
 
     int uid = CameraThreadState::getCallingUid();
+    const std::string cameraId = resolveCameraId(unresolvedCameraId, uid);
 
     if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to set torch mode"
@@ -2922,10 +3121,13 @@
     return ret;
 }
 
-Status CameraService::supportsCameraApi(const std::string& cameraId, int apiVersion,
+Status CameraService::supportsCameraApi(const std::string& unresolvedCameraId, int apiVersion,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
+    const std::string cameraId = resolveCameraId(
+            unresolvedCameraId, CameraThreadState::getCallingUid());
+
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
 
     switch (apiVersion) {
@@ -2984,10 +3186,13 @@
     return Status::ok();
 }
 
-Status CameraService::isHiddenPhysicalCamera(const std::string& cameraId,
+Status CameraService::isHiddenPhysicalCamera(const std::string& unresolvedCameraId,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
+    const std::string cameraId = resolveCameraId(unresolvedCameraId,
+            CameraThreadState::getCallingUid());
+
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
     *isSupported = mCameraProviderManager->isHiddenPhysicalCamera(cameraId);
 
@@ -4918,7 +5123,6 @@
     state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
                         &logicalCameraIds]
             (const std::string& cameraId, StatusInternal status) {
-
             if (status != StatusInternal::ENUMERATING) {
                 // Update torch status if it has a flash unit.
                 Mutex::Autolock al(mTorchStatusMutex);
@@ -4951,9 +5155,21 @@
                 auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
                         cameraId);
                 listener->handleBinderStatus(ret,
-                        "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                         "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
                         __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
                         ret.exceptionCode());
+                // Also trigger the callbacks for cameras that were remapped to the current
+                // cameraId for the specific package that this listener belongs to.
+                std::vector<std::string> remappedCameraIds =
+                        findOriginalIdsForRemappedCameraId(cameraId, listener->getListenerUid());
+                for (auto& remappedCameraId : remappedCameraIds) {
+                    ret = listener->getListener()->onStatusChanged(
+                            mapToInterface(status), remappedCameraId);
+                    listener->handleBinderStatus(ret,
+                             "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                            __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+                            ret.exceptionCode());
+                }
             }
         });
 }
@@ -5163,6 +5379,8 @@
         return handleWatchCommand(args, in, out);
     } else if (args.size() >= 2 && args[0] == toString16("set-watchdog")) {
         return handleSetCameraServiceWatchdog(args);
+    } else if (args.size() >= 4 && args[0] == toString16("remap-camera-id")) {
+        return handleCameraIdRemapping(args, err);
     } else if (args.size() == 1 && args[0] == toString16("help")) {
         printHelp(out);
         return OK;
@@ -5171,6 +5389,23 @@
     return BAD_VALUE;
 }
 
+status_t CameraService::handleCameraIdRemapping(const Vector<String16>& args, int err) {
+    uid_t uid = IPCThreadState::self()->getCallingUid();
+    if (uid != AID_ROOT) {
+        dprintf(err, "Must be adb root\n");
+        return PERMISSION_DENIED;
+    }
+    if (args.size() != 4) {
+        dprintf(err, "Expected format: remap-camera-id <PACKAGE> <Id0> <Id1>\n");
+        return BAD_VALUE;
+    }
+    std::string packageName = toStdString(args[1]);
+    std::string cameraIdToReplace = toStdString(args[2]);
+    std::string cameraIdNew = toStdString(args[3]);
+    remapCameraIds({{packageName, {{cameraIdToReplace, cameraIdNew}}}});
+    return OK;
+}
+
 status_t CameraService::handleSetUidState(const Vector<String16>& args, int err) {
     std::string packageName = toStdString(args[1]);
 
@@ -5784,6 +6019,7 @@
         "  clear-stream-use-case-override clear the stream use case override\n"
         "  set-zoom-override <-1/0/1> enable or disable zoom override\n"
         "      Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
+        "  remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
         "  watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
         "  help print this message\n");
 }
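
For orientation, here is a minimal, self-contained sketch of the remapping data shape and the two lookups the new CameraService code above performs. The map layout mirrors TCameraIdRemapping (packageName -> {cameraIdToReplace -> newCameraIdToUse}); the standalone type and function names are illustrative, not the service's own code.

    #include <cassert>
    #include <map>
    #include <string>
    #include <vector>

    // Same shape as CameraService::TCameraIdRemapping.
    using RemappingMap = std::map<std::string, std::map<std::string, std::string>>;

    // Forward lookup, analogous to resolveCameraId(): which id should an operation
    // from packageName on inputCameraId actually be performed on?
    std::string resolveForPackage(const RemappingMap& remapping, const std::string& packageName,
                                  const std::string& inputCameraId) {
        if (auto pkg = remapping.find(packageName); pkg != remapping.end()) {
            if (auto id = pkg->second.find(inputCameraId); id != pkg->second.end()) {
                return id->second;
            }
        }
        return inputCameraId;
    }

    // Reverse lookup, analogous to findOriginalIdsForRemappedCameraId(): which ids
    // were remapped onto inputCameraId? Used above to fan out status/torch callbacks.
    std::vector<std::string> originalIdsFor(const RemappingMap& remapping,
                                            const std::string& packageName,
                                            const std::string& inputCameraId) {
        std::vector<std::string> ids;
        if (auto pkg = remapping.find(packageName); pkg != remapping.end()) {
            for (const auto& [id0, id1] : pkg->second) {
                if (id1 == inputCameraId) ids.push_back(id0);
            }
        }
        return ids;
    }

    int main() {
        RemappingMap remapping{{"com.android.xyz", {{"0", "1"}}}};
        assert(resolveForPackage(remapping, "com.android.xyz", "0") == "1");
        assert(resolveForPackage(remapping, "com.other.app", "0") == "0");  // no mapping: unchanged
        assert(originalIdsFor(remapping, "com.android.xyz", "1") == std::vector<std::string>{"0"});
    }

The shell hook added above drives the same path; assuming the camera service's usual "media.camera" cmd endpoint, something like "adb shell cmd media.camera remap-camera-id com.android.xyz 0 1" (run as adb root) installs that mapping.
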
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index bc65293..72831d5 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -20,6 +20,7 @@
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
+#include <android/hardware/CameraIdRemapping.h>
 #include <android/hardware/camera2/BnCameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraInjectionCallback.h>
 
@@ -61,6 +62,7 @@
 #include <utility>
 #include <unordered_map>
 #include <unordered_set>
+#include <vector>
 
 namespace android {
 
@@ -138,6 +140,9 @@
 
     /////////////////////////////////////////////////////////////////////
     // ICameraService
+    // IMPORTANT: All binder calls that deal with logicalCameraId should use
+    // resolveCameraId(logicalCameraId) to arrive at the correct cameraId to
+    // perform the operation on (in case of Id Remapping).
     virtual binder::Status     getNumberOfCameras(int32_t type, int32_t* numCameras);
 
     virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
@@ -222,6 +227,9 @@
     virtual binder::Status reportExtensionSessionStats(
             const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/);
 
+    virtual binder::Status remapCameraIds(
+            const hardware::CameraIdRemapping& cameraIdRemapping);
+
     // Extra permissions checks
     virtual status_t    onTransact(uint32_t code, const Parcel& data,
                                    Parcel* reply, uint32_t flags);
@@ -355,7 +363,7 @@
         static bool isValidAudioRestriction(int32_t mode);
 
         // Override rotate-and-crop AUTO behavior
-        virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop) = 0;
+        virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop, bool fromHal = false) = 0;
 
         // Override autoframing AUTO behaviour
         virtual status_t setAutoframingOverride(uint8_t autoframingValue) = 0;
@@ -636,6 +644,8 @@
         UNKNOWN = static_cast<int32_t>(hardware::ICameraServiceListener::STATUS_UNKNOWN)
     };
 
+    friend int32_t format_as(StatusInternal s);
+
     /**
      * Container class for the state of each logical camera device, including: ID, status, and
      * dependencies on other devices.  The mapping of camera ID -> state saved in mCameraStates
@@ -915,7 +925,7 @@
             int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
             const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode,
+            bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
             /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
@@ -943,6 +953,46 @@
     // Mutex guarding mCameraStates map
     mutable Mutex mCameraStatesLock;
 
+    /**
+     * Mapping from packageName -> {cameraIdToReplace -> newCameraIdToUse}.
+     *
+     * For a given packageName, every binder operation targeting cameraIdToReplace
+     * is performed on newCameraIdToUse instead.
+     */
+    typedef std::map<std::string, std::map<std::string, std::string>> TCameraIdRemapping;
+    TCameraIdRemapping mCameraIdRemapping{};
+    /** Mutex guarding mCameraIdRemapping. */
+    Mutex mCameraIdRemappingLock;
+
+    /** Parses cameraIdRemapping parcelable into the native cameraIdRemappingMap. */
+    binder::Status parseCameraIdRemapping(
+            const hardware::CameraIdRemapping& cameraIdRemapping,
+            /* out */ TCameraIdRemapping* cameraIdRemappingMap);
+
+    /**
+     * Resolve the (potentially remapped) camera Id to use for packageName.
+     *
+     * This returns the Camera Id to use in case inputCameraId was remapped to a
+     * different Id for the given packageName. Otherwise, it returns the inputCameraId.
+     *
+     * If the packageName is not provided, it will be inferred from the clientUid.
+     */
+    std::string resolveCameraId(
+            const std::string& inputCameraId,
+            int clientUid,
+            const std::string& packageName = "");
+
+    /**
+     * Updates the state of mCameraIdRemapping, while disconnecting active clients as necessary.
+     */
+    void remapCameraIds(const TCameraIdRemapping& cameraIdRemapping);
+
+    /**
+     * Finds the Camera Ids that were remapped to the inputCameraId for the given client.
+     */
+    std::vector<std::string> findOriginalIdsForRemappedCameraId(
+            const std::string& inputCameraId, int clientUid);
+
     // Circular buffer for storing event logging for dumps
     RingBuffer<std::string> mEventLog;
     Mutex mLogLock;
@@ -1325,6 +1375,9 @@
     // Set or clear the zoom override flag
     status_t handleSetZoomOverride(const Vector<String16>& args);
 
+    // Set Camera Id remapping using 'cmd'
+    status_t handleCameraIdRemapping(const Vector<String16>& args, int errFd);
+
     // Handle 'watch' command as passed through 'cmd'
     status_t handleWatchCommand(const Vector<String16> &args, int inFd, int outFd);
 
@@ -1370,14 +1423,15 @@
      */
     static std::string getFormattedCurrentTime();
 
-    static binder::Status makeClient(const sp<CameraService>& cameraService,
-            const sp<IInterface>& cameraCb, const std::string& packageName,
-            bool systemNativeClient, const std::optional<std::string>& featureId,
-            const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
-            int clientPid, uid_t clientUid, int servicePid,
+    static binder::Status makeClient(
+            const sp<CameraService>& cameraService, const sp<IInterface>& cameraCb,
+            const std::string& packageName, bool systemNativeClient,
+            const std::optional<std::string>& featureId, const std::string& cameraId, int api1CameraId,
+            int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
             bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
-            /*out*/sp<BasicClient>* client);
+            const std::string& originalCameraId,
+            /*out*/ sp<BasicClient>* client);
 
     static std::string toString(std::set<userid_t> intSet);
     static int32_t mapToInterface(TorchModeStatus status);
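
A note on the fmt changes earlier in CameraService.cpp: replacing fmt::sprintf("%d", status) with fmt::format("{}", status) for StatusInternal works because of {fmt}'s format_as() extension point (declared as a friend in the class above and defined near the top of CameraService.cpp). A standalone sketch of the mechanism, assuming {fmt} >= 9 where format_as and fmt::underlying are available:

    #include <cstdint>
    #include <fmt/format.h>

    namespace demo {
    enum class Status : int32_t { PRESENT = 0, NOT_PRESENT = 2 };

    // Found via argument-dependent lookup; "{}" then formats the enum's underlying
    // value without needing a fmt::formatter specialization.
    inline auto format_as(Status s) { return fmt::underlying(s); }
    }  // namespace demo

    int main() {
        fmt::print("Device status changed from {} to {}\n",
                   demo::Status::PRESENT, demo::Status::NOT_PRESENT);  // prints 0 and 2
    }
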
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index 7291c5f..2225cfe 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -78,7 +78,13 @@
 
     for (auto &handle : windowHandles) {
         native_handle_t* nh = makeFromAidl(handle);
-        iGBPs.push_back(new H2BGraphicBufferProducer(AImageReader_getHGBPFromHandle(nh)));
+        auto igbp = AImageReader_getHGBPFromHandle(nh);
+        if (igbp == nullptr) {
+            ALOGE("%s: Could not get HGBP from NativeHandle: %s. Skipping.",
+                    __FUNCTION__, handle.toString().c_str());
+            continue;
+        }
+        iGBPs.push_back(new H2BGraphicBufferProducer(igbp));
         native_handle_delete(nh);
     }
     UOutputConfiguration outputConfiguration(
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index b388e5a..caa6424 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -73,7 +73,9 @@
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
                 clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
                 /*legacyClient*/ true),
-        mParameters(api1CameraId, cameraFacing)
+        mParameters(api1CameraId, cameraFacing),
+        mLatestRequestIds(kMaxRequestIds),
+        mLatestFailedRequestIds(kMaxRequestIds)
 {
     ATRACE_CALL();
 
@@ -1835,7 +1837,7 @@
                     (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT == errorCode)) {
                 Mutex::Autolock al(mLatestRequestMutex);
 
-                mLatestFailedRequestId = resultExtras.requestId;
+                mLatestFailedRequestIds.add(resultExtras.requestId);
                 mLatestRequestSignal.signal();
             }
             mCaptureSequencer->notifyError(errorCode, resultExtras);
@@ -2340,7 +2342,7 @@
     return mDevice->setCameraServiceWatchdog(enabled);
 }
 
-status_t Camera2Client::setRotateAndCropOverride(uint8_t rotateAndCrop) {
+status_t Camera2Client::setRotateAndCropOverride(uint8_t rotateAndCrop, bool fromHal) {
     if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
 
     {
@@ -2354,7 +2356,7 @@
     }
 
     return mDevice->setRotateAndCropAutoBehavior(
-        static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
+        static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop), fromHal);
 }
 
 status_t Camera2Client::setAutoframingOverride(uint8_t autoframingValue) {
@@ -2410,7 +2412,10 @@
 
 status_t Camera2Client::waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout) {
     Mutex::Autolock l(mLatestRequestMutex);
-    while ((mLatestRequestId != requestId) && (mLatestFailedRequestId != requestId)) {
+    while ((std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) ==
+            mLatestRequestIds.end()) &&
+           (std::find(mLatestFailedRequestIds.begin(), mLatestFailedRequestIds.end(), requestId) ==
+            mLatestFailedRequestIds.end())) {
         nsecs_t startTime = systemTime();
 
         auto res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
@@ -2419,13 +2424,14 @@
         timeout -= (systemTime() - startTime);
     }
 
-    return (mLatestRequestId == requestId) ? OK : DEAD_OBJECT;
+    return (std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) !=
+             mLatestRequestIds.end()) ? OK : DEAD_OBJECT;
 }
 
 void Camera2Client::notifyRequestId(int32_t requestId) {
     Mutex::Autolock al(mLatestRequestMutex);
 
-    mLatestRequestId = requestId;
+    mLatestRequestIds.add(requestId);
     mLatestRequestSignal.signal();
 }
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index fe12690..2cb7af0 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -22,11 +22,7 @@
 #include "common/Camera2ClientBase.h"
 #include "api1/client2/Parameters.h"
 #include "api1/client2/FrameProcessor.h"
-//#include "api1/client2/StreamingProcessor.h"
-//#include "api1/client2/JpegProcessor.h"
-//#include "api1/client2/ZslProcessor.h"
-//#include "api1/client2/CaptureSequencer.h"
-//#include "api1/client2/CallbackProcessor.h"
+#include <media/RingBuffer.h>
 
 namespace android {
 
@@ -85,7 +81,7 @@
     virtual status_t        setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
     virtual status_t        setAudioRestriction(int mode);
     virtual int32_t         getGlobalAudioRestriction();
-    virtual status_t        setRotateAndCropOverride(uint8_t rotateAndCrop);
+    virtual status_t        setRotateAndCropOverride(uint8_t rotateAndCrop, bool fromHal = false);
     virtual status_t        setAutoframingOverride(uint8_t autoframingMode);
 
     virtual bool            supportsCameraMute();
@@ -263,8 +259,8 @@
 
     mutable Mutex mLatestRequestMutex;
     Condition mLatestRequestSignal;
-    int32_t mLatestRequestId = -1;
-    int32_t mLatestFailedRequestId = -1;
+    static constexpr size_t kMaxRequestIds = BufferQueueDefs::NUM_BUFFER_SLOTS;
+    RingBuffer<int32_t> mLatestRequestIds, mLatestFailedRequestIds;
     status_t waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout);
     status_t waitUntilCurrentRequestIdLocked();
 };
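
The Camera2Client change above swaps the single mLatestRequestId/mLatestFailedRequestId integers for RingBuffers holding the last kMaxRequestIds values, presumably so a thread blocked in waitUntilRequestIdApplied() does not miss its id when several requests are signalled in quick succession. RingBuffer here is media/RingBuffer.h; a minimal fixed-capacity stand-in with the add()/iterator surface used above might look like this (illustrative, not the real class):

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Keeps the most recent `capacity` values, overwriting the oldest on add().
    template <typename T>
    class MiniRingBuffer {
      public:
        explicit MiniRingBuffer(size_t capacity) : mCapacity(capacity) {}  // capacity must be > 0

        void add(const T& value) {
            if (mItems.size() < mCapacity) {
                mItems.push_back(value);
            } else {
                mItems[mNext] = value;  // overwrite the oldest entry
            }
            mNext = (mNext + 1) % mCapacity;
        }

        // Callers above only scan for membership with std::find, so iteration
        // order (which is not chronological once full) does not matter.
        auto begin() const { return mItems.begin(); }
        auto end() const { return mItems.end(); }

      private:
        const size_t mCapacity;
        size_t mNext = 0;
        std::vector<T> mItems;
    };

    int main() {
        MiniRingBuffer<int32_t> latest(3);
        const int32_t ids[] = {1, 2, 3, 4};
        for (int32_t id : ids) latest.add(id);  // 1 is evicted once 4 arrives
        bool has4 = std::find(latest.begin(), latest.end(), 4) != latest.end();  // true
        bool has1 = std::find(latest.begin(), latest.end(), 1) != latest.end();  // false
        return (has4 && !has1) ? 0 : 1;
    }
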
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index c60f327..702d476 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -99,7 +99,8 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait) :
+        bool overrideToPortrait,
+        const std::string& originalCameraId) :
     Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
             systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
             sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
@@ -107,8 +108,8 @@
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
-    mOverrideForPerfClass(overrideForPerfClass) {
-
+    mOverrideForPerfClass(overrideForPerfClass),
+    mOriginalCameraId(originalCameraId) {
     ATRACE_CALL();
     ALOGI("CameraDeviceClient %s: Opened", cameraId.c_str());
 }
@@ -323,7 +324,7 @@
 
         //The first capture settings should always match the logical camera id
         const std::string &logicalId = request.mPhysicalCameraSettings.begin()->id;
-        if (mDevice->getId() != logicalId) {
+        if (mDevice->getId() != logicalId && mOriginalCameraId != logicalId) {
             ALOGE("%s: Camera %s: Invalid camera request settings.", __FUNCTION__,
                     mCameraIdStr.c_str());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
@@ -438,6 +439,7 @@
 
         CameraDeviceBase::PhysicalCameraSettingsList physicalSettingsList;
         for (const auto& it : request.mPhysicalCameraSettings) {
+            std::string resolvedId = (mOriginalCameraId == it.id) ? mDevice->getId() : it.id;
             if (it.settings.isEmpty()) {
                 ALOGE("%s: Camera %s: Sent empty metadata packet. Rejecting request.",
                         __FUNCTION__, mCameraIdStr.c_str());
@@ -448,7 +450,7 @@
             // Check whether the physical / logical stream has settings
             // consistent with the sensor pixel mode(s) it was configured with.
             // mCameraIdToStreamSet will only have ids that are high resolution
-            const auto streamIdSetIt = mHighResolutionCameraIdToStreamIdSet.find(it.id);
+            const auto streamIdSetIt = mHighResolutionCameraIdToStreamIdSet.find(resolvedId);
             if (streamIdSetIt != mHighResolutionCameraIdToStreamIdSet.end()) {
                 std::list<int> streamIdsUsedInRequest = getIntersection(streamIdSetIt->second,
                         outputStreamIds);
@@ -456,26 +458,25 @@
                         !isSensorPixelModeConsistent(streamIdsUsedInRequest, it.settings)) {
                      ALOGE("%s: Camera %s: Request settings CONTROL_SENSOR_PIXEL_MODE not "
                             "consistent with configured streams. Rejecting request.",
-                            __FUNCTION__, it.id.c_str());
+                            __FUNCTION__, resolvedId.c_str());
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                         "Request settings CONTROL_SENSOR_PIXEL_MODE are not consistent with "
                         "streams configured");
                 }
             }
 
-            const std::string &physicalId = it.id;
             bool hasTestPatternModePhysicalKey = std::find(mSupportedPhysicalRequestKeys.begin(),
                     mSupportedPhysicalRequestKeys.end(), ANDROID_SENSOR_TEST_PATTERN_MODE) !=
                     mSupportedPhysicalRequestKeys.end();
             bool hasTestPatternDataPhysicalKey = std::find(mSupportedPhysicalRequestKeys.begin(),
                     mSupportedPhysicalRequestKeys.end(), ANDROID_SENSOR_TEST_PATTERN_DATA) !=
                     mSupportedPhysicalRequestKeys.end();
-            if (physicalId != mDevice->getId()) {
+            if (resolvedId != mDevice->getId()) {
                 auto found = std::find(requestedPhysicalIds.begin(), requestedPhysicalIds.end(),
-                        it.id);
+                        resolvedId);
                 if (found == requestedPhysicalIds.end()) {
                     ALOGE("%s: Camera %s: Physical camera id: %s not part of attached outputs.",
-                            __FUNCTION__, mCameraIdStr.c_str(), physicalId.c_str());
+                            __FUNCTION__, mCameraIdStr.c_str(), resolvedId.c_str());
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                             "Invalid physical camera id");
                 }
@@ -495,11 +496,11 @@
                         }
                     }
 
-                    physicalSettingsList.push_back({it.id, filteredParams,
+                    physicalSettingsList.push_back({resolvedId, filteredParams,
                             hasTestPatternModePhysicalKey, hasTestPatternDataPhysicalKey});
                 }
             } else {
-                physicalSettingsList.push_back({it.id, it.settings});
+                physicalSettingsList.push_back({resolvedId, it.settings});
             }
         }
 
@@ -998,7 +999,7 @@
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
                 mCameraIdStr.c_str(), streamInfo.width, streamInfo.height, streamInfo.format,
-                streamInfo.dataSpace, strerror(-err), err);
+                static_cast<int>(streamInfo.dataSpace), strerror(-err), err);
     } else {
         int i = 0;
         for (auto& binder : binders) {
@@ -1095,7 +1096,8 @@
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
-                mCameraIdStr.c_str(), width, height, format, dataSpace, strerror(-err), err);
+                mCameraIdStr.c_str(), width, height, format, static_cast<int>(dataSpace),
+                strerror(-err), err);
     } else {
         // Can not add streamId to mStreamMap here, as the surface is deferred. Add it to
         // a separate list to track. Once the deferred surface is set, this id will be
@@ -1760,11 +1762,11 @@
     return mDevice->setCameraServiceWatchdog(enabled);
 }
 
-status_t CameraDeviceClient::setRotateAndCropOverride(uint8_t rotateAndCrop) {
+status_t CameraDeviceClient::setRotateAndCropOverride(uint8_t rotateAndCrop, bool fromHal) {
     if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
 
     return mDevice->setRotateAndCropAutoBehavior(
-        static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
+        static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop), fromHal);
 }
 
 status_t CameraDeviceClient::setAutoframingOverride(uint8_t autoframingValue) {
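
The mOriginalCameraId handling above covers remapped clients that keep addressing the id they originally asked to open (the pre-remap logical id) in their per-physical capture settings, while the device itself runs under the remapped id. Per settings entry, the normalization in submitRequestList() boils down to the following (a sketch; names are illustrative):

    #include <string>

    // Mirrors: resolvedId = (mOriginalCameraId == it.id) ? mDevice->getId() : it.id;
    // Settings addressed to the original (pre-remap) logical id are treated as
    // targeting the device's actual id; physical-camera ids pass through unchanged.
    std::string normalizeSettingsId(const std::string& settingsId, const std::string& deviceId,
                                    const std::string& originalCameraId) {
        return settingsId == originalCameraId ? deviceId : settingsId;
    }

    int main() {
        // Client opened "0", which was remapped to "1".
        return (normalizeSettingsId("0", "1", "0") == "1" &&
                normalizeSettingsId("2", "1", "0") == "2") ? 0 : 1;
    }
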
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 45c904a..1c19dbd 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -191,13 +191,15 @@
             uid_t clientUid,
             int servicePid,
             bool overrideForPerfClass,
-            bool overrideToPortrait);
+            bool overrideToPortrait,
+            const std::string& originalCameraId);
     virtual ~CameraDeviceClient();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
             const std::string& monitorTags) override;
 
-    virtual status_t      setRotateAndCropOverride(uint8_t rotateAndCrop) override;
+    virtual status_t      setRotateAndCropOverride(uint8_t rotateAndCrop,
+            bool fromHal = false) override;
 
     virtual status_t      setAutoframingOverride(uint8_t autoframingValue) override;
 
@@ -368,6 +370,9 @@
     std::string mUserTag;
     // The last set video stabilization mode
     int mVideoStabilizationMode = -1;
+
+    // This only exists in case of camera ID Remapping.
+    std::string mOriginalCameraId;
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 99bdb0e..4ed352d 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -80,7 +80,8 @@
     return OK;
 }
 
-status_t CameraOfflineSessionClient::setRotateAndCropOverride(uint8_t /*rotateAndCrop*/) {
+status_t CameraOfflineSessionClient::setRotateAndCropOverride(uint8_t /*rotateAndCrop*/,
+        bool /*fromHal*/) {
     // Since we're not submitting more capture requests, changes to rotateAndCrop override
     // make no difference.
     return OK;
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 70bad03..8aad4e9 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -80,7 +80,7 @@
     status_t initialize(sp<CameraProviderManager> /*manager*/,
             const std::string& /*monitorTags*/) override;
 
-    status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
+    status_t setRotateAndCropOverride(uint8_t rotateAndCrop, bool fromHal = false) override;
 
     status_t setAutoframingOverride(uint8_t autoframingValue) override;
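
One C++ detail behind the fromHal plumbing in these client classes: default arguments are bound to the static type at the call site, not the dynamic type, which is presumably why each override repeats "= false" rather than relying on the base declaration alone. A small standalone illustration (not AOSP code):

    #include <iostream>

    struct Base {
        virtual ~Base() = default;
        virtual void setOverride(int mode, bool fromHal = false) = 0;
    };

    struct Derived : Base {
        // The default must be repeated here; otherwise calls made through a
        // Derived* or Derived& would have no default to fall back on.
        void setOverride(int mode, bool fromHal = false) override {
            std::cout << "mode=" << mode << " fromHal=" << fromHal << '\n';
        }
    };

    int main() {
        Derived d;
        Base* b = &d;
        b->setOverride(1);  // default comes from Base's declaration
        d.setOverride(2);   // default comes from Derived's declaration
    }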
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index a54ba9b..43eb181 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -377,7 +377,8 @@
             rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
         }
 
-        static_cast<TClientBase *>(this)->setRotateAndCropOverride(rotateAndCropMode);
+        static_cast<TClientBase *>(this)->setRotateAndCropOverride(rotateAndCropMode,
+                                                                   /*fromHal*/ true);
     }
 }
 
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 017da0f..01199af 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -444,7 +444,8 @@
      * and defaults to NONE.
      */
     virtual status_t setRotateAndCropAutoBehavior(
-            camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) = 0;
+            camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue,
+            bool fromHal = false) = 0;
 
     /**
      * Set the current behavior for the AUTOFRAMING control when in AUTO.
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 7db3787..de3fe97 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -70,6 +70,7 @@
 #include "utils/TraceHFR.h"
 
 #include <algorithm>
+#include <optional>
 #include <tuple>
 
 using namespace android::camera3;
@@ -3038,6 +3039,7 @@
         mPrevTriggers(0),
         mFrameNumber(0),
         mLatestRequestId(NAME_NOT_FOUND),
+        mLatestFailedRequestId(NAME_NOT_FOUND),
         mCurrentAfTriggerId(0),
         mCurrentPreCaptureTriggerId(0),
         mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
@@ -3288,7 +3290,7 @@
     ATRACE_CALL();
     Mutex::Autolock l(mLatestRequestMutex);
     status_t res;
-    while (mLatestRequestId != requestId) {
+    while (mLatestRequestId != requestId && mLatestFailedRequestId != requestId) {
         nsecs_t startTime = systemTime();
 
         res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
@@ -3584,6 +3586,7 @@
         captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
             overrideAutoRotateAndCrop(captureRequest);
         captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
+        captureRequest->mTestPatternChanged = overrideTestPattern(captureRequest);
     }
 
     // 'mNextRequests' will at this point contain either a set of HFR batched requests
@@ -3730,7 +3733,6 @@
         bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
         mPrevTriggers = triggerCount;
 
-        bool testPatternChanged = overrideTestPattern(captureRequest);
         bool settingsOverrideChanged = overrideSettingsOverride(captureRequest);
 
         // If the request is the same as last, or we had triggers now or last time or
@@ -3739,7 +3741,7 @@
                 (mPrevRequest != captureRequest || triggersMixedIn ||
                          captureRequest->mRotateAndCropChanged ||
                          captureRequest->mAutoframingChanged ||
-                         testPatternChanged || settingsOverrideChanged) &&
+                         captureRequest->mTestPatternChanged || settingsOverrideChanged) &&
                 // Request settings are all the same within one batch, so only treat the first
                 // request in a batch as new
                 !(batchedRequest && i > 0);
@@ -4017,8 +4019,11 @@
                 sp<Camera3Device> parent = mParent.promote();
                 if (parent != nullptr) {
                     const std::string& streamCameraId = outputStream->getPhysicalCameraId();
+                    // Consider the case where clients are sending a single logical camera request
+                    // to physical output/outputs
+                    bool singleRequest = captureRequest->mSettingsList.size() == 1;
                     for (const auto& settings : captureRequest->mSettingsList) {
-                        if ((streamCameraId.empty() &&
+                        if (((streamCameraId.empty() || singleRequest) &&
                                 parent->getId() == settings.cameraId) ||
                                 streamCameraId == settings.cameraId) {
                             outputStream->fireBufferRequestForFrameNumber(
@@ -4362,6 +4367,12 @@
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
                         captureRequest->mResultExtras);
             }
+            {
+                Mutex::Autolock al(mLatestRequestMutex);
+
+                mLatestFailedRequestId = captureRequest->mResultExtras.requestId;
+                mLatestRequestSignal.signal();
+            }
         }
 
         // Remove yet-to-be submitted inflight request from inflightMap
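
The two hunks above extend the request-wait logic so a waiter is also released when its request is recorded as failed (mLatestFailedRequestId), not only when it becomes the latest submitted request. A minimal, self-contained sketch of that wait/signal pattern, with illustrative names (RequestTracker, markSubmitted, markFailed) that are not part of this patch:

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    class RequestTracker {
    public:
        // Returns true once 'requestId' is either submitted or marked failed.
        bool waitUntilDone(int requestId, std::chrono::milliseconds timeout) {
            std::unique_lock<std::mutex> lock(mMutex);
            return mSignal.wait_for(lock, timeout, [&] {
                return mLatestRequestId == requestId || mLatestFailedRequestId == requestId;
            });
        }

        void markSubmitted(int requestId) {
            std::lock_guard<std::mutex> lock(mMutex);
            mLatestRequestId = requestId;
            mSignal.notify_all();
        }

        void markFailed(int requestId) {
            std::lock_guard<std::mutex> lock(mMutex);
            mLatestFailedRequestId = requestId;  // mirrors the new failure path above
            mSignal.notify_all();
        }

    private:
        std::mutex mMutex;
        std::condition_variable mSignal;
        int mLatestRequestId = -1;
        int mLatestFailedRequestId = -1;
    };
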
@@ -5392,9 +5403,13 @@
 }
 
 status_t Camera3Device::setRotateAndCropAutoBehavior(
-    camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) {
+    camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue, bool fromHal) {
     ATRACE_CALL();
-    Mutex::Autolock il(mInterfaceLock);
+    // We shouldn't hold mInterfaceLock when called as an effect of a HAL
+    // callback since this can lead to a deadlock : b/299348355.
+    // mLock still protects state.
+    std::optional<Mutex::Autolock> maybeMutex =
+        fromHal ? std::nullopt : std::optional<Mutex::Autolock>(mInterfaceLock);
     Mutex::Autolock l(mLock);
     if (mRequestThread == nullptr) {
         return INVALID_OPERATION;
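
The setRotateAndCropAutoBehavior() hunk above avoids taking mInterfaceLock when the call comes from a HAL callback, using std::optional to make the lock conditional. A minimal sketch of that conditional-locking pattern, assuming hypothetical names (Controller, setMode, fromCallback) rather than the camera service's real ones:

    #include <mutex>
    #include <optional>

    class Controller {
    public:
        // 'fromCallback' mirrors the 'fromHal' flag above: when the call originates
        // from a callback that must not take the interface lock, skip it and rely
        // on the state lock alone.
        void setMode(int mode, bool fromCallback) {
            std::optional<std::unique_lock<std::mutex>> maybeInterfaceLock;
            if (!fromCallback) {
                maybeInterfaceLock.emplace(mInterfaceLock);
            }
            std::lock_guard<std::mutex> stateLock(mStateLock);
            mMode = mode;
        }

    private:
        std::mutex mInterfaceLock;  // serializes external API calls
        std::mutex mStateLock;      // protects mMode
        int mMode = 0;
    };
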
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index dfa3b19..4eed4b5 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -275,7 +275,7 @@
      * and defaults to NONE.
      */
     status_t setRotateAndCropAutoBehavior(
-            camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
+            camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue, bool fromHal);
 
     /**
      * Set the current behavior for the AUTOFRAMING control when in AUTO.
@@ -651,6 +651,8 @@
         bool                                mAutoframingAuto;
         // Indicates that the auto framing value within 'mSettingsList' was modified
         bool                                mAutoframingChanged = false;
+        // Indicates that the camera test pattern setting is modified
+        bool                                mTestPatternChanged = false;
 
         // Whether this capture request has its zoom ratio set to 1.0x before
         // the framework overrides it for camera HAL consumption.
@@ -1151,6 +1153,7 @@
         Condition          mLatestRequestSignal;
         // android.request.id for latest process_capture_request
         int32_t            mLatestRequestId;
+        int32_t            mLatestFailedRequestId;
         CameraMetadata     mLatestRequest;
         std::unordered_map<std::string, CameraMetadata> mLatestPhysicalRequest;
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index c59138c..152687e 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -84,10 +84,10 @@
     status_t res = getEndpointUsage(&consumerUsage);
     if (res != OK) consumerUsage = 0;
 
-    lines << fmt::sprintf("      State: %d\n", mState);
+    lines << fmt::sprintf("      State: %d\n", static_cast<int>(mState));
     lines << fmt::sprintf("      Dims: %d x %d, format 0x%x, dataspace 0x%x\n",
             camera_stream::width, camera_stream::height,
-            camera_stream::format, camera_stream::data_space);
+            camera_stream::format, static_cast<int>(camera_stream::data_space));
     lines << fmt::sprintf("      Max size: %zu\n", mMaxSize);
     lines << fmt::sprintf("      Combined usage: 0x%" PRIx64 ", max HAL buffers: %d\n",
             mUsage | consumerUsage, camera_stream::max_buffers);
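
The casts above satisfy printf-style format checking: "%d" takes an int, so enum-typed values such as the stream state and dataspace are converted explicitly. A small illustrative sketch; the enum here is hypothetical and std::printf stands in for fmt::sprintf:

    #include <cstdio>

    enum class StreamState { Constructed = 0, Configured = 1 };  // illustrative only

    void dumpState(StreamState state) {
        // "%d" requires an int argument; a strongly typed enum does not match it,
        // so convert explicitly before handing it to a printf-style formatter.
        std::printf("State: %d\n", static_cast<int>(state));
    }
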
diff --git a/services/camera/libcameraservice/hidl/Utils.cpp b/services/camera/libcameraservice/hidl/Utils.cpp
index b5dddf7..d0302d0 100644
--- a/services/camera/libcameraservice/hidl/Utils.cpp
+++ b/services/camera/libcameraservice/hidl/Utils.cpp
@@ -88,7 +88,13 @@
     auto &windowHandles = hOutputConfiguration.windowHandles;
     iGBPs.reserve(windowHandles.size());
     for (auto &handle : windowHandles) {
-        iGBPs.push_back(new H2BGraphicBufferProducer(AImageReader_getHGBPFromHandle(handle)));
+        auto igbp = AImageReader_getHGBPFromHandle(handle);
+        if (igbp == nullptr) {
+            ALOGE("%s: Could not get HGBP from native_handle: %p. Skipping.",
+                    __FUNCTION__, handle.getNativeHandle());
+            continue;
+        }
+        iGBPs.push_back(new H2BGraphicBufferProducer(igbp));
     }
     hardware::camera2::params::OutputConfiguration outputConfiguration(
         iGBPs, convertFromHidl(hOutputConfiguration.rotation),
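
The hunk above guards against AImageReader_getHGBPFromHandle() returning null before wrapping the result, skipping invalid window handles instead of dereferencing a null pointer later. A self-contained sketch of that guard-and-skip shape, with stand-in types (Handle, Producer, convert) that are not the real camera APIs:

    #include <memory>
    #include <utility>
    #include <vector>

    struct Handle { bool valid = true; };  // stand-in for the HIDL window handle
    struct Producer {};                    // stand-in for the wrapped producer object

    // Stand-in for a conversion that can fail and return nullptr.
    std::shared_ptr<Producer> convert(const Handle& handle) {
        return handle.valid ? std::make_shared<Producer>() : nullptr;
    }

    std::vector<std::shared_ptr<Producer>> wrapAll(const std::vector<Handle>& handles) {
        std::vector<std::shared_ptr<Producer>> out;
        out.reserve(handles.size());
        for (const auto& handle : handles) {
            auto producer = convert(handle);
            if (producer == nullptr) {
                continue;  // log-and-skip instead of wrapping a null pointer
            }
            out.push_back(std::move(producer));
        }
        return out;
    }
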
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 921ad7d..efc58b4 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -60,15 +60,21 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
             "android-camera-fwk-eng@google.com",
         ],
-        componentid: 155276,
+        componentid: 41727,
         libfuzzer_options: [
             //based on b/187360866
             "timeout=770",
         ],
-
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libcameraservice",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index f7257e3..ee64284 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -622,7 +622,7 @@
     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
         if (dataSpace != streamInfo.dataSpace) {
             std::string msg = fmt::sprintf("Camera %s:Surface dataSpace doesn't match: %d vs %d",
-                    logicalCameraId.c_str(), dataSpace, streamInfo.dataSpace);
+                    logicalCameraId.c_str(), static_cast<int>(dataSpace), static_cast<int>(streamInfo.dataSpace));
             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
diff --git a/services/camera/virtualcamera/OWNERS b/services/camera/virtualcamera/OWNERS
new file mode 100644
index 0000000..db34336
--- /dev/null
+++ b/services/camera/virtualcamera/OWNERS
@@ -0,0 +1,4 @@
+# Bug component: 1171888
+include platform/frameworks/base:/services/companion/java/com/android/server/companion/virtual/OWNERS
+caen@google.com
+jsebechlebsky@google.com
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index a2f17c2..506b3bc 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -95,31 +95,6 @@
         "android.hidl.memory@1.0",
     ],
 
-    runtime_libs: [
-        "libstagefright_soft_aacdec",
-        "libstagefright_soft_aacenc",
-        "libstagefright_soft_amrdec",
-        "libstagefright_soft_amrnbenc",
-        "libstagefright_soft_amrwbenc",
-        "libstagefright_soft_avcdec",
-        "libstagefright_soft_avcenc",
-        "libstagefright_soft_flacdec",
-        "libstagefright_soft_flacenc",
-        "libstagefright_soft_g711dec",
-        "libstagefright_soft_gsmdec",
-        "libstagefright_soft_hevcdec",
-        "libstagefright_soft_mp3dec",
-        "libstagefright_soft_mpeg2dec",
-        "libstagefright_soft_mpeg4dec",
-        "libstagefright_soft_mpeg4enc",
-        "libstagefright_soft_opusdec",
-        "libstagefright_soft_rawdec",
-        "libstagefright_soft_vorbisdec",
-        "libstagefright_soft_vpxdec",
-        "libstagefright_soft_vpxenc",
-        "libstagefright_softomx_plugin",
-    ],
-
     // OMX interfaces force this to stay in 32-bit mode;
     compile_multilib: "32",
 
diff --git a/services/mediacodec/main_swcodecservice.cpp b/services/mediacodec/main_swcodecservice.cpp
index d91b788..9aa5d3d 100644
--- a/services/mediacodec/main_swcodecservice.cpp
+++ b/services/mediacodec/main_swcodecservice.cpp
@@ -40,9 +40,5 @@
     SetUpMinijail(kSystemSeccompPolicyPath, kVendorSeccompPolicyPath);
     strcpy(argv[0], "media.swcodec");
 
-    ::android::hardware::configureRpcThreadpool(64, false);
-
     RegisterCodecServices();
-
-    ::android::hardware::joinRpcThreadpool();
 }
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy b/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy
index a55c3eb..0c6aafd 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy
@@ -52,6 +52,9 @@
 getdents64: 1
 ppoll: 1
 
+clock_gettime: 1
+pipe2: 1
+
 # Required by AddressSanitizer
 gettid: 1
 sched_yield: 1
diff --git a/services/medialog/OWNERS b/services/medialog/OWNERS
index 21723ba..fe3205a 100644
--- a/services/medialog/OWNERS
+++ b/services/medialog/OWNERS
@@ -1,3 +1,4 @@
-elaurent@google.com
-gkasten@google.com
+# Bug component: 48436
+atneya@google.com
 hunga@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 1b5255a..f81db53 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -524,8 +524,8 @@
                                      "audiotrack",
                                      // other media
                                      "codec",
-                                     "freeze",
-                                     "judder",
+                                     "videofreeze",
+                                     "videojudder",
                                      "extractor",
                                      "mediadrm",
                                      "mediaparser",
diff --git a/services/mediametrics/OWNERS b/services/mediametrics/OWNERS
index e37a1f8..14aa2c1 100644
--- a/services/mediametrics/OWNERS
+++ b/services/mediametrics/OWNERS
@@ -1,2 +1,3 @@
+# Bug component: 1344
 essick@google.com
 hunga@google.com
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index ea76bcd..83b30f3 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -654,6 +654,16 @@
     }
     AStatsEvent_writeInt32(event, componentColorFormat);
 
+    uid_t app_uid = item->getUid();
+    metrics_proto.set_caller_uid(app_uid);
+    AStatsEvent_writeInt32(event, app_uid);
+
+    int64_t pixelFormat = -1;
+    if (item->getInt64("android.media.mediacodec.pixel-format", &pixelFormat)) {
+        metrics_proto.set_pixel_format(pixelFormat);
+    }
+    AStatsEvent_writeInt64(event, pixelFormat);
+
     int64_t firstRenderTimeUs = -1;
     item->getInt64("android.media.mediacodec.first-render-time-us", &firstRenderTimeUs);
     int64_t framesReleased = -1;
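
The statsd_codec.cpp hunk reads an optional item key with a -1 sentinel and writes the value to the atom unconditionally. A self-contained sketch of that "optional field with sentinel default" pattern, using a plain map as a stand-in for the metrics item:

    #include <cstdint>
    #include <map>
    #include <string>

    // Stand-in for the metrics item lookup: returns false when the key is absent.
    bool getInt64(const std::map<std::string, int64_t>& item,
                  const std::string& key, int64_t* value) {
        auto it = item.find(key);
        if (it == item.end()) return false;
        *value = it->second;
        return true;
    }

    int64_t pixelFormatForAtom(const std::map<std::string, int64_t>& item) {
        int64_t pixelFormat = -1;  // -1 means "not reported", as in the hunk above
        (void)getInt64(item, "android.media.mediacodec.pixel-format", &pixelFormat);
        return pixelFormat;        // the atom field is written unconditionally
    }
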
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index 73a96e9..794bda0 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -74,10 +74,14 @@
     name: "libresourcemanagerservice",
 
     srcs: [
+        "DefaultResourceModel.cpp",
+        "ProcessPriorityReclaimPolicy.cpp",
         "ResourceManagerMetrics.cpp",
         "ResourceManagerService.cpp",
+        "ResourceManagerServiceNew.cpp",
         "ResourceObserverService.cpp",
         "ResourceManagerServiceUtils.cpp",
+        "ResourceTracker.cpp",
         "ServiceLog.cpp",
         "UidObserver.cpp",
 
@@ -97,6 +101,7 @@
         "libstatssocket",
         "libprotobuf-cpp-lite",
         "libactivitymanager_aidl",
+        "aconfig_mediacodec_flags_c_lib",
     ],
 
     static_libs: [
diff --git a/services/mediaresourcemanager/DefaultResourceModel.cpp b/services/mediaresourcemanager/DefaultResourceModel.cpp
new file mode 100644
index 0000000..7bad715
--- /dev/null
+++ b/services/mediaresourcemanager/DefaultResourceModel.cpp
@@ -0,0 +1,145 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DefaultResourceModel"
+#include <utils/Log.h>
+
+#include "ResourceManagerServiceUtils.h"
+#include "DefaultResourceModel.h"
+#include "ResourceTracker.h"
+
+namespace android {
+
+DefaultResourceModel::DefaultResourceModel(
+        const std::shared_ptr<ResourceTracker>& resourceTracker,
+        bool supportsMultipleSecureCodecs,
+        bool supportsSecureWithNonSecureCodec)
+    : mSupportsMultipleSecureCodecs(supportsMultipleSecureCodecs),
+      mSupportsSecureWithNonSecureCodec(supportsSecureWithNonSecureCodec),
+      mResourceTracker(resourceTracker) {
+}
+
+DefaultResourceModel::~DefaultResourceModel() {
+}
+
+bool DefaultResourceModel::getAllClients(
+        const ReclaimRequestInfo& reclimRequestInfo,
+        std::vector<ClientInfo>& clients) {
+
+    clients.clear();
+    MediaResourceParcel mediaResource{.type = reclimRequestInfo.mResources[0].type,
+                                      .subType = reclimRequestInfo.mResources[0].subType};
+    ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid, &mediaResource};
+
+    // Resolve the secure-unsecure codec conflicts if there is any.
+    switch (reclimRequestInfo.mResources[0].type) {
+    case MediaResource::Type::kSecureCodec:
+        // Looking to start a secure codec.
+        // #1. Check whether multiple secure codecs can coexist
+        if (!mSupportsMultipleSecureCodecs) {
+            if (!mResourceTracker->getNonConflictingClients(resourceRequestInfo, clients)) {
+                // A higher priority process owns an instance of a secure codec.
+                // So this request can't be fulfilled.
+                return false;
+            }
+        }
+        // #2. Make sure a secure codec can coexist if there is an instance
+        // of non-secure codec running already.
+        if (!mSupportsSecureWithNonSecureCodec) {
+            mediaResource.type = MediaResource::Type::kNonSecureCodec;
+            if (!mResourceTracker->getNonConflictingClients(resourceRequestInfo, clients)) {
+                // A higher priority process owns an instance of a non-secure codec.
+                // So this request can't be fulfilled.
+                return false;
+            }
+        }
+        break;
+    case MediaResource::Type::kNonSecureCodec:
+        // Looking to start a non-secure codec.
+        // Make sure a non-secure codec can coexist if there is an instance
+        // of secure codec running already.
+        if (!mSupportsSecureWithNonSecureCodec) {
+            mediaResource.type = MediaResource::Type::kSecureCodec;
+            if (!mResourceTracker->getNonConflictingClients(resourceRequestInfo, clients)) {
+                // A higher priority process owns an instance of a secure codec.
+                // So this request can't be fulfilled.
+                return false;
+            }
+        }
+        break;
+    default:
+        break;
+    }
+
+    if (!clients.empty()) {
+        // There is a secure/non-secure codec co-existence conflict
+        // and we have only found processes with lower priority holding the
+        // resources. So, all of these need to be reclaimed.
+        return false;
+    }
+
+    // No more resource conflicts.
+    switch (reclimRequestInfo.mResources[0].type) {
+    case MediaResource::Type::kSecureCodec:
+    case MediaResource::Type::kNonSecureCodec:
+        // Handling Codec resource reclaim
+        return getCodecClients(reclimRequestInfo, clients);
+    case MediaResource::Type::kGraphicMemory:
+    case MediaResource::Type::kDrmSession:
+        // Handling DRM and GraphicMemory resource reclaim
+        mediaResource.id = reclimRequestInfo.mResources[0].id;
+        mediaResource.value = reclimRequestInfo.mResources[0].value;
+        return mResourceTracker->getAllClients(resourceRequestInfo, clients);
+    default:
+        break;
+    }
+
+    return !clients.empty();
+}
+
+bool DefaultResourceModel::getCodecClients(
+        const ReclaimRequestInfo& reclimRequestInfo,
+        std::vector<ClientInfo>& clients) {
+    MediaResourceParcel mediaResource;
+    ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid, &mediaResource};
+
+    // 1. Look to find the client(s) with the other resources, for the given
+    // primary type.
+    MediaResource::SubType primarySubType = reclimRequestInfo.mResources[0].subType;
+    for (size_t index = 1; index < reclimRequestInfo.mResources.size(); index++) {
+        mediaResource.type = reclimRequestInfo.mResources[index].type;
+        mediaResource.subType = reclimRequestInfo.mResources[index].subType;
+        mResourceTracker->getAllClients(resourceRequestInfo, clients, primarySubType);
+    }
+
+    // 2. Get all clients of the same type.
+    mediaResource.type = reclimRequestInfo.mResources[0].type;
+    mediaResource.subType = reclimRequestInfo.mResources[0].subType;
+    mResourceTracker->getAllClients(resourceRequestInfo, clients);
+
+    // 3. Get all clients of the other codec type.
+    MediaResourceType otherType =
+        (reclimRequestInfo.mResources[0].type == MediaResource::Type::kSecureCodec) ?
+        MediaResource::Type::kNonSecureCodec : MediaResource::Type::kSecureCodec;
+    mediaResource.type = otherType;
+    mResourceTracker->getAllClients(resourceRequestInfo, clients);
+
+    return !clients.empty();
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/DefaultResourceModel.h b/services/mediaresourcemanager/DefaultResourceModel.h
new file mode 100644
index 0000000..1891eda
--- /dev/null
+++ b/services/mediaresourcemanager/DefaultResourceModel.h
@@ -0,0 +1,73 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_DEFAULTRESOURCEMODEL_H_
+#define ANDROID_MEDIA_DEFAULTRESOURCEMODEL_H_
+
+#include "IResourceModel.h"
+
+namespace android {
+
+class ResourceTracker;
+
+/*
+ * Implements the Default Resource Model that handles:
+ *   - coexistence of a secure codec with other secure/non-secure codecs
+ *   - sharing resources among other codecs
+ */
+class DefaultResourceModel : public IResourceModel {
+public:
+    DefaultResourceModel(const std::shared_ptr<ResourceTracker>& resourceTracker,
+                         bool supportsMultipleSecureCodecs = true,
+                         bool supportsSecureWithNonSecureCodec = true);
+    virtual ~DefaultResourceModel();
+
+    /*
+     * Set the codec co-existence properties
+     */
+    void config(bool supportsMultipleSecureCodecs, bool supportsSecureWithNonSecureCodec) {
+        mSupportsMultipleSecureCodecs = supportsMultipleSecureCodecs;
+        mSupportsSecureWithNonSecureCodec = supportsSecureWithNonSecureCodec;
+    }
+
+    /*
+     * Get a list of all clients that hold the resources requested.
+     * This implementation uses the ResourceModel to select the clients.
+     *
+     * @param[in]  reclaimRequestInfo Information about the Reclaim request
+     * @param[out] clients The list of clients that hold the resources in question.
+     *
+     * @return true if there aren't any resource conflicts and false otherwise.
+     */
+    bool getAllClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                       std::vector<ClientInfo>& clients) override;
+
+protected:
+    bool getCodecClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                         std::vector<ClientInfo>& clients);
+
+protected:
+    // Keeping these protected to allow extending this implementation
+    // by other resource models.
+    bool mSupportsMultipleSecureCodecs;
+    bool mSupportsSecureWithNonSecureCodec;
+    std::shared_ptr<ResourceTracker> mResourceTracker;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_DEFAULTRESOURCEMODEL_H_
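
DefaultResourceModel keeps its co-existence flags and tracker protected so other resource models can build on it. A hedged sketch of such an extension; VendorResourceModel and its constructor arguments are illustrative only and not part of this patch:

    #include "DefaultResourceModel.h"

    namespace android {

    class VendorResourceModel : public DefaultResourceModel {
    public:
        explicit VendorResourceModel(const std::shared_ptr<ResourceTracker>& tracker)
            // This hypothetical device disallows concurrent secure codecs.
            : DefaultResourceModel(tracker,
                                   /*supportsMultipleSecureCodecs=*/false,
                                   /*supportsSecureWithNonSecureCodec=*/true) {}

        bool getAllClients(const ReclaimRequestInfo& reclaimRequestInfo,
                           std::vector<ClientInfo>& clients) override {
            // Start from the default co-existence handling; a vendor model could
            // append device-specific candidates here.
            return DefaultResourceModel::getAllClients(reclaimRequestInfo, clients);
        }
    };

    } // namespace android
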
diff --git a/services/mediaresourcemanager/IReclaimPolicy.h b/services/mediaresourcemanager/IReclaimPolicy.h
new file mode 100644
index 0000000..dfbfc12
--- /dev/null
+++ b/services/mediaresourcemanager/IReclaimPolicy.h
@@ -0,0 +1,58 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_IRECLAIMPOLICY_H_
+#define ANDROID_MEDIA_IRECLAIMPOLICY_H_
+
+#include <memory>
+#include <aidl/android/media/IResourceManagerClient.h>
+
+namespace android {
+
+struct ClientInfo;
+struct ReclaimRequestInfo;
+
+/*
+ * Interface that defines Reclaim Policy.
+ *
+ * This provides an interface to select/identify a client based on a specific
+ * Reclaim policy.
+ */
+class IReclaimPolicy {
+public:
+    IReclaimPolicy() {}
+
+    virtual ~IReclaimPolicy() {}
+
+    /*
+     * Based on the Reclaim policy, identify and return a client from the list
+     * of given clients that satisfy the resource requested.
+     *
+     * @param[in]  reclaimRequestInfo Information about the resource request
+     * @param[in]  clients List of clients to select from.
+     * @param[out] targetClients Upon success, this will have the list of identified client(s).
+     *
+     * @return true on success, false otherwise
+     */
+    virtual bool getClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                            const std::vector<ClientInfo>& clients,
+                            std::vector<ClientInfo>& targetClients) = 0;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_IRECLAIMPOLICY_H_
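
IReclaimPolicy is a small strategy interface: given the reclaim request and candidate clients, pick the target(s). A minimal hedged implementation sketch; FirstClientReclaimPolicy is illustrative only (the policy actually added by this change is ProcessPriorityReclaimPolicy), and the ResourceManagerServiceUtils.h include assumes that is where ClientInfo is defined:

    #include <vector>

    #include "IReclaimPolicy.h"
    #include "ResourceManagerServiceUtils.h"  // assumed to provide ClientInfo

    namespace android {

    class FirstClientReclaimPolicy : public IReclaimPolicy {
    public:
        bool getClients(const ReclaimRequestInfo& /*reclaimRequestInfo*/,
                        const std::vector<ClientInfo>& clients,
                        std::vector<ClientInfo>& targetClients) override {
            if (clients.empty()) {
                return false;  // nothing to reclaim from
            }
            // Naive choice: always target the first candidate.
            targetClients.push_back(clients.front());
            return true;
        }
    };

    } // namespace android
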
diff --git a/services/mediaresourcemanager/IResourceModel.h b/services/mediaresourcemanager/IResourceModel.h
new file mode 100644
index 0000000..f865f54
--- /dev/null
+++ b/services/mediaresourcemanager/IResourceModel.h
@@ -0,0 +1,67 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_IRESOURCEMODEL_H_
+#define ANDROID_MEDIA_IRESOURCEMODEL_H_
+
+#include <memory>
+#include <vector>
+
+#include <aidl/android/media/IResourceManagerClient.h>
+#include <aidl/android/media/MediaResourceParcel.h>
+
+namespace android {
+
+struct ClientInfo;
+struct ReclaimRequestInfo;
+
+/*
+ * Interface that defines Resource Model.
+ *
+ * This provides an interface that manages the resource model.
+ * The primary functionality of the implementation of this resource model is to:
+ *  1. Define a resource model for a device (or family of devices)
+ *    For example (and not limited to):
+ *      - Can a secure codec coexist with another secure or unsecured codec?
+ *      - How many codecs can coexist?
+ *      - Can one type of codec (for example AVC) coexist with another type of codec
+ *        (for example HEVC) independently, or do they share a common
+ *        resource pool?
+ *  2. Provide a list of clients that hold the requested resources.
+ */
+class IResourceModel {
+public:
+    IResourceModel() {}
+
+    virtual ~IResourceModel() {}
+
+    /*
+     * Get a list of all clients that hold the resources requested.
+     * This implementation uses the ResourceModel to select the clients.
+     *
+     * @param[in]  reclaimRequestInfo Information about the Reclaim request
+     * @param[out] clients The list of clients that hold the resources in question.
+     *
+     * @return true if there aren't any resource conflicts and false otherwise.
+     */
+    virtual bool getAllClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                               std::vector<ClientInfo>& clients) = 0;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_IRESOURCEMODEL_H_
diff --git a/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.cpp b/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.cpp
new file mode 100644
index 0000000..5b776a6
--- /dev/null
+++ b/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.cpp
@@ -0,0 +1,135 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ProcessPriorityReclaimPolicy"
+#include <utils/Log.h>
+
+#include "ResourceTracker.h"
+#include "ResourceManagerService.h"
+#include "ProcessPriorityReclaimPolicy.h"
+
+namespace android {
+
+using aidl::android::media::IResourceManagerClient;
+
+ProcessPriorityReclaimPolicy::ProcessPriorityReclaimPolicy(
+        const std::shared_ptr<ResourceTracker>& resourceTracker)
+    : mResourceTracker(resourceTracker) {
+}
+
+ProcessPriorityReclaimPolicy::~ProcessPriorityReclaimPolicy() {
+}
+
+// Process priority (oom score) based reclaim:
+//   - Find the lowest priority process (lower than that of the calling process).
+//   - Find the biggest client (with the required resources) from that process.
+bool ProcessPriorityReclaimPolicy::getClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                                              const std::vector<ClientInfo>& clients,
+                                              std::vector<ClientInfo>& targetClients) {
+    // NOTE: This is the behavior of the existing reclaim policy.
+    // We can alter it to select more than one client to reclaim from, depending
+    // on the reclaim policy.
+
+    MediaResource::Type type = reclaimRequestInfo.mResources[0].type;
+    MediaResource::SubType subType = reclaimRequestInfo.mResources[0].subType;
+    // Find one client to reclaim the needed resources from.
+    // 1. Get the priority of the (reclaim) requesting process.
+    int callingPid = reclaimRequestInfo.mCallingPid;
+    int callingPriority = -1;
+    if (!mResourceTracker->getPriority(callingPid, &callingPriority)) {
+        ALOGE("%s: can't get process priority for pid %d", __func__, callingPid);
+        return false;
+    }
+
+    ClientInfo clientInfo;
+    // 2. Look to find the biggest client from the lowest priority process that
+    // has the other resources and with the given primary type.
+    bool found = false;
+    int lowestPriority = -1;
+    MediaResource::SubType primarySubType = subType;
+    for (size_t index = 1; !found && (index < reclaimRequestInfo.mResources.size()); index++) {
+        MediaResource::Type type = reclaimRequestInfo.mResources[index].type;
+        MediaResource::SubType subType = reclaimRequestInfo.mResources[index].subType;
+        found = getBiggestClientFromLowestPriority(callingPid, callingPriority,
+                                                   type, subType, primarySubType,
+                                                   clients, clientInfo, lowestPriority);
+    }
+    // 3. If we haven't found a client yet, then select the biggest client of primary type.
+    if (!found) {
+        found = getBiggestClientFromLowestPriority(callingPid, callingPriority,
+                                                   type, subType,
+                                                   MediaResource::SubType::kUnspecifiedSubType,
+                                                   clients, clientInfo, lowestPriority);
+    }
+    // 4. If we haven't found a client yet, then select the biggest client of the other codec type.
+    // This is applicable to codec resource types only.
+    if (!found) {
+        if (type != MediaResource::Type::kSecureCodec &&
+            type != MediaResource::Type::kNonSecureCodec) {
+            return false;
+        }
+        MediaResourceType otherType = (type == MediaResource::Type::kSecureCodec) ?
+            MediaResource::Type::kNonSecureCodec : MediaResource::Type::kSecureCodec;
+        if (!getBiggestClientFromLowestPriority(callingPid, callingPriority,
+                                                otherType, subType,
+                                                MediaResource::SubType::kUnspecifiedSubType,
+                                                clients, clientInfo, lowestPriority)) {
+            return false;
+        }
+    }
+
+    targetClients.emplace_back(clientInfo);
+    ALOGI("%s: CallingProcess(%d:%d) will reclaim from the lowestPriorityProcess(%d:%d)",
+          __func__, callingPid, callingPriority, clientInfo.mPid, lowestPriority);
+
+    return true;
+}
+
+bool ProcessPriorityReclaimPolicy::getBiggestClientFromLowestPriority(
+        pid_t callingPid,
+        int callingPriority,
+        MediaResource::Type type, MediaResource::SubType subType,
+        MediaResource::SubType primarySubType,
+        const std::vector<ClientInfo>& clients,
+        ClientInfo& targetClient,
+        int& lowestPriority) {
+    // 1. Find the lowest priority process among all the clients with the
+    // requested resource type.
+    int lowestPriorityPid = -1;
+    lowestPriority = -1;
+    if (!mResourceTracker->getLowestPriorityPid(type, subType, primarySubType, clients,
+                                                lowestPriorityPid, lowestPriority)) {
+        ALOGD("%s: can't find a process with lower priority than that of the process[%d:%d]",
+              __func__, callingPid, callingPriority);
+        return false;
+    }
+
+    // 2. Make sure that the priority of the target process is lower than
+    // that of the requesting process.
+    if (lowestPriority <= callingPriority) {
+        ALOGD("%s: lowest priority %d vs caller priority %d",
+              __func__, lowestPriority, callingPriority);
+        return false;
+    }
+
+    // 3. Look to find the biggest client from that process for the given resources
+    return mResourceTracker->getBiggestClient(lowestPriorityPid, type, subType,
+                                              clients, targetClient, primarySubType);
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.h b/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.h
new file mode 100644
index 0000000..77bf7e1
--- /dev/null
+++ b/services/mediaresourcemanager/ProcessPriorityReclaimPolicy.h
@@ -0,0 +1,89 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_PROCESSPRIORITYRECLAIMPOLICY_H_
+#define ANDROID_MEDIA_PROCESSPRIORITYRECLAIMPOLICY_H_
+
+#include <media/MediaResource.h>
+#include "IReclaimPolicy.h"
+
+namespace android {
+
+class ResourceTracker;
+struct ClientInfo;
+
+/*
+ * Implementation of the Reclaim Policy based on the process priority.
+ *
+ * Find the lowest priority process (lower than the calling/requesting process’s priority)
+ * that has the required resources.
+ * From that process, find the biggest client and return the same for reclaiming.
+ * If there is a codec co-existence policy, that is addressed as below:
+ *   - If there are any conflicting codecs, reclaim all of those conflicting clients.
+ * If no conflicting codecs, the reclaim policy will select a client in the order of:
+ *   - Find the biggest client from the lowest priority process that
+ *     has the other resources and with the given primary type.
+ *   - Select the biggest client from the lower priority process that
+ *     has the primary type.
+ *   - If it's a codec reclaim request, then:
+ *      - Select the biggest client from the lower priority process that
+ *        has the other type (for example, secure for a non-secure codec and vice versa).
+ */
+class ProcessPriorityReclaimPolicy : public IReclaimPolicy {
+public:
+    ProcessPriorityReclaimPolicy(const std::shared_ptr<ResourceTracker>& resourceTracker);
+
+    virtual ~ProcessPriorityReclaimPolicy();
+
+    /*
+     * Based on the process priority, identify and return a client from the list
+     * of given clients that satisfy the resource requested.
+     *
+     * @param[in]  reclaimRequestInfo Information about the resource request
+     * @param[in]  clients List of clients to select from.
+     * @param[out] targetClients Upon success, this will have the list of identified client(s).
+     *
+     * @return true on success, false otherwise
+     */
+    bool getClients(const ReclaimRequestInfo& reclaimRequestInfo,
+                    const std::vector<ClientInfo>& clients,
+                    std::vector<ClientInfo>& targetClients) override;
+
+private:
+
+    // Get the biggest client with the given resources from the given list of clients.
+    // The client should belong to lowest possible priority than that of the
+    // calling/requesting process.
+    // returns true on success, false otherwise
+    //
+    bool getBiggestClientFromLowestPriority(
+        pid_t callingPid,
+        int callingPriority,
+        MediaResource::Type type,
+        MediaResource::SubType subType,
+        MediaResource::SubType primarySubType,
+        const std::vector<ClientInfo>& clients,
+        ClientInfo& targetClient,
+        int& lowestPriority);
+
+private:
+    std::shared_ptr<ResourceTracker> mResourceTracker;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_PROCESSPRIORITYRECLAIMPOLICY_H_
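
Taken together, a resource model proposes candidate clients and a reclaim policy narrows them down to the reclaim target(s). The sketch below is a guess at how the pieces compose; ResourceManagerServiceNew's real wiring is not shown in this excerpt, the return-value handling is simplified, and the ResourceManagerServiceUtils.h include assumes that header defines ClientInfo and ReclaimRequestInfo:

    #include <memory>
    #include <vector>

    #include "DefaultResourceModel.h"
    #include "ProcessPriorityReclaimPolicy.h"
    #include "ResourceManagerServiceUtils.h"  // assumed to define ClientInfo / ReclaimRequestInfo

    namespace android {

    bool selectReclaimTargets(const std::shared_ptr<ResourceTracker>& tracker,
                              const ReclaimRequestInfo& request,
                              std::vector<ClientInfo>& targets) {
        DefaultResourceModel model(tracker);           // device defaults for codec co-existence
        ProcessPriorityReclaimPolicy policy(tracker);  // pick targets by process priority

        std::vector<ClientInfo> candidates;
        // The real service also inspects getAllClients()'s return value; it is
        // ignored here for brevity.
        model.getAllClients(request, candidates);
        if (candidates.empty()) {
            return false;  // nobody is holding the requested resources
        }
        return policy.getClients(request, candidates, targets);
    }

    } // namespace android
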
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index e26fd28..af85772 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -46,9 +46,12 @@
 
 inline const char* getCodecType(MediaResourceSubType codecType) {
     switch (codecType) {
-        case MediaResourceSubType::kAudioCodec:         return "Audio";
-        case MediaResourceSubType::kVideoCodec:         return "Video";
-        case MediaResourceSubType::kImageCodec:         return "Image";
+        case MediaResourceSubType::kHwAudioCodec:       return "Hw Audio";
+        case MediaResourceSubType::kSwAudioCodec:       return "Sw Audio";
+        case MediaResourceSubType::kHwVideoCodec:       return "Hw Video";
+        case MediaResourceSubType::kSwVideoCodec:       return "Sw Video";
+        case MediaResourceSubType::kHwImageCodec:       return "Hw Image";
+        case MediaResourceSubType::kSwImageCodec:       return "Sw Image";
         case MediaResourceSubType::kUnspecifiedSubType:
         default:
                                                         return "Unspecified";
@@ -56,39 +59,29 @@
     return "Unspecified";
 }
 
-static CodecBucket getCodecBucket(bool isHardware,
-                                  bool isEncoder,
-                                  MediaResourceSubType codecType) {
-    if (isHardware) {
-        switch (codecType) {
-            case MediaResourceSubType::kAudioCodec:
-                if (isEncoder) return HwAudioEncoder;
-                return HwAudioDecoder;
-            case MediaResourceSubType::kVideoCodec:
-                if (isEncoder) return HwVideoEncoder;
-                return HwVideoDecoder;
-            case MediaResourceSubType::kImageCodec:
-                if (isEncoder) return HwImageEncoder;
-                return HwImageDecoder;
-            case MediaResourceSubType::kUnspecifiedSubType:
-            default:
-                return CodecBucketUnspecified;
-        }
-    } else {
-        switch (codecType) {
-            case MediaResourceSubType::kAudioCodec:
-                if (isEncoder) return SwAudioEncoder;
-                return SwAudioDecoder;
-            case MediaResourceSubType::kVideoCodec:
-                if (isEncoder) return SwVideoEncoder;
-                return SwVideoDecoder;
-            case MediaResourceSubType::kImageCodec:
-                if (isEncoder) return SwImageEncoder;
-                return SwImageDecoder;
-            case MediaResourceSubType::kUnspecifiedSubType:
-            default:
-                return CodecBucketUnspecified;
-        }
+inline bool isHardwareCodec(MediaResourceSubType codecType) {
+    return (codecType == MediaResourceSubType::kHwAudioCodec ||
+            codecType == MediaResourceSubType::kHwVideoCodec ||
+            codecType == MediaResourceSubType::kHwImageCodec);
+}
+
+static CodecBucket getCodecBucket(bool isEncoder, MediaResourceSubType codecType) {
+    switch (codecType) {
+    case MediaResourceSubType::kHwAudioCodec:
+        return isEncoder? HwAudioEncoder : HwAudioDecoder;
+    case MediaResourceSubType::kSwAudioCodec:
+        return isEncoder? SwAudioEncoder : SwAudioDecoder;
+    case MediaResourceSubType::kHwVideoCodec:
+        return isEncoder? HwVideoEncoder : HwVideoDecoder;
+    case MediaResourceSubType::kSwVideoCodec:
+        return isEncoder? SwVideoEncoder : SwVideoDecoder;
+    case MediaResourceSubType::kHwImageCodec:
+        return isEncoder? HwImageEncoder : HwImageDecoder;
+    case MediaResourceSubType::kSwImageCodec:
+        return isEncoder? SwImageEncoder : SwImageDecoder;
+    case MediaResourceSubType::kUnspecifiedSubType:
+    default:
+        return CodecBucketUnspecified;
     }
 
     return CodecBucketUnspecified;
@@ -179,8 +172,10 @@
     std::scoped_lock lock(mLock);
     ClientConfigMap::iterator entry = mClientConfigMap.find(clientConfig.clientInfo.id);
     if (entry != mClientConfigMap.end() &&
-        (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
-        clientConfig.codecType == MediaResourceSubType::kImageCodec)) {
+        (clientConfig.codecType == MediaResourceSubType::kHwVideoCodec ||
+         clientConfig.codecType == MediaResourceSubType::kSwVideoCodec ||
+         clientConfig.codecType == MediaResourceSubType::kHwImageCodec ||
+         clientConfig.codecType == MediaResourceSubType::kSwImageCodec)) {
         int pid = clientConfig.clientInfo.pid;
         // Update the pixel count for this process
         updatePixelCount(pid, clientConfig.width * (long)clientConfig.height,
@@ -201,13 +196,13 @@
     mClientConfigMap[clientConfig.clientInfo.id] = clientConfig;
 
     // Update the concurrent codec count for this process.
-    CodecBucket codecBucket = getCodecBucket(clientConfig.isHardware,
-                                             clientConfig.isEncoder,
-                                             clientConfig.codecType);
+    CodecBucket codecBucket = getCodecBucket(clientConfig.isEncoder, clientConfig.codecType);
     increaseConcurrentCodecs(pid, codecBucket);
 
-    if (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
-        clientConfig.codecType == MediaResourceSubType::kImageCodec) {
+    if (clientConfig.codecType == MediaResourceSubType::kHwVideoCodec ||
+        clientConfig.codecType == MediaResourceSubType::kSwVideoCodec ||
+        clientConfig.codecType == MediaResourceSubType::kHwImageCodec ||
+        clientConfig.codecType == MediaResourceSubType::kSwImageCodec) {
         // Update the pixel count for this process
         increasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
     }
@@ -236,7 +231,7 @@
          clientConfig.clientInfo.name.c_str(),
          static_cast<int32_t>(clientConfig.codecType),
          clientConfig.isEncoder,
-         clientConfig.isHardware,
+         isHardwareCodec(clientConfig.codecType),
          clientConfig.width, clientConfig.height,
          systemConcurrentCodecs,
          appConcurrentCodecs,
@@ -249,7 +244,7 @@
 
     ALOGV("%s: Pushed MEDIA_CODEC_STARTED atom: "
           "Process[pid(%d): uid(%d)] "
-          "Codec: [%s: %ju] is %s %s %s "
+          "Codec: [%s: %ju] is %s %s "
           "Timestamp: %jd "
           "Resolution: %d x %d "
           "ConcurrentCodec[%d]={System: %d App: %d} "
@@ -259,7 +254,6 @@
           pid, clientConfig.clientInfo.uid,
           clientConfig.clientInfo.name.c_str(),
           clientConfig.id,
-          clientConfig.isHardware? "hardware" : "software",
           getCodecType(clientConfig.codecType),
           clientConfig.isEncoder? "encoder" : "decoder",
           clientConfig.timeStamp,
@@ -273,13 +267,13 @@
     std::scoped_lock lock(mLock);
     int pid = clientConfig.clientInfo.pid;
     // Update the concurrent codec count for this process.
-    CodecBucket codecBucket = getCodecBucket(clientConfig.isHardware,
-                                             clientConfig.isEncoder,
-                                             clientConfig.codecType);
+    CodecBucket codecBucket = getCodecBucket(clientConfig.isEncoder, clientConfig.codecType);
     decreaseConcurrentCodecs(pid, codecBucket);
 
-    if (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
-        clientConfig.codecType == MediaResourceSubType::kImageCodec) {
+    if (clientConfig.codecType == MediaResourceSubType::kHwVideoCodec ||
+        clientConfig.codecType == MediaResourceSubType::kSwVideoCodec ||
+        clientConfig.codecType == MediaResourceSubType::kHwImageCodec ||
+        clientConfig.codecType == MediaResourceSubType::kSwImageCodec) {
         // Update the pixel count for this process
         decreasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
     }
@@ -319,7 +313,7 @@
          clientConfig.clientInfo.name.c_str(),
          static_cast<int32_t>(clientConfig.codecType),
          clientConfig.isEncoder,
-         clientConfig.isHardware,
+         isHardwareCodec(clientConfig.codecType),
          clientConfig.width, clientConfig.height,
          systemConcurrentCodecs,
          appConcurrentCodecs,
@@ -327,7 +321,7 @@
          usageTime);
     ALOGV("%s: Pushed MEDIA_CODEC_STOPPED atom: "
           "Process[pid(%d): uid(%d)] "
-          "Codec: [%s: %ju] is %s %s %s "
+          "Codec: [%s: %ju] is %s %s "
           "Timestamp: %jd Usage time: %jd "
           "Resolution: %d x %d "
           "ConcurrentCodec[%d]={System: %d App: %d} "
@@ -336,7 +330,6 @@
           pid, clientConfig.clientInfo.uid,
           clientConfig.clientInfo.name.c_str(),
           clientConfig.id,
-          clientConfig.isHardware? "hardware" : "software",
           getCodecType(clientConfig.codecType),
           clientConfig.isEncoder? "encoder" : "decoder",
           clientConfig.timeStamp, usageTime,
@@ -433,9 +426,9 @@
 }
 
 void ResourceManagerMetrics::pushReclaimAtom(const ClientInfoParcel& clientInfo,
-                        const std::vector<int>& priorities,
-                        const std::vector<std::shared_ptr<IResourceManagerClient>>& clients,
-                        const PidUidVector& idList, bool reclaimed) {
+                                             const std::vector<int>& priorities,
+                                             const std::vector<ClientInfo>& targetClients,
+                                             bool reclaimed) {
     // Construct the metrics for codec reclaim as a pushed atom.
     // 1. Information about the requester.
     //  - UID and the priority (oom score)
@@ -460,7 +453,7 @@
     //    - UID and the Priority (oom score)
     int32_t reclaimStatus = MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
     if (!reclaimed) {
-      if (clients.size() == 0) {
+      if (targetClients.size() == 0) {
         // No clients to reclaim from
         reclaimStatus =
             MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
@@ -470,10 +463,9 @@
             MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
       }
     }
-    int32_t noOfCodecsReclaimed = clients.size();
+    int32_t noOfCodecsReclaimed = targetClients.size();
     int32_t targetIndex = 1;
-    for (PidUidVector::const_reference id : idList) {
-        int32_t targetUid = id.second;
+    for (const ClientInfo& targetClient : targetClients) {
         int targetPriority = priorities[targetIndex];
         // Post the pushed atom
         int result = stats_write(
@@ -485,7 +477,7 @@
             reclaimStatus,
             noOfCodecsReclaimed,
             targetIndex,
-            targetUid,
+            targetClient.mUid,
             targetPriority);
         ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
               "Requester[pid(%d): uid(%d): priority(%d)] "
@@ -497,7 +489,7 @@
               __func__, callingPid, requesterUid, requesterPriority,
               clientName.c_str(), noOfConcurrentCodecs,
               reclaimStatus, noOfCodecsReclaimed,
-              targetIndex, id.first, targetUid, targetPriority, result);
+              targetIndex, targetClient.mPid, targetClient.mUid, targetPriority, result);
         targetIndex++;
     }
 }
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index d99c5b1..a9bc34b 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -135,8 +135,8 @@
     // To be called when after a reclaim event.
     void pushReclaimAtom(const ClientInfoParcel& clientInfo,
                          const std::vector<int>& priorities,
-                         const std::vector<std::shared_ptr<IResourceManagerClient>>& clients,
-                         const PidUidVector& idList, bool reclaimed);
+                         const std::vector<ClientInfo>& targetClients,
+                         bool reclaimed);
 
     // Add this pid/uid set to monitor for the process termination state.
     void addPid(int pid, uid_t uid = 0);
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 9552e25..4bdb6e1 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -24,164 +24,69 @@
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <cutils/sched_policy.h>
-#include <dirent.h>
 #include <media/MediaResourcePolicy.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/ProcessInfo.h>
 #include <mediautils/SchedulingPolicyService.h>
-#include <string.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <sys/time.h>
-#include <unistd.h>
+#include <com_android_media_codec_flags.h>
 
-#include "IMediaResourceMonitor.h"
 #include "ResourceManagerMetrics.h"
-#include "ResourceManagerService.h"
-#include "ResourceManagerServiceUtils.h"
+#include "ResourceManagerServiceNew.h"
 #include "ResourceObserverService.h"
 #include "ServiceLog.h"
 
+namespace CodecFeatureFlags = com::android::media::codec::flags;
+
 namespace android {
 
-class DeathNotifier : public std::enable_shared_from_this<DeathNotifier> {
-
-    // BinderDiedContext defines the cookie that is passed as DeathRecipient.
-    // Since this can maintain more context than a raw pointer, we can
-    // validate the scope of DeathNotifier, before deferencing it upon the binder death.
-    struct BinderDiedContext {
-        std::weak_ptr<DeathNotifier> mDeathNotifier;
-    };
-public:
-    DeathNotifier(const std::shared_ptr<IResourceManagerClient>& client,
-                  const std::shared_ptr<ResourceManagerService>& service,
-                  const ClientInfoParcel& clientInfo,
-                  AIBinder_DeathRecipient* recipient);
-
-    virtual ~DeathNotifier() {
-        unlink();
+void ResourceManagerService::getResourceDump(std::string& resourceLog) const {
+    PidResourceInfosMap mapCopy;
+    std::map<int, int> overridePidMapCopy;
+    {
+        std::scoped_lock lock{mLock};
+        mapCopy = mMap;  // Shadow copy, real copy will happen on write.
+        overridePidMapCopy = mOverridePidMap;
     }
 
-    void unlink() {
-        if (mClient != nullptr) {
-            // Register for the callbacks by linking to death notification.
-            AIBinder_unlinkToDeath(mClient->asBinder().get(), mRecipient, mCookie);
-            mClient = nullptr;
-        }
-    }
-
-    // Implement death recipient
-    static void BinderDiedCallback(void* cookie);
-    static void BinderUnlinkedCallback(void* cookie);
-    virtual void binderDied();
-
-private:
-    void link() {
-        // Create the context that is passed as cookie to the binder death notification.
-        // The context gets deleted at BinderUnlinkedCallback.
-        mCookie = new BinderDiedContext{.mDeathNotifier = weak_from_this()};
-        // Register for the callbacks by linking to death notification.
-        AIBinder_linkToDeath(mClient->asBinder().get(), mRecipient, mCookie);
-    }
-
-protected:
-    std::shared_ptr<IResourceManagerClient> mClient;
-    std::weak_ptr<ResourceManagerService> mService;
-    const ClientInfoParcel mClientInfo;
-    AIBinder_DeathRecipient* mRecipient;
-    BinderDiedContext* mCookie;
-};
-
-DeathNotifier::DeathNotifier(const std::shared_ptr<IResourceManagerClient>& client,
-                             const std::shared_ptr<ResourceManagerService>& service,
-                             const ClientInfoParcel& clientInfo,
-                             AIBinder_DeathRecipient* recipient)
-    : mClient(client), mService(service), mClientInfo(clientInfo),
-      mRecipient(recipient), mCookie(nullptr) {
-    link();
-}
-
-//static
-void DeathNotifier::BinderUnlinkedCallback(void* cookie) {
-    BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
-    // Since we don't need the context anymore, we are deleting it now.
-    delete context;
-}
-
-//static
-void DeathNotifier::BinderDiedCallback(void* cookie) {
-    BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
-
-    // Validate the context and check if the DeathNotifier object is still in scope.
-    if (context != nullptr) {
-        std::shared_ptr<DeathNotifier> thiz = context->mDeathNotifier.lock();
-        if (thiz != nullptr) {
-            thiz->binderDied();
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    resourceLog.append("  Processes:\n");
+    for (const auto& [pid, infos] : mapCopy) {
+        snprintf(buffer, SIZE, "    Pid: %d\n", pid);
+        resourceLog.append(buffer);
+        int priority = 0;
+        if (getPriority_l(pid, &priority)) {
+            snprintf(buffer, SIZE, "    Priority: %d\n", priority);
         } else {
-            ALOGI("DeathNotifier is out of scope already");
+            snprintf(buffer, SIZE, "    Priority: <unknown>\n");
         }
-    }
-}
+        resourceLog.append(buffer);
 
-void DeathNotifier::binderDied() {
-    // Don't check for pid validity since we know it's already dead.
-    std::shared_ptr<ResourceManagerService> service = mService.lock();
-    if (service == nullptr) {
-        ALOGW("ResourceManagerService is dead as well.");
-        return;
-    }
+        for (const auto& [infoKey, info] : infos) {
+            resourceLog.append("      Client:\n");
+            snprintf(buffer, SIZE, "        Id: %lld\n", (long long)info.clientId);
+            resourceLog.append(buffer);
 
-    service->overridePid(mClientInfo.pid, -1);
-    // thiz is freed in the call below, so it must be last call referring thiz
-    service->removeResource(mClientInfo, false /*checkValid*/);
-}
+            std::string clientName = info.name;
+            snprintf(buffer, SIZE, "        Name: %s\n", clientName.c_str());
+            resourceLog.append(buffer);
 
-class OverrideProcessInfoDeathNotifier : public DeathNotifier {
-public:
-    OverrideProcessInfoDeathNotifier(const std::shared_ptr<IResourceManagerClient>& client,
-                                     const std::shared_ptr<ResourceManagerService>& service,
-                                     const ClientInfoParcel& clientInfo,
-                                     AIBinder_DeathRecipient* recipient)
-            : DeathNotifier(client, service, clientInfo, recipient) {}
-
-    virtual ~OverrideProcessInfoDeathNotifier() {}
-
-    virtual void binderDied();
-};
-
-void OverrideProcessInfoDeathNotifier::binderDied() {
-    // Don't check for pid validity since we know it's already dead.
-    std::shared_ptr<ResourceManagerService> service = mService.lock();
-    if (service == nullptr) {
-        ALOGW("ResourceManagerService is dead as well.");
-        return;
-    }
-
-    service->removeProcessInfoOverride(mClientInfo.pid);
-}
-
-static void notifyResourceGranted(int pid, const std::vector<MediaResourceParcel>& resources) {
-    static const char* const kServiceName = "media_resource_monitor";
-    sp<IBinder> binder = defaultServiceManager()->checkService(String16(kServiceName));
-    if (binder != NULL) {
-        sp<IMediaResourceMonitor> service = interface_cast<IMediaResourceMonitor>(binder);
-        for (size_t i = 0; i < resources.size(); ++i) {
-            switch (resources[i].subType) {
-                case MediaResource::SubType::kAudioCodec:
-                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_AUDIO_CODEC);
-                    break;
-                case MediaResource::SubType::kVideoCodec:
-                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_VIDEO_CODEC);
-                    break;
-                case MediaResource::SubType::kImageCodec:
-                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_IMAGE_CODEC);
-                    break;
-                case MediaResource::SubType::kUnspecifiedSubType:
-                    break;
+            const ResourceList& resources = info.resources;
+            resourceLog.append("        Resources:\n");
+            for (auto it = resources.begin(); it != resources.end(); it++) {
+                snprintf(buffer, SIZE, "          %s\n", toString(it->second).c_str());
+                resourceLog.append(buffer);
             }
         }
     }
+
+    resourceLog.append("  Process Pid override:\n");
+    for (auto it = overridePidMapCopy.begin(); it != overridePidMapCopy.end(); ++it) {
+        snprintf(buffer, SIZE, "    Original Pid: %d,  Override Pid: %d\n",
+            it->first, it->second);
+        resourceLog.append(buffer);
+    }
 }
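
For reference, a minimal standalone sketch of the copy-then-format pattern used by getResourceDump() above: the maps are snapshotted under mLock and all string formatting happens on the copies, so the dump path never formats while holding the lock. The DumpSketch class and its members are illustrative stand-ins, not the AOSP types.

#include <cstdio>
#include <map>
#include <mutex>
#include <string>

class DumpSketch {
public:
    void overridePid(int originalPid, int newPid) {
        std::scoped_lock lock{mLock};
        mOverridePidMap[originalPid] = newPid;
    }

    void getResourceDump(std::string& resourceLog) const {
        std::map<int, int> overridePidMapCopy;
        {
            std::scoped_lock lock{mLock};
            overridePidMapCopy = mOverridePidMap;  // snapshot under the lock
        }
        // Format outside the lock, on the copy.
        char buffer[256];
        resourceLog.append("  Process Pid override:\n");
        for (const auto& [original, overridden] : overridePidMapCopy) {
            snprintf(buffer, sizeof(buffer), "    Original Pid: %d,  Override Pid: %d\n",
                     original, overridden);
            resourceLog.append(buffer);
        }
    }

private:
    mutable std::mutex mLock;
    std::map<int, int> mOverridePidMap;
};

int main() {
    DumpSketch sketch;
    sketch.overridePid(1234, 5678);
    std::string log;
    sketch.getResourceDump(log);
    printf("%s", log.c_str());
    return 0;
}
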
 
 binder_status_t ResourceManagerService::dump(int fd, const char** /*args*/, uint32_t /*numArgs*/) {
@@ -196,20 +101,20 @@
         return PERMISSION_DENIED;
     }
 
-    PidResourceInfosMap mapCopy;
     bool supportsMultipleSecureCodecs;
     bool supportsSecureWithNonSecureCodec;
-    std::map<int, int> overridePidMapCopy;
     String8 serviceLog;
     {
         std::scoped_lock lock{mLock};
-        mapCopy = mMap;  // Shadow copy, real copy will happen on write.
         supportsMultipleSecureCodecs = mSupportsMultipleSecureCodecs;
         supportsSecureWithNonSecureCodec = mSupportsSecureWithNonSecureCodec;
         serviceLog = mServiceLog->toString("    " /* linePrefix */);
-        overridePidMapCopy = mOverridePidMap;
     }
 
+    // Get all the resource (and override pid) logs
+    std::string resourceLog;
+    getResourceDump(resourceLog);
+
     const size_t SIZE = 256;
     char buffer[SIZE];
     snprintf(buffer, SIZE, "ResourceManagerService: %p\n", this);
@@ -221,41 +126,8 @@
             supportsSecureWithNonSecureCodec);
     result.append(buffer);
 
-    result.append("  Processes:\n");
-    for (const auto& [pid, infos] : mapCopy) {
-        snprintf(buffer, SIZE, "    Pid: %d\n", pid);
-        result.append(buffer);
-        int priority = 0;
-        if (getPriority_l(pid, &priority)) {
-            snprintf(buffer, SIZE, "    Priority: %d\n", priority);
-        } else {
-            snprintf(buffer, SIZE, "    Priority: <unknown>\n");
-        }
-        result.append(buffer);
+    result.append(resourceLog.c_str());
 
-        for (const auto& [infoKey, info] : infos) {
-            result.append("      Client:\n");
-            snprintf(buffer, SIZE, "        Id: %lld\n", (long long)info.clientId);
-            result.append(buffer);
-
-            std::string clientName = info.name;
-            snprintf(buffer, SIZE, "        Name: %s\n", clientName.c_str());
-            result.append(buffer);
-
-            const ResourceList& resources = info.resources;
-            result.append("        Resources:\n");
-            for (auto it = resources.begin(); it != resources.end(); it++) {
-                snprintf(buffer, SIZE, "          %s\n", toString(it->second).c_str());
-                result.append(buffer);
-            }
-        }
-    }
-    result.append("  Process Pid override:\n");
-    for (auto it = overridePidMapCopy.begin(); it != overridePidMapCopy.end(); ++it) {
-        snprintf(buffer, SIZE, "    Original Pid: %d,  Override Pid: %d\n",
-            it->first, it->second);
-        result.append(buffer);
-    }
     result.append("  Events logs (most recent at top):\n");
     result.append(serviceLog);
 
@@ -297,9 +169,7 @@
       mServiceLog(new ServiceLog()),
       mSupportsMultipleSecureCodecs(true),
       mSupportsSecureWithNonSecureCodec(true),
-      mCpuBoostCount(0),
-      mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
-                      AIBinder_DeathRecipient_new(DeathNotifier::BinderDiedCallback))) {
+      mCpuBoostCount(0) {
     mSystemCB->noteResetVideo();
     // Create ResourceManagerMetrics that handles all the metrics.
     mResourceManagerMetrics = std::make_unique<ResourceManagerMetrics>(mProcessInfo);
@@ -307,8 +177,7 @@
 
 //static
 void ResourceManagerService::instantiate() {
-    std::shared_ptr<ResourceManagerService> service =
-            ::ndk::SharedRefBase::make<ResourceManagerService>();
+    std::shared_ptr<ResourceManagerService> service = Create();
     binder_status_t status =
                         AServiceManager_addServiceWithFlags(
                         service->asBinder().get(), getServiceName(),
@@ -329,6 +198,42 @@
     //ABinderProcess_startThreadPool();
 }
 
+std::shared_ptr<ResourceManagerService> ResourceManagerService::Create() {
+    return Create(new ProcessInfo(), new SystemCallbackImpl());
+}
+
+std::shared_ptr<ResourceManagerService> ResourceManagerService::Create(
+        const sp<ProcessInfoInterface>& processInfo,
+        const sp<SystemCallbackInterface>& systemResource) {
+    std::shared_ptr<ResourceManagerService> service = nullptr;
+    // If the codec importance feature is on, create the refactored implementation.
+    if (CodecFeatureFlags::codec_importance()) {
+        service = ::ndk::SharedRefBase::make<ResourceManagerServiceNew>(processInfo,
+                                                                        systemResource);
+    } else {
+        service = ::ndk::SharedRefBase::make<ResourceManagerService>(processInfo,
+                                                                     systemResource);
+    }
+
+    if (service != nullptr) {
+        service->init();
+    }
+
+    return service;
+}
+
+// TEST only function.
+std::shared_ptr<ResourceManagerService> ResourceManagerService::CreateNew(
+        const sp<ProcessInfoInterface>& processInfo,
+        const sp<SystemCallbackInterface>& systemResource) {
+    std::shared_ptr<ResourceManagerService> service =
+        ::ndk::SharedRefBase::make<ResourceManagerServiceNew>(processInfo, systemResource);
+    service->init();
+    return service;
+}
+
+void ResourceManagerService::init() {}
+
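
A minimal sketch of the flag-gated, two-phase factory that Create() implements above: pick the implementation based on a feature flag, then call init() before handing the object out. Everything below except the Create()/init() shape is a stand-in (the real flag is CodecFeatureFlags::codec_importance() and the real types are the two ResourceManagerService implementations).

#include <cstdio>
#include <memory>

struct Service {
    virtual ~Service() = default;
    virtual void init() {}
    virtual const char* name() const { return "legacy implementation"; }
};

struct ServiceNew : Service {
    const char* name() const override { return "refactored implementation"; }
};

// Stand-in for the codec-importance feature flag query.
static bool codecImportanceEnabled() { return true; }

static std::shared_ptr<Service> Create() {
    std::shared_ptr<Service> service;
    if (codecImportanceEnabled()) {
        service = std::make_shared<ServiceNew>();
    } else {
        service = std::make_shared<Service>();
    }
    service->init();  // two-phase construction, mirroring Create() above
    return service;
}

int main() {
    printf("created the %s\n", Create()->name());
    return 0;
}
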
 ResourceManagerService::~ResourceManagerService() {}
 
 void ResourceManagerService::setObserverService(
@@ -353,8 +258,7 @@
     return Status::ok();
 }
 
-void ResourceManagerService::onFirstAdded(const MediaResourceParcel& resource,
-        const ResourceInfo& clientInfo) {
+void ResourceManagerService::onFirstAdded(const MediaResourceParcel& resource, uid_t uid) {
     // first time added
     if (resource.type == MediaResource::Type::kCpuBoost
      && resource.subType == MediaResource::SubType::kUnspecifiedSubType) {
@@ -366,13 +270,13 @@
         }
         mCpuBoostCount++;
     } else if (resource.type == MediaResource::Type::kBattery
-            && resource.subType == MediaResource::SubType::kVideoCodec) {
-        mSystemCB->noteStartVideo(clientInfo.uid);
+            && (resource.subType == MediaResource::SubType::kHwVideoCodec
+                || resource.subType == MediaResource::SubType::kSwVideoCodec)) {
+        mSystemCB->noteStartVideo(uid);
     }
 }
 
-void ResourceManagerService::onLastRemoved(const MediaResourceParcel& resource,
-        const ResourceInfo& clientInfo) {
+void ResourceManagerService::onLastRemoved(const MediaResourceParcel& resource, uid_t uid) {
     if (resource.type == MediaResource::Type::kCpuBoost
             && resource.subType == MediaResource::SubType::kUnspecifiedSubType
             && mCpuBoostCount > 0) {
@@ -380,25 +284,9 @@
             mSystemCB->requestCpusetBoost(false);
         }
     } else if (resource.type == MediaResource::Type::kBattery
-            && resource.subType == MediaResource::SubType::kVideoCodec) {
-        mSystemCB->noteStopVideo(clientInfo.uid);
-    }
-}
-
-void ResourceManagerService::mergeResources(MediaResourceParcel& r1,
-        const MediaResourceParcel& r2) {
-    // The resource entry on record is maintained to be in [0,INT64_MAX].
-    // Clamp if merging in the new resource value causes it to go out of bound.
-    // Note that the new resource value could be negative, eg.DrmSession, the
-    // value goes lower when the session is used more often. During reclaim
-    // the session with the highest value (lowest usage) would be closed.
-    if (r2.value < INT64_MAX - r1.value) {
-        r1.value += r2.value;
-        if (r1.value < 0) {
-            r1.value = 0;
-        }
-    } else {
-        r1.value = INT64_MAX;
+            && (resource.subType == MediaResource::SubType::kHwVideoCodec
+                || resource.subType == MediaResource::SubType::kSwVideoCodec)) {
+        mSystemCB->noteStopVideo(uid);
     }
 }
 
@@ -440,7 +328,7 @@
                 ALOGW("Ignoring request to add new resource entry with value <= 0");
                 continue;
             }
-            onFirstAdded(res, info);
+            onFirstAdded(res, info.uid);
             info.resources[resType] = res;
         } else {
             mergeResources(info.resources[resType], res);
@@ -454,8 +342,8 @@
         }
     }
     if (info.deathNotifier == nullptr && client != nullptr) {
-        info.deathNotifier = std::make_shared<DeathNotifier>(
-            client, ref<ResourceManagerService>(), clientInfo, mDeathRecipient.get());
+        info.deathNotifier = DeathNotifier::Create(
+            client, ref<ResourceManagerService>(), clientInfo);
     }
     if (mObserverService != nullptr && !resourceAdded.empty()) {
         mObserverService->onResourceAdded(uid, pid, resourceAdded);
@@ -511,7 +399,7 @@
             if (resource.value > res.value) {
                 resource.value -= res.value;
             } else {
-                onLastRemoved(res, info);
+                onLastRemoved(res, info.uid);
                 actualRemoved.value = resource.value;
                 info.resources.erase(resType);
             }
@@ -566,7 +454,7 @@
 
     const ResourceInfo& info = foundClient->second;
     for (auto it = info.resources.begin(); it != info.resources.end(); it++) {
-        onLastRemoved(it->second, info);
+        onLastRemoved(it->second, info.uid);
     }
 
     // Since this client has been removed, update the metrics collector.
@@ -580,17 +468,132 @@
     return Status::ok();
 }
 
-void ResourceManagerService::getClientForResource_l(int callingPid,
-        const MediaResourceParcel *res,
-        PidUidVector* idVector,
-        std::vector<std::shared_ptr<IResourceManagerClient>>* clients) {
+void ResourceManagerService::getClientForResource_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        std::vector<ClientInfo>& clientsInfo) {
+    int callingPid = resourceRequestInfo.mCallingPid;
+    const MediaResourceParcel* res = resourceRequestInfo.mResource;
     if (res == NULL) {
         return;
     }
-    std::shared_ptr<IResourceManagerClient> client;
-    if (getLowestPriorityBiggestClient_l(callingPid, res->type, res->subType, idVector, &client)) {
-        clients->push_back(client);
+
+    // Before looking into other processes, check if we have clients marked for
+    // pending removal in the same process.
+    ClientInfo clientInfo;
+    if (getBiggestClientPendingRemoval_l(callingPid, res->type, res->subType, clientInfo)) {
+        clientsInfo.emplace_back(clientInfo);
+        return;
     }
+
+    // Now find client(s) from the lowest priority process that has the needed resources.
+    if (getLowestPriorityBiggestClient_l(resourceRequestInfo, clientInfo)) {
+        clientsInfo.push_back(clientInfo);
+    }
+}
+
+bool ResourceManagerService::getTargetClients(
+        int32_t callingPid,
+        const std::vector<MediaResourceParcel>& resources,
+        std::vector<ClientInfo>& targetClients) {
+    std::scoped_lock lock{mLock};
+    if (!mProcessInfo->isPidTrusted(callingPid)) {
+        pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using actual calling pid %d", __FUNCTION__,
+                callingPid, actualCallingPid);
+        callingPid = actualCallingPid;
+    }
+    const MediaResourceParcel *secureCodec = NULL;
+    const MediaResourceParcel *nonSecureCodec = NULL;
+    const MediaResourceParcel *graphicMemory = NULL;
+    const MediaResourceParcel *drmSession = NULL;
+    for (size_t i = 0; i < resources.size(); ++i) {
+        switch (resources[i].type) {
+            case MediaResource::Type::kSecureCodec:
+                secureCodec = &resources[i];
+                break;
+            case MediaResource::Type::kNonSecureCodec:
+                nonSecureCodec = &resources[i];
+                break;
+            case MediaResource::Type::kGraphicMemory:
+                graphicMemory = &resources[i];
+                break;
+            case MediaResource::Type::kDrmSession:
+                drmSession = &resources[i];
+                break;
+            default:
+                break;
+        }
+    }
+
+    // first pass to handle secure/non-secure codec conflict
+    if (secureCodec != NULL) {
+        MediaResourceParcel mediaResource{.type = MediaResource::Type::kSecureCodec,
+                                          .subType = secureCodec->subType};
+        ResourceRequestInfo resourceRequestInfo{callingPid, &mediaResource};
+        if (!mSupportsMultipleSecureCodecs) {
+            if (!getAllClients_l(resourceRequestInfo, targetClients)) {
+                return false;
+            }
+        }
+        if (!mSupportsSecureWithNonSecureCodec) {
+            mediaResource.type = MediaResource::Type::kNonSecureCodec;
+            if (!getAllClients_l(resourceRequestInfo, targetClients)) {
+                return false;
+            }
+        }
+    }
+    if (nonSecureCodec != NULL) {
+        if (!mSupportsSecureWithNonSecureCodec) {
+            MediaResourceParcel mediaResource{.type = MediaResource::Type::kSecureCodec,
+                                              .subType = nonSecureCodec->subType};
+            ResourceRequestInfo resourceRequestInfo{callingPid, &mediaResource};
+            if (!getAllClients_l(resourceRequestInfo, targetClients)) {
+                return false;
+            }
+        }
+    }
+
+    if (drmSession != NULL) {
+        ResourceRequestInfo resourceRequestInfo{callingPid, drmSession};
+        getClientForResource_l(resourceRequestInfo, targetClients);
+        if (targetClients.size() == 0) {
+            return false;
+        }
+    }
+
+    if (targetClients.size() == 0 && graphicMemory != nullptr) {
+        // if no secure/non-secure codec conflict, run second pass to handle other resources.
+        ResourceRequestInfo resourceRequestInfo{callingPid, graphicMemory};
+        getClientForResource_l(resourceRequestInfo, targetClients);
+    }
+
+    if (targetClients.size() == 0) {
+        // if we are here, run the third pass to free one codec with the same type.
+        if (secureCodec != nullptr) {
+            ResourceRequestInfo resourceRequestInfo{callingPid, secureCodec};
+            getClientForResource_l(resourceRequestInfo, targetClients);
+        }
+        if (nonSecureCodec != nullptr) {
+            ResourceRequestInfo resourceRequestInfo{callingPid, nonSecureCodec};
+            getClientForResource_l(resourceRequestInfo, targetClients);
+        }
+    }
+
+    if (targetClients.size() == 0) {
+        // if we are here, run the fourth pass to free one codec with the different type.
+        if (secureCodec != nullptr) {
+            MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
+            ResourceRequestInfo resourceRequestInfo{callingPid, &temp};
+            getClientForResource_l(resourceRequestInfo, targetClients);
+        }
+        if (nonSecureCodec != nullptr) {
+            MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
+            ResourceRequestInfo resourceRequestInfo{callingPid, &temp};
+            getClientForResource_l(resourceRequestInfo, targetClients);
+        }
+    }
+
+    return !targetClients.empty();
 }
 
 Status ResourceManagerService::reclaimResource(const ClientInfoParcel& clientInfo,
@@ -602,158 +605,112 @@
     mServiceLog->add(log);
     *_aidl_return = false;
 
-    std::vector<std::shared_ptr<IResourceManagerClient>> clients;
-    PidUidVector idVector;
-    {
-        std::scoped_lock lock{mLock};
-        if (!mProcessInfo->isPidTrusted(callingPid)) {
-            pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
-            ALOGW("%s called with untrusted pid %d, using actual calling pid %d", __FUNCTION__,
-                    callingPid, actualCallingPid);
-            callingPid = actualCallingPid;
-        }
-        const MediaResourceParcel *secureCodec = NULL;
-        const MediaResourceParcel *nonSecureCodec = NULL;
-        const MediaResourceParcel *graphicMemory = NULL;
-        const MediaResourceParcel *drmSession = NULL;
-        for (size_t i = 0; i < resources.size(); ++i) {
-            switch (resources[i].type) {
-                case MediaResource::Type::kSecureCodec:
-                    secureCodec = &resources[i];
-                    break;
-                case MediaResource::Type::kNonSecureCodec:
-                    nonSecureCodec = &resources[i];
-                    break;
-                case MediaResource::Type::kGraphicMemory:
-                    graphicMemory = &resources[i];
-                    break;
-                case MediaResource::Type::kDrmSession:
-                    drmSession = &resources[i];
-                    break;
-                default:
-                    break;
-            }
-        }
-
-        // first pass to handle secure/non-secure codec conflict
-        if (secureCodec != NULL) {
-            if (!mSupportsMultipleSecureCodecs) {
-                if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec,
-                            secureCodec->subType, &idVector, &clients)) {
-                    return Status::ok();
-                }
-            }
-            if (!mSupportsSecureWithNonSecureCodec) {
-                if (!getAllClients_l(callingPid, MediaResource::Type::kNonSecureCodec,
-                            secureCodec->subType, &idVector, &clients)) {
-                    return Status::ok();
-                }
-            }
-        }
-        if (nonSecureCodec != NULL) {
-            if (!mSupportsSecureWithNonSecureCodec) {
-                if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec,
-                        nonSecureCodec->subType, &idVector, &clients)) {
-                    return Status::ok();
-                }
-            }
-        }
-        if (drmSession != NULL) {
-            getClientForResource_l(callingPid, drmSession, &idVector, &clients);
-            if (clients.size() == 0) {
-                return Status::ok();
-            }
-        }
-
-        if (clients.size() == 0) {
-            // if no secure/non-secure codec conflict, run second pass to handle other resources.
-            getClientForResource_l(callingPid, graphicMemory, &idVector, &clients);
-        }
-
-        if (clients.size() == 0) {
-            // if we are here, run the third pass to free one codec with the same type.
-            getClientForResource_l(callingPid, secureCodec, &idVector, &clients);
-            getClientForResource_l(callingPid, nonSecureCodec, &idVector, &clients);
-        }
-
-        if (clients.size() == 0) {
-            // if we are here, run the fourth pass to free one codec with the different type.
-            if (secureCodec != NULL) {
-                MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
-                getClientForResource_l(callingPid, &temp, &idVector, &clients);
-            }
-            if (nonSecureCodec != NULL) {
-                MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
-                getClientForResource_l(callingPid, &temp, &idVector, &clients);
-            }
-        }
+    // Check if there are any resources to be reclaimed before processing.
+    if (resources.empty()) {
+        return Status::ok();
     }
 
-    *_aidl_return = reclaimUnconditionallyFrom(clients);
+    std::vector<ClientInfo> targetClients;
+    if (!getTargetClients(callingPid, resources, targetClients)) {
+        // Nothing to reclaim from.
+        ALOGI("%s: There aren't any clients to reclaim from", __func__);
+        return Status::ok();
+    }
+
+    *_aidl_return = reclaimUnconditionallyFrom(targetClients);
 
     // Log Reclaim Pushed Atom to statsd
-    pushReclaimAtom(clientInfo, clients, idVector, *_aidl_return);
+    pushReclaimAtom(clientInfo, targetClients, *_aidl_return);
 
     return Status::ok();
 }
 
 void ResourceManagerService::pushReclaimAtom(const ClientInfoParcel& clientInfo,
-                        const std::vector<std::shared_ptr<IResourceManagerClient>>& clients,
-                        const PidUidVector& idVector, bool reclaimed) {
+                                             const std::vector<ClientInfo>& targetClients,
+                                             bool reclaimed) {
     int32_t callingPid = clientInfo.pid;
     int requesterPriority = -1;
     getPriority_l(callingPid, &requesterPriority);
     std::vector<int> priorities;
     priorities.push_back(requesterPriority);
 
-    for (PidUidVector::const_reference id : idVector) {
+    for (const ClientInfo& targetClient : targetClients) {
         int targetPriority = -1;
-        getPriority_l(id.first, &targetPriority);
+        getPriority_l(targetClient.mPid, &targetPriority);
         priorities.push_back(targetPriority);
     }
-    mResourceManagerMetrics->pushReclaimAtom(clientInfo, priorities, clients,
-                                             idVector, reclaimed);
+    mResourceManagerMetrics->pushReclaimAtom(clientInfo, priorities, targetClients, reclaimed);
 }
 
-bool ResourceManagerService::reclaimUnconditionallyFrom(
-        const std::vector<std::shared_ptr<IResourceManagerClient>>& clients) {
-    if (clients.size() == 0) {
+std::shared_ptr<IResourceManagerClient> ResourceManagerService::getClient(
+        int pid, const int64_t& clientId) const {
+    std::map<int, ResourceInfos>::const_iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return nullptr;
+    }
+
+    const ResourceInfos& infos = found->second;
+    ResourceInfos::const_iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return nullptr;
+    }
+
+    return foundClient->second.client;
+}
+
+bool ResourceManagerService::removeClient(int pid, const int64_t& clientId) {
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
         return false;
     }
 
-    std::shared_ptr<IResourceManagerClient> failedClient;
-    for (size_t i = 0; i < clients.size(); ++i) {
-        String8 log = String8::format("reclaimResource from client %p", clients[i].get());
+    ResourceInfos& infos = found->second;
+    ResourceInfos::iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return false;
+    }
+
+    infos.erase(foundClient);
+    return true;
+}
+
+bool ResourceManagerService::reclaimUnconditionallyFrom(
+        const std::vector<ClientInfo>& targetClients) {
+    if (targetClients.size() == 0) {
+        return false;
+    }
+
+    int64_t failedClientId = -1;
+    int32_t failedClientPid = -1;
+    for (const ClientInfo& targetClient : targetClients) {
+        std::shared_ptr<IResourceManagerClient> client = getClient(
+            targetClient.mPid, targetClient.mClientId);
+        if (client == nullptr) {
+            // skip already released clients.
+            continue;
+        }
+        String8 log = String8::format("reclaimResource from client %p", client.get());
         mServiceLog->add(log);
         bool success;
-        Status status = clients[i]->reclaimResource(&success);
+        Status status = client->reclaimResource(&success);
         if (!status.isOk() || !success) {
-            failedClient = clients[i];
+            failedClientId = targetClient.mClientId;
+            failedClientPid = targetClient.mPid;
             break;
         }
     }
 
-    if (failedClient == NULL) {
+    if (failedClientId == -1) {
         return true;
     }
 
-    int failedClientPid = -1;
     {
         std::scoped_lock lock{mLock};
-        bool found = false;
-        for (auto& [pid, infos] : mMap) {
-            for (const auto& [id, info] : infos) {
-                if (info.client == failedClient) {
-                    infos.erase(id);
-                    found = true;
-                    break;
-                }
-            }
-            if (found) {
-                failedClientPid = pid;
-                break;
-            }
-        }
+        bool found = removeClient(failedClientPid, failedClientId);
         if (found) {
             ALOGW("Failed to reclaim resources from client with pid %d", failedClientPid);
         } else {
@@ -764,6 +721,16 @@
     return false;
 }
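
reclaimUnconditionallyFrom() above asks each selected client to release its resources and records the first one that refuses so it can be dropped from tracking. A small standalone sketch of that loop (the Client type here is a stand-in for IResourceManagerClient):

#include <cstdio>
#include <vector>

struct Client {
    long long id;
    // Stand-in for IResourceManagerClient::reclaimResource(); pretend client 7 refuses.
    bool reclaim() const { return id != 7; }
};

int main() {
    std::vector<Client> targets = {{3}, {7}, {11}};
    long long failedClientId = -1;
    for (const Client& client : targets) {
        if (!client.reclaim()) {
            failedClientId = client.id;
            break;  // stop at the first failure, as the service does
        }
    }
    if (failedClientId == -1) {
        printf("all targets released their resources\n");
    } else {
        printf("client %lld failed to release; drop it from tracking and report failure\n",
               failedClientId);
    }
    return 0;
}
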
 
+bool ResourceManagerService::overridePid_l(int32_t originalPid, int32_t newPid) {
+    mOverridePidMap.erase(originalPid);
+    if (newPid != -1) {
+        mOverridePidMap.emplace(originalPid, newPid);
+        return true;
+    }
+
+    return false;
+}
+
 Status ResourceManagerService::overridePid(int originalPid, int newPid) {
     String8 log = String8::format("overridePid(originalPid %d, newPid %d)",
             originalPid, newPid);
@@ -783,9 +750,7 @@
 
     {
         std::scoped_lock lock{mLock};
-        mOverridePidMap.erase(originalPid);
-        if (newPid != -1) {
-            mOverridePidMap.emplace(originalPid, newPid);
+        if (overridePid_l(originalPid, newPid)) {
             mResourceManagerMetrics->addPid(newPid);
         }
     }
@@ -793,6 +758,29 @@
     return Status::ok();
 }
 
+bool ResourceManagerService::overrideProcessInfo_l(
+        const std::shared_ptr<IResourceManagerClient>& client,
+        int pid,
+        int procState,
+        int oomScore) {
+    removeProcessInfoOverride_l(pid);
+
+    if (!mProcessInfo->overrideProcessInfo(pid, procState, oomScore)) {
+        // Override value is rejected by ProcessInfo.
+        return false;
+    }
+
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
+                                .uid = 0,
+                                .id = 0,
+                                .name = "<unknown client>"};
+    auto deathNotifier = DeathNotifier::Create(
+        client, ref<ResourceManagerService>(), clientInfo, true);
+
+    mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{deathNotifier, client});
+    return true;
+}
+
 Status ResourceManagerService::overrideProcessInfo(
         const std::shared_ptr<IResourceManagerClient>& client, int pid, int procState,
         int oomScore) {
@@ -813,23 +801,12 @@
     }
 
     std::scoped_lock lock{mLock};
-    removeProcessInfoOverride_l(pid);
-
-    if (!mProcessInfo->overrideProcessInfo(pid, procState, oomScore)) {
+    if (!overrideProcessInfo_l(client, pid, procState, oomScore)) {
         // Override value is rejected by ProcessInfo.
         return Status::fromServiceSpecificError(BAD_VALUE);
     }
-
-    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
-                                .uid = 0,
-                                .id = 0,
-                                .name = "<unknown client>"};
-    auto deathNotifier = std::make_shared<OverrideProcessInfoDeathNotifier>(
-            client, ref<ResourceManagerService>(), clientInfo, mDeathRecipient.get());
-
-    mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{deathNotifier, client});
-
     return Status::ok();
+
 }
 
 void ResourceManagerService::removeProcessInfoOverride(int pid) {
@@ -886,7 +863,7 @@
     String8 log = String8::format("reclaimResourcesFromClientsPendingRemoval(pid %d)", pid);
     mServiceLog->add(log);
 
-    std::vector<std::shared_ptr<IResourceManagerClient>> clients;
+    std::vector<ClientInfo> targetClients;
     {
         std::scoped_lock lock{mLock};
         if (!mProcessInfo->isPidTrusted(pid)) {
@@ -904,41 +881,43 @@
                 // Codec resources are segregated by audio, video and image domains.
                 case MediaResource::Type::kSecureCodec:
                 case MediaResource::Type::kNonSecureCodec:
-                    for (MediaResource::SubType subType : {MediaResource::SubType::kAudioCodec,
-                                                           MediaResource::SubType::kVideoCodec,
-                                                           MediaResource::SubType::kImageCodec}) {
-                        std::shared_ptr<IResourceManagerClient> client;
-                        uid_t uid = 0;
-                        if (getBiggestClientPendingRemoval_l(pid, type, subType, uid, &client)) {
-                            clients.push_back(client);
+                    for (MediaResource::SubType subType : {MediaResource::SubType::kHwAudioCodec,
+                                                           MediaResource::SubType::kSwAudioCodec,
+                                                           MediaResource::SubType::kHwVideoCodec,
+                                                           MediaResource::SubType::kSwVideoCodec,
+                                                           MediaResource::SubType::kHwImageCodec,
+                                                           MediaResource::SubType::kSwImageCodec}) {
+                        ClientInfo clientInfo;
+                        if (getBiggestClientPendingRemoval_l(pid, type, subType, clientInfo)) {
+                            targetClients.emplace_back(clientInfo);
                             continue;
                         }
                     }
                     break;
                 // Non-codec resources are shared by audio, video and image codecs (no subtype).
                 default:
-                    std::shared_ptr<IResourceManagerClient> client;
-                    uid_t uid = 0;
+                    ClientInfo clientInfo;
                     if (getBiggestClientPendingRemoval_l(pid, type,
-                            MediaResource::SubType::kUnspecifiedSubType, uid, &client)) {
-                        clients.push_back(client);
+                            MediaResource::SubType::kUnspecifiedSubType, clientInfo)) {
+                        targetClients.emplace_back(clientInfo);
                     }
                     break;
             }
         }
     }
 
-    if (!clients.empty()) {
-        reclaimUnconditionallyFrom(clients);
+    if (!targetClients.empty()) {
+        reclaimUnconditionallyFrom(targetClients);
     }
     return Status::ok();
 }
 
-bool ResourceManagerService::getPriority_l(int pid, int* priority) {
+bool ResourceManagerService::getPriority_l(int pid, int* priority) const {
     int newPid = pid;
 
-    if (mOverridePidMap.find(pid) != mOverridePidMap.end()) {
-        newPid = mOverridePidMap[pid];
+    std::map<int, int>::const_iterator found = mOverridePidMap.find(pid);
+    if (found != mOverridePidMap.end()) {
+        newPid = found->second;
         ALOGD("getPriority_l: use override pid %d instead original pid %d",
                 newPid, pid);
     }
@@ -946,73 +925,65 @@
     return mProcessInfo->getPriority(newPid, priority);
 }
 
-bool ResourceManagerService::getAllClients_l(int callingPid, MediaResource::Type type,
-        MediaResource::SubType subType,
-        PidUidVector* idVector,
-        std::vector<std::shared_ptr<IResourceManagerClient>>* clients) {
-    std::vector<std::shared_ptr<IResourceManagerClient>> temp;
-    PidUidVector tempIdList;
+bool ResourceManagerService::getAllClients_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        std::vector<ClientInfo>& clientsInfo) {
+    MediaResource::Type type = resourceRequestInfo.mResource->type;
+    MediaResource::SubType subType = resourceRequestInfo.mResource->subType;
 
     for (auto& [pid, infos] : mMap) {
         for (const auto& [id, info] : infos) {
             if (hasResourceType(type, subType, info.resources)) {
-                if (!isCallingPriorityHigher_l(callingPid, pid)) {
+                if (!isCallingPriorityHigher_l(resourceRequestInfo.mCallingPid, pid)) {
                     // some higher/equal priority process owns the resource,
                     // this request can't be fulfilled.
-                    ALOGE("getAllClients_l: can't reclaim resource %s from pid %d",
-                            asString(type), pid);
+                    ALOGE("%s: can't reclaim resource %s from pid %d",
+                          __func__, asString(type), pid);
+                    clientsInfo.clear();
                     return false;
                 }
-                temp.push_back(info.client);
-                tempIdList.emplace_back(pid, info.uid);
+                clientsInfo.emplace_back(pid, info.uid, info.clientId);
             }
         }
     }
-    if (temp.size() == 0) {
-        ALOGV("getAllClients_l: didn't find any resource %s", asString(type));
-        return true;
+    if (clientsInfo.size() == 0) {
+        ALOGV("%s: didn't find any resource %s", __func__, asString(type));
     }
-
-    clients->insert(std::end(*clients), std::begin(temp), std::end(temp));
-    idVector->insert(std::end(*idVector), std::begin(tempIdList), std::end(tempIdList));
     return true;
 }
 
-bool ResourceManagerService::getLowestPriorityBiggestClient_l(int callingPid,
-        MediaResource::Type type,
-        MediaResource::SubType subType,
-        PidUidVector* idVector,
-        std::shared_ptr<IResourceManagerClient> *client) {
+// Process priority (oom score) based reclaim:
+//   - Find a process with lower priority than that of the calling process.
+//   - Find the biggest client (with the required resources) from that process.
+bool ResourceManagerService::getLowestPriorityBiggestClient_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        ClientInfo& clientInfo) {
+    int callingPid = resourceRequestInfo.mCallingPid;
+    MediaResource::Type type = resourceRequestInfo.mResource->type;
+    MediaResource::SubType subType = resourceRequestInfo.mResource->subType;
     int lowestPriorityPid;
     int lowestPriority;
     int callingPriority;
-    uid_t uid = 0;
 
-    // Before looking into other processes, check if we have clients marked for
-    // pending removal in the same process.
-    if (getBiggestClientPendingRemoval_l(callingPid, type, subType, uid, client)) {
-        idVector->emplace_back(callingPid, uid);
-        return true;
-    }
     if (!getPriority_l(callingPid, &callingPriority)) {
-        ALOGE("getLowestPriorityBiggestClient_l: can't get process priority for pid %d",
-                callingPid);
+        ALOGE("%s: can't get process priority for pid %d", __func__, callingPid);
         return false;
     }
     if (!getLowestPriorityPid_l(type, subType, &lowestPriorityPid, &lowestPriority)) {
         return false;
     }
     if (lowestPriority <= callingPriority) {
-        ALOGE("getLowestPriorityBiggestClient_l: lowest priority %d vs caller priority %d",
-                lowestPriority, callingPriority);
+        ALOGE("%s: lowest priority %d vs caller priority %d",
+              __func__, lowestPriority, callingPriority);
         return false;
     }
 
-    if (!getBiggestClient_l(lowestPriorityPid, type, subType, uid, client)) {
+    if (!getBiggestClient_l(lowestPriorityPid, type, subType, clientInfo)) {
         return false;
     }
 
-    idVector->emplace_back(lowestPriorityPid, uid);
+    ALOGI("%s: CallingProcess(%d:%d) will reclaim from the lowestPriorityProcess(%d:%d)",
+          __func__, callingPid, callingPriority, lowestPriorityPid, lowestPriority);
     return true;
 }
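
A standalone sketch of the priority comparison performed above, under the assumption (as with oom scores) that a larger priority value means a less important process; reclaim only proceeds when the chosen process is strictly lower priority than the caller:

#include <cstdio>
#include <map>

int main() {
    // pid -> priority value (larger value == lower priority, as with oom scores)
    std::map<int, int> processPriority = {{100, 200}, {200, 600}, {300, 900}};
    const int callingPid = 100;
    const int callingPriority = processPriority[callingPid];

    int lowestPriorityPid = -1;
    int lowestPriority = -1;
    for (const auto& [pid, priority] : processPriority) {
        if (priority > lowestPriority) {
            lowestPriority = priority;
            lowestPriorityPid = pid;
        }
    }

    if (lowestPriority <= callingPriority) {
        printf("no process has lower priority than the caller; nothing to reclaim\n");
    } else {
        printf("reclaim from pid %d (priority %d) on behalf of pid %d (priority %d)\n",
               lowestPriorityPid, lowestPriority, callingPid, callingPriority);
    }
    return 0;
}
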
 
@@ -1063,15 +1034,12 @@
 }
 
 bool ResourceManagerService::getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
-        MediaResource::SubType subType, uid_t& uid,
-        std::shared_ptr<IResourceManagerClient> *client) {
-    return getBiggestClient_l(pid, type, subType, uid, client, true /* pendingRemovalOnly */);
+        MediaResource::SubType subType, ClientInfo& clientInfo) {
+    return getBiggestClient_l(pid, type, subType, clientInfo, true /* pendingRemovalOnly */);
 }
 
 bool ResourceManagerService::getBiggestClient_l(int pid, MediaResource::Type type,
-        MediaResource::SubType subType, uid_t& uid,
-        std::shared_ptr<IResourceManagerClient> *client,
-        bool pendingRemovalOnly) {
+        MediaResource::SubType subType, ClientInfo& clientInfo, bool pendingRemovalOnly) {
     PidResourceInfosMap::iterator found = mMap.find(pid);
     if (found == mMap.end()) {
         ALOGE_IF(!pendingRemovalOnly,
@@ -1079,7 +1047,8 @@
         return false;
     }
 
-    std::shared_ptr<IResourceManagerClient> clientTemp;
+    uid_t   uid = -1;
+    int64_t clientId = -1;
     uint64_t largestValue = 0;
     const ResourceInfos& infos = found->second;
     for (const auto& [id, info] : infos) {
@@ -1092,21 +1061,23 @@
             if (hasResourceType(type, subType, resource)) {
                 if (resource.value > largestValue) {
                     largestValue = resource.value;
-                    clientTemp = info.client;
+                    clientId = info.clientId;
                     uid = info.uid;
                 }
             }
         }
     }
 
-    if (clientTemp == NULL) {
+    if (clientId == -1) {
         ALOGE_IF(!pendingRemovalOnly,
                  "getBiggestClient_l: can't find resource type %s and subtype %s for pid %d",
                  asString(type), asString(subType), pid);
         return false;
     }
 
-    *client = clientTemp;
+    clientInfo.mPid = pid;
+    clientInfo.mUid = uid;
+    clientInfo.mClientId = clientId;
     return true;
 }
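
getBiggestClient_l() above reduces to a scan for the client holding the largest recorded value of the wanted resource. A standalone sketch of that scan (the map below stands in for the per-pid ResourceInfos):

#include <cstdint>
#include <cstdio>
#include <map>

int main() {
    // clientId -> recorded value of the wanted resource type for that client
    std::map<int64_t, uint64_t> clientResourceValue = {{1, 4096}, {2, 1 << 20}, {3, 512}};

    int64_t biggestClientId = -1;
    uint64_t largestValue = 0;
    for (const auto& [clientId, value] : clientResourceValue) {
        if (value > largestValue) {
            largestValue = value;
            biggestClientId = clientId;
        }
    }

    if (biggestClientId == -1) {
        printf("no client holds this resource type\n");
    } else {
        printf("reclaim target: client %lld holding %llu units\n",
               (long long)biggestClientId, (unsigned long long)largestValue);
    }
    return 0;
}
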
 
@@ -1138,4 +1109,8 @@
     return mResourceManagerMetrics->getCurrentConcurrentPixelCount(pid);
 }
 
+void ResourceManagerService::notifyClientReleased(const ClientInfoParcel& clientInfo) {
+    mResourceManagerMetrics->notifyClientReleased(clientInfo);
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index aa88ac6..44ed005 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -30,10 +30,10 @@
 #include <utils/String8.h>
 #include <utils/threads.h>
 
+#include "ResourceManagerServiceUtils.h"
+
 namespace android {
 
-class DeathNotifier;
-class ResourceManagerService;
 class ResourceObserverService;
 class ServiceLog;
 struct ProcessInfoInterface;
@@ -47,26 +47,6 @@
 using ::aidl::android::media::ClientInfoParcel;
 using ::aidl::android::media::ClientConfigParcel;
 
-typedef std::map<std::tuple<
-        MediaResource::Type, MediaResource::SubType, std::vector<uint8_t>>,
-        MediaResourceParcel> ResourceList;
-
-struct ResourceInfo {
-    uid_t uid;
-    int64_t clientId;
-    std::string name;
-    std::shared_ptr<IResourceManagerClient> client;
-    std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
-    ResourceList resources;
-    bool pendingRemoval{false};
-};
-
-// vector of <PID, UID>
-typedef std::vector<std::pair<int32_t, uid_t>> PidUidVector;
-
-typedef std::map<int64_t, ResourceInfo> ResourceInfos;
-typedef std::map<int, ResourceInfos> PidResourceInfosMap;
-
 class ResourceManagerService : public BnResourceManagerService {
 public:
     struct SystemCallbackInterface : public RefBase {
@@ -79,14 +59,22 @@
     static char const *getServiceName() { return "media.resource_manager"; }
     static void instantiate();
 
-    virtual inline binder_status_t dump(
+    // Static creation methods.
+    static std::shared_ptr<ResourceManagerService> Create();
+    static std::shared_ptr<ResourceManagerService> Create(
+        const sp<ProcessInfoInterface>& processInfo,
+        const sp<SystemCallbackInterface>& systemResource);
+
+    virtual binder_status_t dump(
             int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
 
     ResourceManagerService();
     explicit ResourceManagerService(const sp<ProcessInfoInterface> &processInfo,
             const sp<SystemCallbackInterface> &systemResource);
     virtual ~ResourceManagerService();
-    void setObserverService(const std::shared_ptr<ResourceObserverService>& observerService);
+
+    virtual void setObserverService(
+            const std::shared_ptr<ResourceObserverService>& observerService);
 
     // IResourceManagerService interface
     Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
@@ -116,8 +104,6 @@
 
     Status reclaimResourcesFromClientsPendingRemoval(int32_t pid) override;
 
-    Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid);
-
     Status notifyClientCreated(const ClientInfoParcel& clientInfo) override;
 
     Status notifyClientStarted(const ClientConfigParcel& clientConfig) override;
@@ -126,85 +112,140 @@
 
     Status notifyClientConfigChanged(const ClientConfigParcel& clientConfig) override;
 
-private:
-    friend class ResourceManagerServiceTest;
-    friend class DeathNotifier;
-    friend class OverrideProcessInfoDeathNotifier;
+protected:
+    // To get notifications when a resource is added for the first time.
+    void onFirstAdded(const MediaResourceParcel& res, uid_t uid);
+    // To get notifications when the last of a resource has been removed.
+    void onLastRemoved(const MediaResourceParcel& res, uid_t uid);
 
     // Reclaims resources from |clients|. Returns true if reclaim succeeded
     // for all clients.
-    bool reclaimUnconditionallyFrom(
-        const std::vector<std::shared_ptr<IResourceManagerClient>>& clients);
+    bool reclaimUnconditionallyFrom(const std::vector<ClientInfo>& targetClients);
+
+    // A helper function that returns true if the callingPid has higher priority than pid.
+    // Returns false otherwise.
+    bool isCallingPriorityHigher_l(int callingPid, int pid);
+
+    // To notify the metrics about client being released.
+    void notifyClientReleased(const ClientInfoParcel& clientInfo);
+
+    virtual Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid);
+
+private:
+    friend class ResourceManagerServiceTest;
+    friend class ResourceManagerServiceTestBase;
+    friend class DeathNotifier;
+    friend class OverrideProcessInfoDeathNotifier;
+
+    // Gets the client that owns the biggest piece of the specified resource type from pid.
+    // Returns false with no change to client if there are no clients holding resources of this
+    // type.
+    bool getBiggestClient_l(int pid, MediaResource::Type type,
+                            MediaResource::SubType subType,
+                            ClientInfo& clientsInfo,
+                            bool pendingRemovalOnly = false);
+
+    // A helper function that gets the biggest client of the process pid that
+    // is marked for pending removal and has the needed resources.
+    bool getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
+                                          MediaResource::SubType subType,
+                                          ClientInfo& clientsInfo);
+
+    // From the list of clients, pick/select client(s) based on the reclaim policy.
+    void getClientForResource_l(const ResourceRequestInfo& resourceRequestInfo,
+                                std::vector<ClientInfo>& clientsInfo);
+    // A helper function that pushes Reclaim Atom (for metric collection).
+    void pushReclaimAtom(const ClientInfoParcel& clientInfo,
+                         const std::vector<ClientInfo>& targetClients,
+                         bool reclaimed);
+
+    // Remove the override info for the given process
+    void removeProcessInfoOverride_l(int pid);
+
+    // Eventually we want to phase out this implementation of IResourceManagerService
+    // (ResourceManagerService) and replace that with the newer implementation
+    // (ResourceManagerServiceNew).
+    // So, marking the following methods as private virtual, for the newer implementation
+    // to override, is the easiest way to maintain both implementations.
+
+    // Initializes the internal state of the ResourceManagerService
+    virtual void init();
+
+    // Gets the list of all the clients that own the specified resources and
+    // satisfy the resource model and the reclaim policy.
+    virtual bool getTargetClients(
+        int32_t callingPid,
+        const std::vector<MediaResourceParcel>& resources,
+        std::vector<ClientInfo>& targetClients);
 
     // Gets the list of all the clients who own the specified resource type.
     // Returns false if any client belongs to a process with higher priority than the
     // calling process. The clients will remain unchanged if returns false.
-    bool getAllClients_l(int callingPid, MediaResource::Type type, MediaResource::SubType subType,
-            PidUidVector* idList,
-            std::vector<std::shared_ptr<IResourceManagerClient>>* clients);
+    virtual bool getAllClients_l(const ResourceRequestInfo& resourceRequestInfo,
+                                 std::vector<ClientInfo>& clientsInfo);
 
     // Gets the client who owns specified resource type from lowest possible priority process.
     // Returns false if the calling process priority is not higher than the lowest process
     // priority. The client will remain unchanged if returns false.
-    bool getLowestPriorityBiggestClient_l(int callingPid, MediaResource::Type type,
-            MediaResource::SubType subType, PidUidVector* idList,
-            std::shared_ptr<IResourceManagerClient> *client);
+    virtual bool getLowestPriorityBiggestClient_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        ClientInfo& clientInfo);
+
+    // Override the pid of the given process
+    virtual bool overridePid_l(int32_t originalPid, int32_t newPid);
+
+    // Override the process info of the given process
+    virtual bool overrideProcessInfo_l(const std::shared_ptr<IResourceManagerClient>& client,
+                                       int pid, int procState, int oomScore);
+
+    // Get priority from process's pid
+    virtual bool getPriority_l(int pid, int* priority) const;
 
     // Gets lowest priority process that has the specified resource type.
     // Returns false if failed. The output parameters will remain unchanged if failed.
-    bool getLowestPriorityPid_l(MediaResource::Type type, MediaResource::SubType subType, int *pid,
-                int *priority);
+    virtual bool getLowestPriorityPid_l(MediaResource::Type type, MediaResource::SubType subType,
+                                        int* lowestPriorityPid, int* lowestPriority);
 
-    // Gets the client who owns biggest piece of specified resource type from pid.
-    // Returns false with no change to client if there are no clients holdiing resources of thisi
-    // type.
-    bool getBiggestClient_l(int pid, MediaResource::Type type, MediaResource::SubType subType,
-            uid_t& uid, std::shared_ptr<IResourceManagerClient> *client,
-            bool pendingRemovalOnly = false);
-    // Same method as above, but with pendingRemovalOnly as true.
-    bool getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
-            MediaResource::SubType subType, uid_t& uid,
-            std::shared_ptr<IResourceManagerClient> *client);
+    // Removes the pid from the override map.
+    virtual void removeProcessInfoOverride(int pid);
 
-    bool isCallingPriorityHigher_l(int callingPid, int pid);
+    // Get the client for the given pid and clientId from the map
+    virtual std::shared_ptr<IResourceManagerClient> getClient(
+        int pid, const int64_t& clientId) const;
 
-    // A helper function basically calls getLowestPriorityBiggestClient_l and add
-    // the result client to the given Vector.
-    void getClientForResource_l(int callingPid, const MediaResourceParcel *res,
-            PidUidVector* idList,
-            std::vector<std::shared_ptr<IResourceManagerClient>>* clients);
+    // Remove the client for the given pid and clientId from the map
+    virtual bool removeClient(int pid, const int64_t& clientId);
 
-    void onFirstAdded(const MediaResourceParcel& res, const ResourceInfo& clientInfo);
-    void onLastRemoved(const MediaResourceParcel& res, const ResourceInfo& clientInfo);
+    // Get all the resource status for the dump
+    virtual void getResourceDump(std::string& resourceLog) const;
 
-    // Merge r2 into r1
-    void mergeResources(MediaResourceParcel& r1, const MediaResourceParcel& r2);
-
-    // Get priority from process's pid
-    bool getPriority_l(int pid, int* priority);
-
-    void removeProcessInfoOverride(int pid);
-
-    void removeProcessInfoOverride_l(int pid);
-
-    void pushReclaimAtom(const ClientInfoParcel& clientInfo,
-                         const std::vector<std::shared_ptr<IResourceManagerClient>>& clients,
-                         const PidUidVector& idList, bool reclaimed);
-
+    // The following utility functions are used only for testing by ResourceManagerServiceTest
+    // START: TEST only functions
     // Get the peak concurrent pixel count (associated with the video codecs) for the process.
     long getPeakConcurrentPixelCount(int pid) const;
     // Get the current concurrent pixel count (associated with the video codecs) for the process.
     long getCurrentConcurrentPixelCount(int pid) const;
+    // To create an object of type ResourceManagerServiceNew
+    static std::shared_ptr<ResourceManagerService> CreateNew(
+        const sp<ProcessInfoInterface>& processInfo,
+        const sp<SystemCallbackInterface>& systemResource);
+    // Returns an unmodifiable reference to the internal resource state as a map
+    virtual const std::map<int, ResourceInfos>& getResourceMap() const {
+        return mMap;
+    }
+    // END: TEST only functions
 
+protected:
     mutable std::mutex mLock;
     sp<ProcessInfoInterface> mProcessInfo;
     sp<SystemCallbackInterface> mSystemCB;
     sp<ServiceLog> mServiceLog;
-    PidResourceInfosMap mMap;
     bool mSupportsMultipleSecureCodecs;
     bool mSupportsSecureWithNonSecureCodec;
     int32_t mCpuBoostCount;
-    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+private:
+    PidResourceInfosMap mMap;
     struct ProcessInfoOverride {
         std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
         std::shared_ptr<IResourceManagerClient> client;
diff --git a/services/mediaresourcemanager/ResourceManagerServiceNew.cpp b/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
new file mode 100644
index 0000000..dde389a
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
@@ -0,0 +1,362 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerServiceNew"
+#include <utils/Log.h>
+#include <binder/IPCThreadState.h>
+#include <mediautils/ProcessInfo.h>
+
+#include "DefaultResourceModel.h"
+#include "ProcessPriorityReclaimPolicy.h"
+#include "ResourceManagerServiceNew.h"
+#include "ResourceTracker.h"
+#include "ServiceLog.h"
+
+namespace android {
+
+ResourceManagerServiceNew::ResourceManagerServiceNew(
+        const sp<ProcessInfoInterface>& processInfo,
+        const sp<SystemCallbackInterface>& systemResource) :
+  ResourceManagerService(processInfo, systemResource) {}
+
+ResourceManagerServiceNew::~ResourceManagerServiceNew() {}
+
+void ResourceManagerServiceNew::init() {
+    // Create the Resource Tracker
+    mResourceTracker = std::make_shared<ResourceTracker>(ref<ResourceManagerServiceNew>(),
+                                                         mProcessInfo);
+    setUpResourceModels();
+    setUpReclaimPolicies();
+}
+
+void ResourceManagerServiceNew::setUpResourceModels() {
+    std::scoped_lock lock{mLock};
+    // Create/Configure the default resource model.
+    if (mDefaultResourceModel == nullptr) {
+        mDefaultResourceModel = std::make_unique<DefaultResourceModel>(
+                mResourceTracker,
+                mSupportsMultipleSecureCodecs,
+                mSupportsSecureWithNonSecureCodec);
+    } else {
+        DefaultResourceModel* resourceModel =
+            static_cast<DefaultResourceModel*>(mDefaultResourceModel.get());
+        resourceModel->config(mSupportsMultipleSecureCodecs, mSupportsSecureWithNonSecureCodec);
+    }
+}
+
+void ResourceManagerServiceNew::setUpReclaimPolicies() {
+    mReclaimPolicies.clear();
+    // Process priority (oom score) as the Default reclaim policy.
+    mReclaimPolicies.push_back(std::make_unique<ProcessPriorityReclaimPolicy>(mResourceTracker));
+}
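
setUpReclaimPolicies() above keeps an ordered list of reclaim policies, with the process-priority policy installed as the default. A heavily simplified sketch of that shape; the interface below is an assumption for illustration only (of the names here, only ProcessPriorityReclaimPolicy and ResourceTracker actually appear in this change):

#include <cstdio>
#include <memory>
#include <vector>

// Assumed policy interface, for illustration only.
struct ReclaimPolicySketch {
    virtual ~ReclaimPolicySketch() = default;
    virtual const char* describe() const = 0;
};

struct ProcessPrioritySketch : ReclaimPolicySketch {
    const char* describe() const override { return "reclaim from the lowest-priority process"; }
};

int main() {
    std::vector<std::unique_ptr<ReclaimPolicySketch>> policies;
    policies.clear();  // mirrors mReclaimPolicies.clear() before (re)population
    policies.push_back(std::make_unique<ProcessPrioritySketch>());
    for (const auto& policy : policies) {
        printf("reclaim policy: %s\n", policy->describe());
    }
    return 0;
}
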
+
+Status ResourceManagerServiceNew::config(const std::vector<MediaResourcePolicyParcel>& policies) {
+    Status status = ResourceManagerService::config(policies);
+    // Change in the config dictates update to the resource model.
+    setUpResourceModels();
+    return status;
+}
+
+void ResourceManagerServiceNew::setObserverService(
+        const std::shared_ptr<ResourceObserverService>& observerService) {
+    ResourceManagerService::setObserverService(observerService);
+    mResourceTracker->setResourceObserverService(observerService);
+}
+
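+// Logs the request, hands the resource bookkeeping over to the ResourceTracker and
+// notifies the media_resource_monitor service (when available) about the grant.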
+Status ResourceManagerServiceNew::addResource(
+        const ClientInfoParcel& clientInfo,
+        const std::shared_ptr<IResourceManagerClient>& client,
+        const std::vector<MediaResourceParcel>& resources) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("addResource(pid %d, uid %d clientId %lld, resources %s)",
+            pid, uid, (long long) clientId, getString(resources).c_str());
+    mServiceLog->add(log);
+
+    std::scoped_lock lock{mLock};
+    mResourceTracker->addResource(clientInfo, client, resources);
+    notifyResourceGranted(pid, resources);
+
+    return Status::ok();
+}
+
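+// Removes the given (partial) set of resources from the client's entry in the
+// ResourceTracker.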
+Status ResourceManagerServiceNew::removeResource(
+        const ClientInfoParcel& clientInfo,
+        const std::vector<MediaResourceParcel>& resources) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("removeResource(pid %d, uid %d clientId %lld, resources %s)",
+            pid, uid, (long long) clientId, getString(resources).c_str());
+    mServiceLog->add(log);
+
+    std::scoped_lock lock{mLock};
+    mResourceTracker->removeResource(clientInfo, resources);
+    return Status::ok();
+}
+
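+// Removes all resources of the client by delegating to removeResource() with
+// pid validation enabled.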
+Status ResourceManagerServiceNew::removeClient(const ClientInfoParcel& clientInfo) {
+    removeResource(clientInfo, true /*checkValid*/);
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::removeResource(const ClientInfoParcel& clientInfo,
+                                                 bool checkValid) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("removeResource(pid %d, uid %d clientId %lld)",
+            pid, uid, (long long) clientId);
+    mServiceLog->add(log);
+
+    std::scoped_lock lock{mLock};
+    if (mResourceTracker->removeResource(clientInfo, checkValid)) {
+        notifyClientReleased(clientInfo);
+    }
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::reclaimResource(
+        const ClientInfoParcel& clientInfo,
+        const std::vector<MediaResourceParcel>& resources,
+        bool* _aidl_return) {
+    return ResourceManagerService::reclaimResource(clientInfo, resources, _aidl_return);
+}
+
+bool ResourceManagerServiceNew::overridePid_l(int32_t originalPid, int32_t newPid) {
+    return mResourceTracker->overridePid(originalPid, newPid);
+}
+
+Status ResourceManagerServiceNew::overridePid(int originalPid, int newPid) {
+    return ResourceManagerService::overridePid(originalPid, newPid);
+}
+
+bool ResourceManagerServiceNew::overrideProcessInfo_l(
+        const std::shared_ptr<IResourceManagerClient>& client,
+        int pid,
+        int procState,
+        int oomScore) {
+    return mResourceTracker->overrideProcessInfo(client, pid, procState, oomScore);
+}
+
+Status ResourceManagerServiceNew::overrideProcessInfo(
+        const std::shared_ptr<IResourceManagerClient>& client,
+        int pid,
+        int procState,
+        int oomScore) {
+    return ResourceManagerService::overrideProcessInfo(client, pid, procState, oomScore);
+}
+
+void ResourceManagerServiceNew::removeProcessInfoOverride(int pid) {
+    std::scoped_lock lock{mLock};
+
+    mResourceTracker->removeProcessInfoOverride(pid);
+}
+
+Status ResourceManagerServiceNew::markClientForPendingRemoval(const ClientInfoParcel& clientInfo) {
+    int32_t pid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format(
+            "markClientForPendingRemoval(pid %d, clientId %lld)",
+            pid, (long long) clientId);
+    mServiceLog->add(log);
+
+    std::scoped_lock lock{mLock};
+    mResourceTracker->markClientForPendingRemoval(clientInfo);
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::reclaimResourcesFromClientsPendingRemoval(int32_t pid) {
+    String8 log = String8::format("reclaimResourcesFromClientsPendingRemoval(pid %d)", pid);
+    mServiceLog->add(log);
+
+    std::vector<ClientInfo> targetClients;
+    {
+        std::scoped_lock lock{mLock};
+        mResourceTracker->getClientsMarkedPendingRemoval(pid, targetClients);
+    }
+
+    if (!targetClients.empty()) {
+        reclaimUnconditionallyFrom(targetClients);
+    }
+    return Status::ok();
+}
+
+Status ResourceManagerServiceNew::notifyClientCreated(const ClientInfoParcel& clientInfo) {
+    return ResourceManagerService::notifyClientCreated(clientInfo);
+}
+
+Status ResourceManagerServiceNew::notifyClientStarted(const ClientConfigParcel& clientConfig) {
+    return ResourceManagerService::notifyClientStarted(clientConfig);
+}
+
+Status ResourceManagerServiceNew::notifyClientStopped(const ClientConfigParcel& clientConfig) {
+    return ResourceManagerService::notifyClientStopped(clientConfig);
+}
+
+Status ResourceManagerServiceNew::notifyClientConfigChanged(
+        const ClientConfigParcel& clientConfig) {
+    return ResourceManagerService::notifyClientConfigChanged(clientConfig);
+}
+
+void ResourceManagerServiceNew::getResourceDump(std::string& resourceLog) const {
+    std::scoped_lock lock{mLock};
+    mResourceTracker->dump(resourceLog);
+}
+
+binder_status_t ResourceManagerServiceNew::dump(int fd, const char** args, uint32_t numArgs) {
+    return ResourceManagerService::dump(fd, args, numArgs);
+}
+
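+// Summary of the flow below:
+//  1. Fall back to the actual calling pid if the passed-in pid is untrusted.
+//  2. Ask the resource model for all clients holding the requested resources.
+//  3. If the model reports a conflict, reclaim from all returned clients; otherwise
+//     let the reclaim policies pick the target client(s).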
+bool ResourceManagerServiceNew::getTargetClients(
+        int callingPid,
+        const std::vector<MediaResourceParcel>& resources,
+        std::vector<ClientInfo>& targetClients) {
+    std::scoped_lock lock{mLock};
+    if (!mProcessInfo->isPidTrusted(callingPid)) {
+        pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using actual calling pid %d", __FUNCTION__,
+                callingPid, actualCallingPid);
+        callingPid = actualCallingPid;
+    }
+
+    // Use the Resource Model to get a list of all the clients that hold the
+    // needed/requested resources.
+    ReclaimRequestInfo reclaimRequestInfo{callingPid, resources};
+    std::vector<ClientInfo> clients;
+    if (!mDefaultResourceModel->getAllClients(reclaimRequestInfo, clients)) {
+        if (clients.empty()) {
+            ALOGI("%s: There aren't any clients with given resources. Nothing to reclaim",
+                  __func__);
+            return false;
+        }
+        // The model reported a conflict, so reclaim from all of the returned clients.
+        targetClients = std::move(clients);
+    } else {
+        // Select client(s) from among those that hold the needed resources.
+        getClientForResource_l(reclaimRequestInfo, clients, targetClients);
+    }
+    return !targetClients.empty();
+}
+
+void ResourceManagerServiceNew::getClientForResource_l(
+        const ReclaimRequestInfo& reclaimRequestInfo,
+        const std::vector<ClientInfo>& clients,
+        std::vector<ClientInfo>& targetClients) {
+    int callingPid = reclaimRequestInfo.mCallingPid;
+
+    // Before looking into other processes, check if we have clients marked for
+    // pending removal in the same process.
+    ClientInfo targetClient;
+    for (const MediaResourceParcel& resource : reclaimRequestInfo.mResources) {
+        if (mResourceTracker->getBiggestClientPendingRemoval(callingPid, resource.type,
+                                                             resource.subType, targetClient)) {
+            targetClients.emplace_back(targetClient);
+            return;
+        }
+    }
+
+    // Run through all the reclaim policies until a client to reclaim from is identified.
+    for (std::unique_ptr<IReclaimPolicy>& reclaimPolicy : mReclaimPolicies) {
+        if (reclaimPolicy->getClients(reclaimRequestInfo, clients, targetClients)) {
+            return;
+        }
+    }
+}
+
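+// Test-only helper: runs the DefaultResourceModel and then a ProcessPriorityReclaimPolicy
+// for the single requested resource and returns the first client selected.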
+bool ResourceManagerServiceNew::getLowestPriorityBiggestClient_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        ClientInfo& clientInfo) {
+    //NOTE: This function is used only by the test: ResourceManagerServiceTest
+    if (resourceRequestInfo.mResource == nullptr) {
+        return false;
+    }
+
+    // Use the DefaultResourceModel to get all the clients with the resources requested.
+    std::vector<MediaResourceParcel> resources{*resourceRequestInfo.mResource};
+    ReclaimRequestInfo reclaimRequestInfo{resourceRequestInfo.mCallingPid, resources};
+    std::vector<ClientInfo> clients;
+    mDefaultResourceModel->getAllClients(reclaimRequestInfo, clients);
+
+    // Use the ProcessPriorityReclaimPolicy to select a client to reclaim from.
+    std::unique_ptr<IReclaimPolicy> reclaimPolicy
+        = std::make_unique<ProcessPriorityReclaimPolicy>(mResourceTracker);
+    std::vector<ClientInfo> targetClients;
+    if (reclaimPolicy->getClients(reclaimRequestInfo, clients, targetClients)) {
+        if (!targetClients.empty()) {
+            clientInfo = targetClients[0];
+            return true;
+        }
+    }
+
+    return false;
+}
+
+bool ResourceManagerServiceNew::getPriority_l(int pid, int* priority) const {
+    return mResourceTracker->getPriority(pid, priority);
+}
+
+bool ResourceManagerServiceNew::getLowestPriorityPid_l(
+        MediaResource::Type type, MediaResource::SubType subType,
+        int* lowestPriorityPid, int* lowestPriority) {
+    //NOTE: This function is used only by the test: ResourceManagerServiceTest
+    return mResourceTracker->getLowestPriorityPid(type, subType,
+                                                  *lowestPriorityPid,
+                                                  *lowestPriority);
+}
+
+bool ResourceManagerServiceNew::getAllClients_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        std::vector<ClientInfo>& clientsInfo) {
+    //NOTE: This function is used only by the test: ResourceManagerServiceTest
+    MediaResource::Type type = resourceRequestInfo.mResource->type;
+    // Get the list of all clients that has requested resources.
+    std::vector<ClientInfo> clients;
+    mResourceTracker->getAllClients(resourceRequestInfo, clients);
+
+    // Check whether any higher priority process is already holding the resource.
+    for (const ClientInfo& info : clients) {
+        if (!isCallingPriorityHigher_l(resourceRequestInfo.mCallingPid, info.mPid)) {
+            // some higher/equal priority process owns the resource,
+            // this request can't be fulfilled.
+            ALOGE("%s: can't reclaim resource %s from pid %d", __func__, asString(type), info.mPid);
+            return false;
+        }
+        clientsInfo.emplace_back(info);
+    }
+    if (clientsInfo.size() == 0) {
+        ALOGV("%s: didn't find any resource %s", __func__, asString(type));
+    }
+    return true;
+}
+
+std::shared_ptr<IResourceManagerClient> ResourceManagerServiceNew::getClient(
+        int pid, const int64_t& clientId) const {
+    return mResourceTracker->getClient(pid, clientId);
+}
+
+bool ResourceManagerServiceNew::removeClient(int pid, const int64_t& clientId) {
+    return mResourceTracker->removeClient(pid, clientId);
+}
+
+const std::map<int, ResourceInfos>& ResourceManagerServiceNew::getResourceMap() const {
+    return mResourceTracker->getResourceMap();
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerServiceNew.h b/services/mediaresourcemanager/ResourceManagerServiceNew.h
new file mode 100644
index 0000000..20c3d6e
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerServiceNew.h
@@ -0,0 +1,169 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_RESOURCEMANAGERSERVICENEW_H
+#define ANDROID_MEDIA_RESOURCEMANAGERSERVICENEW_H
+
+#include "ResourceManagerService.h"
+
+namespace android {
+
+class IReclaimPolicy;
+class IResourceModel;
+class ResourceTracker;
+
+//
+// A newer implementation of IResourceManagerService, which
+// eventually will replace the older implementation in ResourceManagerService.
+//
+// To make the transition easier, this implementation overrides the
+// private virtual methods from ResourceManagerService.
+//
+// This implementation abstracts and integrates:
+//   - resources into an independent abstraction
+//   - the resource model as a separate interface (and implementation)
+//   - the reclaim policy as a separate interface (and implementation)
+//
+class ResourceManagerServiceNew : public ResourceManagerService {
+public:
+
+    explicit ResourceManagerServiceNew(const sp<ProcessInfoInterface>& processInfo,
+                                       const sp<SystemCallbackInterface>& systemResource);
+    virtual ~ResourceManagerServiceNew();
+
+    // IResourceManagerService interface
+    Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
+
+    Status addResource(const ClientInfoParcel& clientInfo,
+                       const std::shared_ptr<IResourceManagerClient>& client,
+                       const std::vector<MediaResourceParcel>& resources) override;
+
+    Status removeResource(const ClientInfoParcel& clientInfo,
+                          const std::vector<MediaResourceParcel>& resources) override;
+
+    Status removeClient(const ClientInfoParcel& clientInfo) override;
+
+    Status reclaimResource(const ClientInfoParcel& clientInfo,
+                           const std::vector<MediaResourceParcel>& resources,
+                           bool* _aidl_return) override;
+
+    Status overridePid(int32_t originalPid, int32_t newPid) override;
+
+    Status overrideProcessInfo(const std::shared_ptr<IResourceManagerClient>& client,
+                               int32_t pid, int32_t procState, int32_t oomScore) override;
+
+    Status markClientForPendingRemoval(const ClientInfoParcel& clientInfo) override;
+
+    Status reclaimResourcesFromClientsPendingRemoval(int32_t pid) override;
+
+    Status notifyClientCreated(const ClientInfoParcel& clientInfo) override;
+
+    Status notifyClientStarted(const ClientConfigParcel& clientConfig) override;
+
+    Status notifyClientStopped(const ClientConfigParcel& clientConfig) override;
+
+    Status notifyClientConfigChanged(const ClientConfigParcel& clientConfig) override;
+
+    binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
+
+    friend class ResourceTracker;
+
+private:
+
+    // Set up the Resource models.
+    void setUpResourceModels();
+
+    // Set up the Reclaim Policies.
+    void setUpReclaimPolicies();
+
+    // From the list of clients, pick/select client(s) based on the reclaim policy.
+    void getClientForResource_l(
+        const ReclaimRequestInfo& reclaimRequestInfo,
+        const std::vector<ClientInfo>& clients,
+        std::vector<ClientInfo>& targetClients);
+
+    // Initializes the internal state of the ResourceManagerService
+    void init() override;
+
+    void setObserverService(
+            const std::shared_ptr<ResourceObserverService>& observerService) override;
+
+    // Gets the list of all the clients who own the specified resource type.
+    // Returns false if any client belongs to a process with higher priority than the
+    // calling process. The clients will remain unchanged if it returns false.
+    bool getTargetClients(
+        int32_t callingPid,
+        const std::vector<MediaResourceParcel>& resources,
+        std::vector<ClientInfo>& targetClients) override;
+
+    // Removes the pid from the override map.
+    void removeProcessInfoOverride(int pid) override;
+
+    // Gets the list of all the clients who own the specified resource type.
+    // Returns false if any client belongs to a process with higher priority than the
+    // calling process. The clients will remain unchanged if it returns false.
+    bool getAllClients_l(const ResourceRequestInfo& resourceRequestInfo,
+                         std::vector<ClientInfo>& clientsInfo) override;
+
+    // Gets the client who owns the specified resource type from the lowest possible
+    // priority process.
+    // Returns false if the calling process priority is not higher than the lowest process
+    // priority. The client will remain unchanged if it returns false.
+    // This function is used only by the unit test.
+    bool getLowestPriorityBiggestClient_l(
+        const ResourceRequestInfo& resourceRequestInfo,
+        ClientInfo& clientInfo) override;
+
+    // Override the pid of the given process.
+    bool overridePid_l(int32_t originalPid, int32_t newPid) override;
+
+    // Override the process info of the given process.
+    bool overrideProcessInfo_l(const std::shared_ptr<IResourceManagerClient>& client,
+                               int pid, int procState, int oomScore) override;
+
+    // Get priority from process's pid
+    bool getPriority_l(int pid, int* priority) const override;
+
+    // Gets lowest priority process that has the specified resource type.
+    // Returns false if failed. The output parameters will remain unchanged if failed.
+    bool getLowestPriorityPid_l(MediaResource::Type type, MediaResource::SubType subType,
+                                int* lowestPriorityPid, int* lowestPriority) override;
+
+    // Get the client for given pid and the clientId from the map
+    std::shared_ptr<IResourceManagerClient> getClient(
+        int pid, const int64_t& clientId) const override;
+
+    // Remove the client for given pid and the clientId from the map
+    bool removeClient(int pid, const int64_t& clientId) override;
+
+    // Get all the resource status for dump
+    void getResourceDump(std::string& resourceLog) const override;
+
+    // Returns an unmodifiable reference to the internal resource state as a map.
+    const std::map<int, ResourceInfos>& getResourceMap() const override;
+
+    Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid) override;
+
+private:
+    std::shared_ptr<ResourceTracker> mResourceTracker;
+    std::unique_ptr<IResourceModel> mDefaultResourceModel;
+    std::vector<std::unique_ptr<IReclaimPolicy>> mReclaimPolicies;
+};
+
+// ----------------------------------------------------------------------------
+} // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCEMANAGERSERVICENEW_H
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
index 892b1b3..cd21327 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
@@ -19,34 +19,40 @@
 #define LOG_TAG "ResourceManagerServiceUtils"
 #include <utils/Log.h>
 
+#include <binder/IServiceManager.h>
+
+#include "IMediaResourceMonitor.h"
 #include "ResourceManagerService.h"
 #include "ResourceManagerServiceUtils.h"
 
 namespace android {
 
+// Utility functions that look for a specific resource.
+// Check whether a given resource (of type and subtype) is found in the given resource parcel.
 bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-        const MediaResourceParcel& resource) {
+                     const MediaResourceParcel& resource) {
     if (type != resource.type) {
       return false;
     }
     switch (type) {
-        // Codec subtypes (e.g. video vs. audio) are each considered separate resources, so
-        // compare the subtypes as well.
-        case MediaResource::Type::kSecureCodec:
-        case MediaResource::Type::kNonSecureCodec:
-            if (resource.subType == subType) {
-                return true;
-            }
-            break;
-        // Non-codec resources are not segregated by the subtype (e.g. video vs. audio).
-        default:
+    // Codec subtypes (e.g. video vs. audio and hw vs. sw) are each considered separate resources,
+    // so compare the subtypes as well.
+    case MediaResource::Type::kSecureCodec:
+    case MediaResource::Type::kNonSecureCodec:
+        if (resource.subType == subType) {
             return true;
+        }
+        break;
+    // Non-codec resources are not segregated by the subtype (e.g. video vs. audio).
+    default:
+        return true;
     }
     return false;
 }
 
+// Check whether a given resource (of type and subtype) is found in given resource list.
 bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-        const ResourceList& resources) {
+                     const ResourceList& resources) {
     for (auto it = resources.begin(); it != resources.end(); it++) {
         if (hasResourceType(type, subType, it->second)) {
             return true;
@@ -55,8 +61,9 @@
     return false;
 }
 
+// Check whether a given resource (of type and subtype) is found in given resource info list.
 bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-        const ResourceInfos& infos) {
+                     const ResourceInfos& infos) {
     for (const auto& [id, info] : infos) {
         if (hasResourceType(type, subType, info.resources)) {
             return true;
@@ -77,12 +84,17 @@
     return found->second;
 }
 
+// Return modifiable ResourceInfo for a given client (look up by client id)
+// from the map of ResourceInfos.
+// If the item is not in the map, create one and add it to the map.
 ResourceInfo& getResourceInfoForEdit(const ClientInfoParcel& clientInfo,
-        const std::shared_ptr<IResourceManagerClient>& client, ResourceInfos& infos) {
+                                     const std::shared_ptr<IResourceManagerClient>& client,
+                                     ResourceInfos& infos) {
     ResourceInfos::iterator found = infos.find(clientInfo.id);
 
     if (found == infos.end()) {
-        ResourceInfo info{.uid = static_cast<uid_t>(clientInfo.uid),
+        ResourceInfo info{.pid = clientInfo.pid,
+                          .uid = static_cast<uid_t>(clientInfo.uid),
                           .clientId = clientInfo.id,
                           .name = clientInfo.name.empty()? "<unknown client>" : clientInfo.name,
                           .client = client,
@@ -95,4 +107,128 @@
     return found->second;
 }
 
+// Merge resources from r2 into r1.
+void mergeResources(MediaResourceParcel& r1, const MediaResourceParcel& r2) {
+    // The resource entry on record is maintained to be in [0,INT64_MAX].
+    // Clamp if merging in the new resource value causes it to go out of bounds.
+    // Note that the new resource value could be negative (e.g. for DrmSession, the
+    // value goes lower as the session is used more often). During reclaim,
+    // the session with the highest value (lowest usage) would be closed.
+    if (r2.value < INT64_MAX - r1.value) {
+        r1.value += r2.value;
+        if (r1.value < 0) {
+            r1.value = 0;
+        }
+    } else {
+        r1.value = INT64_MAX;
+    }
+}
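+// A few illustrative merges (values chosen only for this example):
+//   r1.value = 2,         r2.value = 3   ->  r1.value == 5
+//   r1.value = 2,         r2.value = -5  ->  r1.value == 0          (clamped at the low end)
+//   r1.value = INT64_MAX, r2.value = 1   ->  r1.value == INT64_MAX  (clamped at the high end)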
+
+///////////////////////////////////////////////////////////////////////
+////////////// Death Notifier implementation   ////////////////////////
+///////////////////////////////////////////////////////////////////////
+
+DeathNotifier::DeathNotifier(const std::shared_ptr<IResourceManagerClient>& client,
+                             const std::weak_ptr<ResourceManagerService>& service,
+                             const ClientInfoParcel& clientInfo)
+    : mClient(client), mService(service), mClientInfo(clientInfo),
+      mCookie(nullptr),
+      mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
+                      AIBinder_DeathRecipient_new(BinderDiedCallback))) {
+    // Register a callback to be notified when the DeathRecipient gets unlinked (deleted).
+    AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), BinderUnlinkedCallback);
+}
+
+//static
+void DeathNotifier::BinderUnlinkedCallback(void* cookie) {
+    BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
+    // Since we don't need the context anymore, we are deleting it now.
+    delete context;
+}
+
+//static
+void DeathNotifier::BinderDiedCallback(void* cookie) {
+    BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
+
+    // Validate the context and check if the DeathNotifier object is still in scope.
+    if (context != nullptr) {
+        std::shared_ptr<DeathNotifier> thiz = context->mDeathNotifier.lock();
+        if (thiz != nullptr) {
+            thiz->binderDied();
+        } else {
+            ALOGI("DeathNotifier is out of scope already");
+        }
+    }
+}
+
+void DeathNotifier::binderDied() {
+    // Don't check for pid validity since we know it's already dead.
+    std::shared_ptr<ResourceManagerService> service = mService.lock();
+    if (service == nullptr) {
+        ALOGW("ResourceManagerService is dead as well.");
+        return;
+    }
+
+    service->overridePid(mClientInfo.pid, -1);
+    // thiz is freed in the call below, so it must be the last call that refers to thiz.
+    service->removeResource(mClientInfo, false /*checkValid*/);
+}
+
+void OverrideProcessInfoDeathNotifier::binderDied() {
+    // Don't check for pid validity since we know it's already dead.
+    std::shared_ptr<ResourceManagerService> service = mService.lock();
+    if (service == nullptr) {
+        ALOGW("ResourceManagerService is dead as well.");
+        return;
+    }
+
+    service->removeProcessInfoOverride(mClientInfo.pid);
+}
+
+std::shared_ptr<DeathNotifier> DeathNotifier::Create(
+    const std::shared_ptr<IResourceManagerClient>& client,
+    const std::weak_ptr<ResourceManagerService>& service,
+    const ClientInfoParcel& clientInfo,
+    bool overrideProcessInfo) {
+    std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
+    if (overrideProcessInfo) {
+        deathNotifier = std::make_shared<OverrideProcessInfoDeathNotifier>(
+            client, service, clientInfo);
+    } else {
+        deathNotifier = std::make_shared<DeathNotifier>(client, service, clientInfo);
+    }
+
+    if (deathNotifier) {
+        deathNotifier->link();
+    }
+
+    return deathNotifier;
+}
+
+void notifyResourceGranted(int pid, const std::vector<MediaResourceParcel>& resources) {
+    static const char* const kServiceName = "media_resource_monitor";
+    sp<IBinder> binder = defaultServiceManager()->checkService(String16(kServiceName));
+    if (binder != NULL) {
+        sp<IMediaResourceMonitor> service = interface_cast<IMediaResourceMonitor>(binder);
+        for (size_t i = 0; i < resources.size(); ++i) {
+            switch (resources[i].subType) {
+                case MediaResource::SubType::kHwAudioCodec:
+                case MediaResource::SubType::kSwAudioCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_AUDIO_CODEC);
+                    break;
+                case MediaResource::SubType::kHwVideoCodec:
+                case MediaResource::SubType::kSwVideoCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_VIDEO_CODEC);
+                    break;
+                case MediaResource::SubType::kHwImageCodec:
+                case MediaResource::SubType::kSwImageCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_IMAGE_CODEC);
+                    break;
+                case MediaResource::SubType::kUnspecifiedSubType:
+                    break;
+            }
+        }
+    }
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.h b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
index bbc26de..55ea149 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.h
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
@@ -18,11 +18,156 @@
 #ifndef ANDROID_MEDIA_RESOURCEMANAGERSERVICEUTILS_H_
 #define ANDROID_MEDIA_RESOURCEMANAGERSERVICEUTILS_H_
 
+#include <map>
+#include <memory>
 #include <vector>
+
+#include <aidl/android/media/BnResourceManagerService.h>
+#include <media/MediaResource.h>
 #include <utils/String8.h>
 
 namespace android {
 
+class ResourceManagerService;
+
+/*
+ * Death Notifier to track IResourceManagerClient's death.
+ */
+class DeathNotifier : public std::enable_shared_from_this<DeathNotifier> {
+
+    // BinderDiedContext defines the cookie that is passed as DeathRecipient.
+    // Since this can maintain more context than a raw pointer, we can
+    // validate the scope of the DeathNotifier before dereferencing it upon the binder death.
+    struct BinderDiedContext {
+        std::weak_ptr<DeathNotifier> mDeathNotifier;
+    };
+public:
+    static std::shared_ptr<DeathNotifier> Create(
+        const std::shared_ptr<::aidl::android::media::IResourceManagerClient>& client,
+        const std::weak_ptr<ResourceManagerService>& service,
+        const ::aidl::android::media::ClientInfoParcel& clientInfo,
+        bool overrideProcessInfo = false);
+
+    DeathNotifier(const std::shared_ptr<::aidl::android::media::IResourceManagerClient>& client,
+                  const std::weak_ptr<ResourceManagerService>& service,
+                  const ::aidl::android::media::ClientInfoParcel& clientInfo);
+
+    virtual ~DeathNotifier() {
+        unlink();
+    }
+
+    // Implement death recipient
+    static void BinderDiedCallback(void* cookie);
+    static void BinderUnlinkedCallback(void* cookie);
+    virtual void binderDied();
+
+private:
+    void link() {
+        // Create the context that is passed as cookie to the binder death notification.
+        // The context gets deleted at BinderUnlinkedCallback.
+        mCookie = new BinderDiedContext{.mDeathNotifier = weak_from_this()};
+        // Register for the callbacks by linking to death notification.
+        AIBinder_linkToDeath(mClient->asBinder().get(), mDeathRecipient.get(), mCookie);
+    }
+
+    void unlink() {
+        if (mClient != nullptr) {
+            // Unlink from the death notification.
+            AIBinder_unlinkToDeath(mClient->asBinder().get(), mDeathRecipient.get(), mCookie);
+            mClient = nullptr;
+        }
+    }
+
+protected:
+    std::shared_ptr<::aidl::android::media::IResourceManagerClient> mClient;
+    std::weak_ptr<ResourceManagerService> mService;
+    const ::aidl::android::media::ClientInfoParcel mClientInfo;
+    BinderDiedContext* mCookie;
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+};
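+// Typical usage (as in ResourceTracker::addResource() in this change):
+//   info.deathNotifier = DeathNotifier::Create(client, service, clientInfo);
+// Create() links the client's binder to the death recipient; the destructor unlinks it.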
+
+class OverrideProcessInfoDeathNotifier : public DeathNotifier {
+public:
+    OverrideProcessInfoDeathNotifier(
+        const std::shared_ptr<::aidl::android::media::IResourceManagerClient>& client,
+        const std::weak_ptr<ResourceManagerService>& service,
+        const ::aidl::android::media::ClientInfoParcel& clientInfo)
+            : DeathNotifier(client, service, clientInfo) {}
+
+    virtual ~OverrideProcessInfoDeathNotifier() {}
+
+    virtual void binderDied();
+};
+
+// A map of tuple(type, sub-type, id) and the resource parcel.
+typedef std::map<std::tuple<
+        MediaResource::Type, MediaResource::SubType, std::vector<uint8_t>>,
+        ::aidl::android::media::MediaResourceParcel> ResourceList;
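+// Illustrative sketch of how an entry is keyed (mirroring ResourceTracker::addResource()):
+//   const auto resType = std::tuple(res.type, res.subType, res.id);
+//   resources[resType] = res;  // where 'resources' is a ResourceList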
+
+// Encapsulation of Resource Info, which contains:
+// - pid of the app
+// - uid of the app
+// - client id
+// - name of the client (specifically for the codec)
+// - the client associated with it
+// - death notifier for the (above) client
+// - list of resources associated with it
+// - a flag that marks whether this resource is pending removal
+struct ResourceInfo {
+    pid_t pid;
+    uid_t uid;
+    int64_t clientId;
+    std::string name;
+    std::shared_ptr<::aidl::android::media::IResourceManagerClient> client;
+    std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
+    ResourceList resources;
+    bool pendingRemoval{false};
+};
+
+/*
+ * Resource Reclaim request info that encapsulates
+ *  - the calling/requesting process pid.
+ *  - the list of resources requested (to be reclaimed from other clients)
+ */
+struct ReclaimRequestInfo {
+    int mCallingPid = -1;
+    const std::vector<::aidl::android::media::MediaResourceParcel>& mResources;
+};
+
+/*
+ * Resource request info that encapsulates
+ *  - the calling/requesting process pid.
+ *  - the resource requested (to be reclaimed from other clients)
+ */
+struct ResourceRequestInfo {
+    // pid of the calling/requesting process.
+    int mCallingPid = -1;
+    // the resource requested.
+    const ::aidl::android::media::MediaResourceParcel* mResource;
+};
+
+/*
+ * Structure that defines the Client - a possible target to reclaim from.
+ * It encapsulates the pid and uid of the process and the client id,
+ * as identified by the reclaim policy.
+ */
+struct ClientInfo {
+    // pid of the process.
+    pid_t mPid = -1;
+    // uid of the process.
+    uid_t mUid = -1;
+    // Client Id.
+    int64_t mClientId = -1;
+    ClientInfo(pid_t pid = -1, uid_t uid = -1, const int64_t& clientId = -1)
+        : mPid(pid), mUid(uid), mClientId(clientId) {}
+};
+
+// Map of Resource information indexed by the client id.
+typedef std::map<int64_t, ResourceInfo> ResourceInfos;
+
+// Map of Resource information indexed by the process id.
+typedef std::map<int, ResourceInfos> PidResourceInfosMap;
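+// For example, given "PidResourceInfosMap m", m[pid][clientId].resources is the
+// ResourceList held by one client of that process.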
+
 // templated function to stringify the given vector of items.
 template <typename T>
 String8 getString(const std::vector<T>& items) {
@@ -37,15 +182,15 @@
 
 //Check whether a given resource (of type and subtype) is found in given resource parcel.
 bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-        const MediaResourceParcel& resource);
+                     const ::aidl::android::media::MediaResourceParcel& resource);
 
 //Check whether a given resource (of type and subtype) is found in given resource list.
 bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-        const ResourceList& resources);
+                     const ResourceList& resources);
 
 //Check whether a given resource (of type and subtype) is found in given resource info list.
 bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
-        const ResourceInfos& infos);
+                     const ResourceInfos& infos);
 
 // Return modifiable list of ResourceInfo for a given process (look up by pid)
 // from the map of ResourceInfos.
@@ -54,8 +199,19 @@
 // Return modifiable ResourceInfo for a given process (look up by pid)
 // from the map of ResourceInfos.
 // If the item is not in the map, create one and add it to the map.
-ResourceInfo& getResourceInfoForEdit(const ClientInfoParcel& clientInfo,
-        const std::shared_ptr<IResourceManagerClient>& client, ResourceInfos& infos);
+ResourceInfo& getResourceInfoForEdit(
+        const aidl::android::media::ClientInfoParcel& clientInfo,
+        const std::shared_ptr<aidl::android::media::IResourceManagerClient>& client,
+        ResourceInfos& infos);
+
+// Merge resources from r2 into r1.
+void mergeResources(::aidl::android::media::MediaResourceParcel& r1,
+                    const ::aidl::android::media::MediaResourceParcel& r2);
+
+// To notify the media_resource_monitor about the resource being granted.
+void notifyResourceGranted(
+        int pid,
+        const std::vector<::aidl::android::media::MediaResourceParcel>& resources);
 
 } // namespace android
 
diff --git a/services/mediaresourcemanager/ResourceObserverService.cpp b/services/mediaresourcemanager/ResourceObserverService.cpp
index 6c5cecf..72e249f 100644
--- a/services/mediaresourcemanager/ResourceObserverService.cpp
+++ b/services/mediaresourcemanager/ResourceObserverService.cpp
@@ -41,7 +41,8 @@
 };
 
 static MediaObservableType getObservableType(const MediaResourceParcel& res) {
-    if (res.subType == MediaResourceSubType::kVideoCodec) {
+    if (res.subType == MediaResourceSubType::kHwVideoCodec ||
+        res.subType == MediaResourceSubType::kSwVideoCodec) {
         if (res.type == MediaResourceType::kNonSecureCodec) {
             return MediaObservableType::kVideoNonSecureCodec;
         }
diff --git a/services/mediaresourcemanager/ResourceTracker.cpp b/services/mediaresourcemanager/ResourceTracker.cpp
new file mode 100644
index 0000000..8dc13cc
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceTracker.cpp
@@ -0,0 +1,720 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceTracker"
+#include <utils/Log.h>
+
+#include <binder/IPCThreadState.h>
+#include <mediautils/ProcessInfo.h>
+#include "ResourceTracker.h"
+#include "ResourceManagerServiceNew.h"
+#include "ResourceObserverService.h"
+
+namespace android {
+
+inline bool isHwCodec(MediaResource::SubType subType) {
+    return subType == MediaResource::SubType::kHwImageCodec ||
+           subType == MediaResource::SubType::kHwVideoCodec;
+}
+
+// Check whether a given resource (of type and subtype) is found in the given resource list
+// that also has the given primary SubType.
+static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+                            const ResourceList& resources, MediaResource::SubType primarySubType) {
+    bool foundResource = false;
+    bool matchedPrimary =
+        (primarySubType == MediaResource::SubType::kUnspecifiedSubType);
+    for (auto it = resources.begin(); it != resources.end(); it++) {
+        if (hasResourceType(type, subType, it->second)) {
+            foundResource = true;
+        } else if (it->second.subType == primarySubType) {
+            matchedPrimary = true;
+        } else if (isHwCodec(it->second.subType) == isHwCodec(primarySubType)) {
+            matchedPrimary = true;
+        }
+        if (matchedPrimary && foundResource) {
+            return true;
+        }
+    }
+    return false;
+}
+
+// See if the given client is already in the list of clients.
+inline bool contains(const std::vector<ClientInfo>& clients, const int64_t& clientId) {
+    std::vector<ClientInfo>::const_iterator found =
+        std::find_if(clients.begin(), clients.end(),
+                     [clientId](const ClientInfo& client) -> bool {
+                         return client.mClientId == clientId;
+                     });
+
+    return found != clients.end();
+}
+
+
+ResourceTracker::ResourceTracker(const std::shared_ptr<ResourceManagerServiceNew>& service,
+                                 const sp<ProcessInfoInterface>& processInfo) :
+        mService(service),
+        mProcessInfo(processInfo) {
+}
+
+ResourceTracker::~ResourceTracker() {
+}
+
+void ResourceTracker::setResourceObserverService(
+        const std::shared_ptr<ResourceObserverService>& observerService) {
+    mObserverService = observerService;
+}
+
+ResourceInfos& ResourceTracker::getResourceInfosForEdit(int pid) {
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        // new pid
+        ResourceInfos infosForPid;
+        auto [it, inserted] = mMap.emplace(pid, infosForPid);
+        found = it;
+    }
+
+    return found->second;
+}
+
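+// Tracks the given resources against (pid, client id): falls back to the calling
+// pid/uid when the given ones are untrusted, merges values into any existing entries,
+// registers a DeathNotifier for the client and reports the added delta to the
+// ResourceObserverService (when one is set).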
+bool ResourceTracker::addResource(const ClientInfoParcel& clientInfo,
+                                  const std::shared_ptr<IResourceManagerClient>& client,
+                                  const std::vector<MediaResourceParcel>& resources) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+
+    if (!mProcessInfo->isPidUidTrusted(pid, uid)) {
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        ALOGW("%s called with untrusted pid %d or uid %d, using calling pid %d, uid %d",
+                __func__, pid, uid, callingPid, callingUid);
+        pid = callingPid;
+        uid = callingUid;
+    }
+    ResourceInfos& infos = getResourceInfosForEdit(pid);
+    ResourceInfo& info = getResourceInfoForEdit(clientInfo, client, infos);
+    ResourceList resourceAdded;
+
+    for (const MediaResourceParcel& res : resources) {
+        const auto resType = std::tuple(res.type, res.subType, res.id);
+
+        if (res.value < 0 && res.type != MediaResource::Type::kDrmSession) {
+            ALOGV("%s: Ignoring request to remove negative value of non-drm resource", __func__);
+            continue;
+        }
+        if (info.resources.find(resType) == info.resources.end()) {
+            if (res.value <= 0) {
+                // We can't init a new entry with negative value, although it's allowed
+                // to merge in negative values after the initial add.
+                ALOGV("%s: Ignoring request to add new resource entry with value <= 0", __func__);
+                continue;
+            }
+            onFirstAdded(res, info.uid);
+            info.resources[resType] = res;
+        } else {
+            mergeResources(info.resources[resType], res);
+        }
+        // Add it to the list of added resources for observers.
+        auto it = resourceAdded.find(resType);
+        if (it == resourceAdded.end()) {
+            resourceAdded[resType] = res;
+        } else {
+            mergeResources(it->second, res);
+        }
+    }
+    if (info.deathNotifier == nullptr && client != nullptr) {
+        info.deathNotifier = DeathNotifier::Create(client, mService, clientInfo);
+    }
+    if (mObserverService != nullptr && !resourceAdded.empty()) {
+        mObserverService->onResourceAdded(uid, pid, resourceAdded);
+    }
+
+    return !resourceAdded.empty();
+}
+
+bool ResourceTracker::removeResource(const ClientInfoParcel& clientInfo,
+                                     const std::vector<MediaResourceParcel>& resources) {
+    int32_t pid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
+
+    if (!mProcessInfo->isPidTrusted(pid)) {
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __func__,
+                pid, callingPid);
+        pid = callingPid;
+    }
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return false;
+    }
+
+    ResourceInfos& infos = found->second;
+    ResourceInfos::iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return false;
+    }
+
+    ResourceInfo& info = foundClient->second;
+    ResourceList resourceRemoved;
+    for (const MediaResourceParcel& res : resources) {
+        const auto resType = std::tuple(res.type, res.subType, res.id);
+
+        if (res.value < 0) {
+            ALOGV("%s: Ignoring request to remove negative value of resource", __func__);
+            continue;
+        }
+        // ignore if we don't have it
+        if (info.resources.find(resType) != info.resources.end()) {
+            MediaResourceParcel& resource = info.resources[resType];
+            MediaResourceParcel actualRemoved = res;
+            if (resource.value > res.value) {
+                resource.value -= res.value;
+            } else {
+                onLastRemoved(res, info.uid);
+                actualRemoved.value = resource.value;
+                info.resources.erase(resType);
+            }
+
+            // Add it to the list of removed resources for observers.
+            auto it = resourceRemoved.find(resType);
+            if (it == resourceRemoved.end()) {
+                resourceRemoved[resType] = actualRemoved;
+            } else {
+                mergeResources(it->second, actualRemoved);
+            }
+        }
+    }
+    if (mObserverService != nullptr && !resourceRemoved.empty()) {
+        mObserverService->onResourceRemoved(info.uid, pid, resourceRemoved);
+    }
+    return true;
+}
+
+bool ResourceTracker::removeResource(const ClientInfoParcel& clientInfo, bool validateCallingPid) {
+    int32_t pid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
+
+    if (validateCallingPid && !mProcessInfo->isPidTrusted(pid)) {
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __func__,
+                pid, callingPid);
+        pid = callingPid;
+    }
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return false;
+    }
+
+    ResourceInfos& infos = found->second;
+    ResourceInfos::iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return false;
+    }
+
+    const ResourceInfo& info = foundClient->second;
+    for (auto& [resType, resParcel] : info.resources) {
+        onLastRemoved(resParcel, info.uid);
+    }
+
+    if (mObserverService != nullptr && !info.resources.empty()) {
+        mObserverService->onResourceRemoved(info.uid, pid, info.resources);
+    }
+
+    infos.erase(foundClient);
+    return true;
+}
+
+std::shared_ptr<IResourceManagerClient> ResourceTracker::getClient(
+        int pid, const int64_t& clientId) const {
+    std::map<int, ResourceInfos>::const_iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return nullptr;
+    }
+
+    const ResourceInfos& infos = found->second;
+    ResourceInfos::const_iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return nullptr;
+    }
+
+    return foundClient->second.client;
+}
+
+bool ResourceTracker::removeClient(int pid, const int64_t& clientId) {
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return false;
+    }
+
+    ResourceInfos& infos = found->second;
+    ResourceInfos::iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return false;
+    }
+
+    infos.erase(foundClient);
+    return true;
+}
+
+bool ResourceTracker::markClientForPendingRemoval(const ClientInfoParcel& clientInfo) {
+    int32_t pid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
+
+    if (!mProcessInfo->isPidTrusted(pid)) {
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __func__,
+                pid, callingPid);
+        pid = callingPid;
+    }
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long)clientId);
+        return false;
+    }
+
+    ResourceInfos& infos = found->second;
+    ResourceInfos::iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return false;
+    }
+
+    ResourceInfo& info = foundClient->second;
+    info.pendingRemoval = true;
+    return true;
+}
+
+bool ResourceTracker::getClientsMarkedPendingRemoval(int32_t pid,
+                                                     std::vector<ClientInfo>& targetClients) {
+    if (!mProcessInfo->isPidTrusted(pid)) {
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __func__, pid, callingPid);
+        pid = callingPid;
+    }
+
+    // Go through all the MediaResource types (and corresponding subtypes for
+    // each, if applicable) and see if the process (with given pid) holds any
+    // such resources that are marked as pending removal.
+    // Since the use-case of this function is to get all such resources (pending
+    // removal) and reclaim them all - the order in which we look for the
+    // resource type doesn't matter.
+    for (MediaResource::Type type : {MediaResource::Type::kSecureCodec,
+                                     MediaResource::Type::kNonSecureCodec,
+                                     MediaResource::Type::kGraphicMemory,
+                                     MediaResource::Type::kDrmSession}) {
+        switch (type) {
+        // Codec resources are segregated by audio, video and image domains.
+        case MediaResource::Type::kSecureCodec:
+        case MediaResource::Type::kNonSecureCodec:
+            for (MediaResource::SubType subType : {MediaResource::SubType::kHwAudioCodec,
+                                                   MediaResource::SubType::kSwAudioCodec,
+                                                   MediaResource::SubType::kHwVideoCodec,
+                                                   MediaResource::SubType::kSwVideoCodec,
+                                                   MediaResource::SubType::kHwImageCodec,
+                                                   MediaResource::SubType::kSwImageCodec}) {
+                ClientInfo clientInfo;
+                if (getBiggestClientPendingRemoval(pid, type, subType, clientInfo)) {
+                    if (!contains(targetClients, clientInfo.mClientId)) {
+                        targetClients.emplace_back(clientInfo);
+                    }
+                    continue;
+                }
+            }
+            break;
+        // Non-codec resources are shared by audio, video and image codecs (no subtype).
+        default:
+            ClientInfo clientInfo;
+            MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
+            if (getBiggestClientPendingRemoval(pid, type, subType, clientInfo)) {
+                if (!contains(targetClients, clientInfo.mClientId)) {
+                    targetClients.emplace_back(clientInfo);
+                }
+            }
+            break;
+        }
+    }
+
+    return true;
+}
+
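+// Replaces any existing pid override for originalPid; passing newPid == -1 simply
+// clears the override (and returns false).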
+bool ResourceTracker::overridePid(int originalPid, int newPid) {
+    mOverridePidMap.erase(originalPid);
+    if (newPid != -1) {
+        mOverridePidMap.emplace(originalPid, newPid);
+        return true;
+    }
+    return false;
+}
+
+bool ResourceTracker::overrideProcessInfo(const std::shared_ptr<IResourceManagerClient>& client,
+                                          int pid, int procState, int oomScore) {
+    removeProcessInfoOverride(pid);
+
+    if (!mProcessInfo->overrideProcessInfo(pid, procState, oomScore)) {
+        // Override value is rejected by ProcessInfo.
+        return false;
+    }
+
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
+                                .uid = 0,
+                                .id = 0,
+                                .name = "<unknown client>"};
+    std::shared_ptr<DeathNotifier> deathNotifier =
+        DeathNotifier::Create(client, mService, clientInfo, true);
+
+    mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{deathNotifier, client});
+
+    return true;
+}
+
+void ResourceTracker::removeProcessInfoOverride(int pid) {
+    auto it = mProcessInfoOverrideMap.find(pid);
+    if (it == mProcessInfoOverrideMap.end()) {
+        return;
+    }
+
+    mProcessInfo->removeProcessInfoOverride(pid);
+    mProcessInfoOverrideMap.erase(pid);
+}
+
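+// Collects every tracked client whose resources match the requested type/subtype
+// (and the primary subtype, when one is specified). Returns true if at least one
+// client was added to 'clients'.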
+bool ResourceTracker::getAllClients(const ResourceRequestInfo& resourceRequestInfo,
+                                    std::vector<ClientInfo>& clients,
+                                    MediaResource::SubType primarySubType) {
+    MediaResource::Type type = resourceRequestInfo.mResource->type;
+    MediaResource::SubType subType = resourceRequestInfo.mResource->subType;
+    bool foundClient = false;
+
+    for (auto& [pid, /* ResourceInfos */ infos] : mMap) {
+        for (auto& [id, /* ResourceInfo */ info] : infos) {
+            if (hasResourceType(type, subType, info.resources, primarySubType)) {
+                if (!contains(clients, info.clientId)) {
+                    clients.emplace_back(info.pid, info.uid, info.clientId);
+                    foundClient = true;
+                }
+            }
+        }
+    }
+
+    return foundClient;
+}
+
+bool ResourceTracker::getLowestPriorityPid(MediaResource::Type type, MediaResource::SubType subType,
+                                           int& lowestPriorityPid, int& lowestPriority) {
+    int pid = -1;
+    int priority = -1;
+    for (auto& [tempPid, /* ResourceInfos */ infos] : mMap) {
+        if (infos.size() == 0) {
+            // no client on this process.
+            continue;
+        }
+        if (!hasResourceType(type, subType, infos)) {
+            // doesn't have the requested resource type
+            continue;
+        }
+        int tempPriority = -1;
+        if (!getPriority(tempPid, &tempPriority)) {
+            ALOGV("%s: can't get priority of pid %d, skipped", __func__, tempPid);
+            // TODO: remove this pid from mMap?
+            continue;
+        }
+        if (pid == -1 || tempPriority > priority) {
+            // initialize the value
+            pid = tempPid;
+            priority = tempPriority;
+        }
+    }
+
+    bool success = (pid != -1);
+
+    if (success) {
+        lowestPriorityPid = pid;
+        lowestPriority = priority;
+    }
+    return success;
+}
+
+bool ResourceTracker::getLowestPriorityPid(MediaResource::Type type, MediaResource::SubType subType,
+                                           MediaResource::SubType primarySubType,
+                                           const std::vector<ClientInfo>& clients,
+                                           int& lowestPriorityPid, int& lowestPriority) {
+    int pid = -1;
+    int priority = -1;
+    for (const ClientInfo& client : clients) {
+        const ResourceInfo* info = getResourceInfo(client.mPid, client.mClientId);
+        if (info == nullptr) {
+            continue;
+        }
+        if (!hasResourceType(type, subType, info->resources, primarySubType)) {
+            // doesn't have the requested resource type
+            continue;
+        }
+        int tempPriority = -1;
+        if (!getPriority(client.mPid, &tempPriority)) {
+            ALOGV("%s: can't get priority of pid %d, skipped", __func__, client.mPid);
+            // TODO: remove this pid from mMap?
+            continue;
+        }
+        if (pid == -1 || tempPriority > priority) {
+            // initialize the value
+            pid = client.mPid;
+            priority = tempPriority;
+        }
+    }
+
+    bool success = (pid != -1);
+
+    if (success) {
+        lowestPriorityPid = pid;
+        lowestPriority = priority;
+    }
+    return success;
+}
+
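+// Among the clients of the given pid that are marked for pending removal, picks the
+// one holding the largest value of the requested resource type/subtype.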
+bool ResourceTracker::getBiggestClientPendingRemoval(int pid, MediaResource::Type type,
+                                                     MediaResource::SubType subType,
+                                                     ClientInfo& clientInfo) {
+    std::map<int, ResourceInfos>::iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        return false;
+    }
+
+    uid_t   uid = -1;
+    int64_t clientId = -1;
+    uint64_t largestValue = 0;
+    const ResourceInfos& infos = found->second;
+    for (const auto& [id, /* ResourceInfo */ info] : infos) {
+        const ResourceList& resources = info.resources;
+        // Skip if the client is not marked pending removal.
+        if (!info.pendingRemoval) {
+            continue;
+        }
+        for (auto it = resources.begin(); it != resources.end(); it++) {
+            const MediaResourceParcel& resource = it->second;
+            if (hasResourceType(type, subType, resource)) {
+                if (resource.value > largestValue) {
+                    largestValue = resource.value;
+                    clientId = info.clientId;
+                    uid = info.uid;
+                }
+            }
+        }
+    }
+
+    if (clientId == -1) {
+        return false;
+    }
+
+    clientInfo.mPid = pid;
+    clientInfo.mUid = uid;
+    clientInfo.mClientId = clientId;
+    return true;
+}
+
+bool ResourceTracker::getBiggestClient(int targetPid,
+                                       MediaResource::Type type, MediaResource::SubType subType,
+                                       const std::vector<ClientInfo>& clients,
+                                       ClientInfo& clientInfo,
+                                       MediaResource::SubType primarySubType) {
+    uid_t   uid = -1;
+    int64_t clientId = -1;
+    uint64_t largestValue = 0;
+
+    for (const ClientInfo& client : clients) {
+        // Skip the clients that don't belong to the targetPid
+        if (client.mPid != targetPid) {
+            continue;
+        }
+        const ResourceInfo* info = getResourceInfo(client.mPid, client.mClientId);
+        if (info == nullptr) {
+            continue;
+        }
+
+        const ResourceList& resources = info->resources;
+        bool matchedPrimary =
+            (primarySubType == MediaResource::SubType::kUnspecifiedSubType);
+        for (auto it = resources.begin(); !matchedPrimary && it != resources.end(); it++) {
+            if (it->second.subType == primarySubType) {
+                matchedPrimary = true;
+            } else if (isHwCodec(it->second.subType) == isHwCodec(primarySubType)) {
+                matchedPrimary = true;
+            }
+        }
+        // Primary type doesn't match, skip the client
+        if (!matchedPrimary) {
+            continue;
+        }
+        for (auto it = resources.begin(); it != resources.end(); it++) {
+            const MediaResourceParcel& resource = it->second;
+            if (hasResourceType(type, subType, resource)) {
+                if (resource.value > largestValue) {
+                    largestValue = resource.value;
+                    clientId = info->clientId;
+                    uid = info->uid;
+                }
+            }
+        }
+    }
+
+    if (clientId == -1) {
+        ALOGE("%s: can't find resource type %s and subtype %s for pid %d",
+                 __func__, asString(type), asString(subType), targetPid);
+        return false;
+    }
+
+    clientInfo.mPid = targetPid;
+    clientInfo.mUid = uid;
+    clientInfo.mClientId = clientId;
+    return true;
+}
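// Illustrative sketch only (not from the original patch): the primary-sub-type
// filter above treats a client as a match when primarySubType is unspecified,
// when one of its resources carries exactly that sub-type, or when a resource
// falls in the same HW/SW codec class. A standalone version of that predicate,
// assuming isHwCodec() (from ResourceManagerServiceUtils) returns true exactly
// for the kHw*Codec sub-types:
static bool matchesPrimarySubType(const ResourceList& resources,
                                  MediaResource::SubType primarySubType) {
    if (primarySubType == MediaResource::SubType::kUnspecifiedSubType) {
        return true;  // no primary filter requested
    }
    for (auto it = resources.begin(); it != resources.end(); ++it) {
        if (it->second.subType == primarySubType ||
            isHwCodec(it->second.subType) == isHwCodec(primarySubType)) {
            return true;  // exact match, or same HW/SW codec class
        }
    }
    return false;
}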
+
+void ResourceTracker::dump(std::string& resourceLogs) {
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    resourceLogs.append("  Processes:\n");
+    for (const auto& [pid, /* ResourceInfos */ infos] : mMap) {
+        snprintf(buffer, SIZE, "    Pid: %d\n", pid);
+        resourceLogs.append(buffer);
+        int priority = 0;
+        if (getPriority(pid, &priority)) {
+            snprintf(buffer, SIZE, "    Priority: %d\n", priority);
+        } else {
+            snprintf(buffer, SIZE, "    Priority: <unknown>\n");
+        }
+        resourceLogs.append(buffer);
+
+        for (const auto& [infoKey, /* ResourceInfo */ info] : infos) {
+            resourceLogs.append("      Client:\n");
+            snprintf(buffer, SIZE, "        Id: %lld\n", (long long)info.clientId);
+            resourceLogs.append(buffer);
+
+            std::string clientName = info.name;
+            snprintf(buffer, SIZE, "        Name: %s\n", clientName.c_str());
+            resourceLogs.append(buffer);
+
+            const ResourceList& resources = info.resources;
+            resourceLogs.append("        Resources:\n");
+            for (auto it = resources.begin(); it != resources.end(); it++) {
+                snprintf(buffer, SIZE, "          %s\n", toString(it->second).c_str());
+                resourceLogs.append(buffer);
+            }
+        }
+    }
+    resourceLogs.append("  Process Pid override:\n");
+    for (const auto& [oldPid, newPid] : mOverridePidMap) {
+        snprintf(buffer, SIZE, "    Original Pid: %d,  Override Pid: %d\n", oldPid, newPid);
+        resourceLogs.append(buffer);
+    }
+}
+
+void ResourceTracker::onFirstAdded(const MediaResourceParcel& resource, uid_t uid) {
+    std::shared_ptr<ResourceManagerServiceNew> service = mService.lock();
+    if (service == nullptr) {
+        ALOGW("%s: ResourceManagerService is invalid!", __func__);
+        return;
+    }
+
+    service->onFirstAdded(resource, uid);
+}
+
+void ResourceTracker::onLastRemoved(const MediaResourceParcel& resource, uid_t uid) {
+    std::shared_ptr<ResourceManagerServiceNew> service = mService.lock();
+    if (service == nullptr) {
+        ALOGW("%s: ResourceManagerService is invalid!", __func__);
+        return;
+    }
+
+    service->onLastRemoved(resource, uid);
+}
+
+bool ResourceTracker::getPriority(int pid, int* priority) {
+    int newPid = pid;
+
+    if (mOverridePidMap.find(pid) != mOverridePidMap.end()) {
+        newPid = mOverridePidMap[pid];
+        ALOGD("%s: use override pid %d instead original pid %d", __func__, newPid, pid);
+    }
+
+    return mProcessInfo->getPriority(newPid, priority);
+}
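// Illustrative sketch only (not from the original patch): getPriority()
// consults mOverridePidMap first, so an override installed via overridePid()
// redirects every later priority lookup, and (per ResourceTracker.h) a newPid
// of -1 removes the entry again. The pids below are made up.
static void demoPidOverride(ResourceTracker& tracker) {
    // Redirect priority lookups for pid 1000 to pid 2000.
    tracker.overridePid(/*originalPid=*/1000, /*newPid=*/2000);

    int priority = -1;
    // With the override installed, this consults the priority of pid 2000.
    tracker.getPriority(/*pid=*/1000, &priority);

    // Setting newPid to -1 drops the override; pid 1000 is looked up directly again.
    tracker.overridePid(1000, -1);
}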
+
+bool ResourceTracker::getNonConflictingClients(const ResourceRequestInfo& resourceRequestInfo,
+                                               std::vector<ClientInfo>& clients) {
+    MediaResource::Type type = resourceRequestInfo.mResource->type;
+    MediaResource::SubType subType = resourceRequestInfo.mResource->subType;
+    for (auto& [pid, /* ResourceInfos */ infos] : mMap) {
+        for (const auto& [id, /* ResourceInfo */ info] : infos) {
+            if (hasResourceType(type, subType, info.resources)) {
+                if (!isCallingPriorityHigher(resourceRequestInfo.mCallingPid, pid)) {
+                    // some higher/equal priority process owns the resource,
+                    // this is a conflict.
+                    ALOGE("%s: The resource (%s) request from pid %d is conflicting",
+                          __func__, asString(type), pid);
+                    clients.clear();
+                    return false;
+                } else {
+                    if (!contains(clients, info.clientId)) {
+                        clients.emplace_back(info.pid, info.uid, info.clientId);
+                    }
+                }
+            }
+        }
+    }
+
+    return true;
+}
+
+const ResourceInfo* ResourceTracker::getResourceInfo(int pid, const int64_t& clientId) const {
+    std::map<int, ResourceInfos>::const_iterator found = mMap.find(pid);
+    if (found == mMap.end()) {
+        ALOGV("%s: didn't find pid %d for clientId %lld", __func__, pid, (long long) clientId);
+        return nullptr;
+    }
+
+    const ResourceInfos& infos = found->second;
+    ResourceInfos::const_iterator foundClient = infos.find(clientId);
+    if (foundClient == infos.end()) {
+        ALOGV("%s: didn't find clientId %lld", __func__, (long long) clientId);
+        return nullptr;
+    }
+
+    return &foundClient->second;
+}
+
+bool ResourceTracker::isCallingPriorityHigher(int callingPid, int pid) {
+    int callingPidPriority;
+    if (!getPriority(callingPid, &callingPidPriority)) {
+        return false;
+    }
+
+    int priority;
+    if (!getPriority(pid, &priority)) {
+        return false;
+    }
+
+    return (callingPidPriority < priority);
+}
+
+} // namespace android
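Note the priority convention the tracker relies on throughout: getPriority() reports a value where a numerically smaller number means a more important process, which is why isCallingPriorityHigher() compares with '<' and the getLowestPriorityPid() helpers keep the largest value they see. A small, self-contained illustration of that ordering (the values themselves are made up):

#include <algorithm>
#include <cassert>

int main() {
    const int callingPidPriority = 100;  // more important process (smaller value)
    const int ownerPidPriority   = 300;  // less important process (larger value)

    // Same comparison as ResourceTracker::isCallingPriorityHigher().
    assert(callingPidPriority < ownerPidPriority);

    // getLowestPriorityPid() keeps the largest value it sees,
    // i.e. the least important process.
    assert(std::max(callingPidPriority, ownerPidPriority) == ownerPidPriority);
    return 0;
}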
diff --git a/services/mediaresourcemanager/ResourceTracker.h b/services/mediaresourcemanager/ResourceTracker.h
new file mode 100644
index 0000000..e5f33ec
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceTracker.h
@@ -0,0 +1,242 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_RESOURCETRACKER_H_
+#define ANDROID_MEDIA_RESOURCETRACKER_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+#include <media/MediaResource.h>
+#include <aidl/android/media/ClientInfoParcel.h>
+#include <aidl/android/media/IResourceManagerClient.h>
+#include <aidl/android/media/MediaResourceParcel.h>
+
+#include "ResourceManagerServiceUtils.h"
+
+namespace android {
+
+class DeathNotifier;
+class ResourceManagerServiceNew;
+class ResourceObserverService;
+struct ProcessInfoInterface;
+struct ResourceRequestInfo;
+struct ClientInfo;
+
+/*
+ * ResourceTracker abstracts the resources managed by the ResourceManager.
+ * It keeps track of the resources used by the clients (clientId) and by the processes (pid).
+ */
+class ResourceTracker {
+public:
+    ResourceTracker(const std::shared_ptr<ResourceManagerServiceNew>& service,
+                    const sp<ProcessInfoInterface>& processInfo);
+    ~ResourceTracker();
+
+    /**
+     * Add or update resources for |clientInfo|.
+     *
+     * If |clientInfo| is not tracked yet, it records its associated |client| and adds
+     * |resources| to the tracked resources. If |clientInfo| is already tracked,
+     * it updates the tracked resources by adding |resources| to them (|client| in
+     * this case is unused and unchecked).
+     *
+     * @param clientInfo Info of the calling client.
+     * @param client Interface for the client.
+     * @param resources An array of resources to be added.
+     *
+     * @return true upon successfully adding/updating the resources, false
+     * otherwise.
+     */
+    bool addResource(const aidl::android::media::ClientInfoParcel& clientInfo,
+                     const std::shared_ptr<::aidl::android::media::IResourceManagerClient>& client,
+                     const std::vector<::aidl::android::media::MediaResourceParcel>& resources);
+
+    // Remove a set of resources from the given client.
+    // returns true on success, false otherwise.
+    bool removeResource(const aidl::android::media::ClientInfoParcel& clientInfo,
+                        const std::vector<::aidl::android::media::MediaResourceParcel>& resources);
+
+    /**
+     * Remove all resources tracked for |clientInfo|.
+     *
+     * If |validateCallingPid| is true, the calling process (pid) is validated
+     * to be a trusted process.
+     * Returns true on success (|clientInfo| was tracked and, if requested, the caller
+     * was validated as a trusted process), false otherwise (|clientInfo| was not tracked
+     * or the caller was not a trusted process).
+     */
+    bool removeResource(const aidl::android::media::ClientInfoParcel& clientInfo,
+                        bool validateCallingPid);
+
+    // Mark the client for pending removal.
+    // Such clients are primary candidates for reclaim.
+    // returns true on success, false otherwise.
+    bool markClientForPendingRemoval(const aidl::android::media::ClientInfoParcel& clientInfo);
+
+    // Get a list of clients that belong to the process with the given pid and are marked
+    // pending removal by markClientForPendingRemoval.
+    // returns true on success, false otherwise.
+    bool getClientsMarkedPendingRemoval(int32_t pid, std::vector<ClientInfo>& targetClients);
+
+    // Override the pid of originalPid with newPid
+    // To remove the pid entry from the override list, set newPid to -1.
+    // returns true on successful override, false otherwise.
+    bool overridePid(int originalPid, int newPid);
+
+    // Override the process info {state, oom score} of the process with pid.
+    // returns true on success, false otherwise.
+    bool overrideProcessInfo(
+            const std::shared_ptr<aidl::android::media::IResourceManagerClient>& client,
+            int pid, int procState, int oomScore);
+
+    // Remove the overridden process info.
+    void removeProcessInfoOverride(int pid);
+
+    // Find all clients that hold the given resources.
+    // If applicable, match the primary type too.
+    // The |clients| (list) isn't cleared by this function to allow calling this
+    // function multiple times for different resources.
+    // returns true upon finding at least one client with the given resource request info,
+    // false otherwise (no clients).
+    bool getAllClients(
+            const ResourceRequestInfo& resourceRequestInfo,
+            std::vector<ClientInfo>& clients,
+            MediaResource::SubType primarySubType = MediaResource::SubType::kUnspecifiedSubType);
+
+    // Look for the lowest priority process that holds the given resources.
+    // Upon success, lowestPriorityPid and lowestPriority are
+    // set accordingly and it returns true.
+    // If no such process is found, it will return false
+    // without updating lowestPriorityPid and lowestPriority.
+    bool getLowestPriorityPid(MediaResource::Type type, MediaResource::SubType subType,
+                              int& lowestPriorityPid, int& lowestPriority);
+
+    // Look for the lowest priority process with the given resources
+    // among the given client list.
+    // If applicable, match the primary type too.
+    // returns true on success, false otherwise.
+    bool getLowestPriorityPid(
+            MediaResource::Type type, MediaResource::SubType subType,
+            MediaResource::SubType primarySubType,
+            const std::vector<ClientInfo>& clients,
+            int& lowestPriorityPid, int& lowestPriority);
+
+    // Find the biggest client of the given process with given resources,
+    // that is marked pending removal.
+    // returns true on success, false otherwise.
+    bool getBiggestClientPendingRemoval(
+            int pid, MediaResource::Type type,
+            MediaResource::SubType subType,
+            ClientInfo& clientInfo);
+
+    // Find the biggest client of the process |targetPid|, selecting it from the given
+    // list of clients.
+    // If applicable, match the primary type too.
+    // Returns true when a client is found and clientInfo is updated accordingly.
+    // Upon failure to find a client, it will return false without updating
+    // clientInfo.
+    bool getBiggestClient(
+            int targetPid,
+            MediaResource::Type type,
+            MediaResource::SubType subType,
+            const std::vector<ClientInfo>& clients,
+            ClientInfo& clientInfo,
+            MediaResource::SubType primarySubType = MediaResource::SubType::kUnspecifiedSubType);
+
+    // Find the client that belongs to the given process (pid) and has the given clientId.
+    // A nullptr is returned upon failure to find the client.
+    std::shared_ptr<::aidl::android::media::IResourceManagerClient> getClient(
+            int pid, const int64_t& clientId) const;
+
+    // Removes the client from the given process(pid) with the given clientId.
+    // returns true on success, false otherwise.
+    bool removeClient(int pid, const int64_t& clientId);
+
+    // Set the resource observer service, which is notified when resources
+    // are added or removed.
+    void setResourceObserverService(
+            const std::shared_ptr<ResourceObserverService>& observerService);
+
+    // Dump all the resource allocations for all the processes into a given string
+    void dump(std::string& resourceLogs);
+
+    // Get the priority of the process with the given pid.
+    // If the priority can't be retrieved, it will
+    // return false.
+    bool getPriority(int pid, int* priority);
+
+    // Check if the given resource request has conflicting clients.
+    // The resource conflict is defined by the ResourceModel (such as
+    // co-existence of secure codec with another secure or non-secure codec).
+    // But here, the ResourceTracker only looks for resources from lower
+    // priority processes.
+    // If only processes of higher or the same priority hold the given resource,
+    // it will return false.
+    // Otherwise, it adds all such clients to the list of clients and returns true.
+    bool getNonConflictingClients(const ResourceRequestInfo& resourceRequestInfo,
+                                  std::vector<ClientInfo>& clients);
+
+    // Returns unmodifiable reference to the resource map.
+    const std::map<int, ResourceInfos>& getResourceMap() const {
+        return mMap;
+    }
+
+private:
+    // Get ResourceInfos associated with the given process.
+    // If none exists, this method will create and associate an empty object and return it.
+    ResourceInfos& getResourceInfosForEdit(int pid);
+
+    // A helper function that returns true if the callingPid has higher priority than pid.
+    // Returns false otherwise.
+    bool isCallingPriorityHigher(int callingPid, int pid);
+
+    // Locate the resource info corresponding to the process pid and
+    // the client clientId.
+    const ResourceInfo* getResourceInfo(int pid, const int64_t& clientId) const;
+
+    // Notify when a resource is added for the first time.
+    void onFirstAdded(const MediaResourceParcel& resource, uid_t uid);
+    // Notify when a resource is removed for the last time.
+    void onLastRemoved(const MediaResourceParcel& resource, uid_t uid);
+
+private:
+    // Structure that defines process info that needs to be overridden.
+    struct ProcessInfoOverride {
+        std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
+        std::shared_ptr<::aidl::android::media::IResourceManagerClient> client;
+    };
+
+    // Map of Resource information indexed by the process id.
+    std::map<int, ResourceInfos> mMap;
+    // A weak reference (to avoid cyclic dependency) to the ResourceManagerService.
+    // ResourceTracker uses this to communicate back with the ResourceManagerService.
+    std::weak_ptr<ResourceManagerServiceNew> mService;
+    // To notify the ResourceObserverService about resources being added or removed.
+    std::shared_ptr<ResourceObserverService> mObserverService;
+    // Map of pids to their override pids.
+    std::map<int, int> mOverridePidMap;
+    // Map of pids to their overridden process info.
+    std::map<pid_t, ProcessInfoOverride> mProcessInfoOverrideMap;
+    // Interface that gets process specific information.
+    sp<ProcessInfoInterface> mProcessInfo;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCETRACKER_H_
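Taken together, the header above suggests the usual flow: register resources per client with addResource(), collect reclaim candidates with getAllClients()/getBiggestClient(), then drop the chosen client with removeClient(). The following is a hedged sketch of that flow only, reusing the parcel shapes that appear in the tests further below; the pids, uids and ids are made up, |client| stands in for a real IResourceManagerClient binder, and error handling is omitted.

#include <memory>
#include <vector>
#include <media/MediaResource.h>
#include "ResourceTracker.h"

using aidl::android::media::ClientInfoParcel;
using aidl::android::media::IResourceManagerClient;
using aidl::android::media::MediaResourceParcel;

void demoTrackerFlow(android::ResourceTracker& tracker,
                     const std::shared_ptr<IResourceManagerClient>& client) {
    // 1) Register a HW secure video codec for a (hypothetical) client.
    ClientInfoParcel info{.pid = 1001, .uid = 10001, .id = 1, .name = "demo"};
    std::vector<MediaResourceParcel> resources;
    resources.push_back(android::MediaResource(
            android::MediaResource::Type::kSecureCodec,
            android::MediaResource::SubType::kHwVideoCodec, 1));
    tracker.addResource(info, client, resources);

    // 2) During reclaim, collect every client holding that resource ...
    android::MediaResource wanted(android::MediaResource::Type::kSecureCodec,
                                  android::MediaResource::SubType::kHwVideoCodec, 1);
    android::ResourceRequestInfo request{/*callingPid=*/2000, &wanted};
    std::vector<android::ClientInfo> candidates;
    tracker.getAllClients(request, candidates);

    // 3) ... then pick the biggest holder within a target process and drop it.
    android::ClientInfo target;
    if (tracker.getBiggestClient(/*targetPid=*/1001,
                                 android::MediaResource::Type::kSecureCodec,
                                 android::MediaResource::SubType::kHwVideoCodec,
                                 candidates, target)) {
        tracker.removeClient(target.mPid, target.mClientId);
    }
}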
diff --git a/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl b/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
index 3c9c8c7..85f1970 100644
--- a/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
@@ -33,33 +33,29 @@
     /**
      * Type of codec (Audio/Video/Image).
      */
-    MediaResourceSubType codecType;
+    MediaResourceSubType codecType = MediaResourceSubType.kUnspecifiedSubType;
 
     /**
      * true if this is an encoder, false if this is a decoder.
      */
-    boolean isEncoder;
-
-    /**
-     * true if this is hardware codec, false otherwise.
-     */
-    boolean isHardware;
+    boolean isEncoder = false;
 
     /*
      * Video Resolution of the codec when it was configured, as width and height (in pixels).
      */
-    int width;
-    int height;
+    int width = 0;
+    int height = 0;
 
     /*
      * Timestamp (in microseconds) when this configuration is created.
      */
-    long timeStamp;
+    long timeStamp = 0;
+
     /*
      * ID associated with the Codec.
      * This will be used by the metrics:
      * - Associate MediaCodecStarted with MediaCodecStopped Atom.
      * - Correlate MediaCodecReported Atom for codec configuration parameters.
      */
-    long id;
+    long id = 0;
 }
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaResourceParcel.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceParcel.aidl
index b0f2b71..6f180e9 100644
--- a/services/mediaresourcemanager/aidl/android/media/MediaResourceParcel.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/MediaResourceParcel.aidl
@@ -25,18 +25,15 @@
  * {@hide}
  */
 parcelable MediaResourceParcel {
-    // TODO: default enum value is not supported yet.
-    // Set default enum value when b/142739329 is fixed.
-
     /**
      * Type of the media resource.
      */
-    MediaResourceType type;// = MediaResourceTypeEnum::kUnspecified;
+    MediaResourceType type = MediaResourceType.kUnspecified;
 
     /**
      * Sub-type of the media resource.
      */
-    MediaResourceSubType subType;// = MediaResourceSubTypeEnum::kUnspecifiedSubType;
+    MediaResourceSubType subType = MediaResourceSubType.kUnspecifiedSubType;
 
     /**
      * Identifier of the media resource (eg. Drm session id).
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
index 72a0551..311b6c3 100644
--- a/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
@@ -24,7 +24,10 @@
 @Backing(type="int")
 enum MediaResourceSubType {
     kUnspecifiedSubType = 0,
-    kAudioCodec = 1,
-    kVideoCodec = 2,
-    kImageCodec = 3,
+    kHwAudioCodec = 1,
+    kSwAudioCodec = 2,
+    kHwVideoCodec = 3,
+    kSwVideoCodec = 4,
+    kHwImageCodec = 5,
+    kSwImageCodec = 6,
 }
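With the sub-type split into HW/SW variants, helpers that used to check for kVideoCodec and friends now have to look at the new values; the ResourceTracker code earlier relies on an isHwCodec() predicate for exactly this. Below is one plausible shape for such a predicate, shown only as a sketch; the helper actually used lives with the other ResourceManagerServiceUtils code and may be written differently.

#include <media/MediaResource.h>

// Sketch only: returns true for the hardware codec sub-types introduced above.
static inline bool isHwCodecSubType(android::MediaResource::SubType subType) {
    return subType == android::MediaResource::SubType::kHwAudioCodec ||
           subType == android::MediaResource::SubType::kHwVideoCodec ||
           subType == android::MediaResource::SubType::kHwImageCodec;
}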
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
index b2bb71b..353e59c 100644
--- a/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
@@ -24,10 +24,13 @@
 @Backing(type="int")
 enum MediaResourceType {
     kUnspecified = 0,
+    // Codec resource type, secure or non-secure
     kSecureCodec = 1,
     kNonSecureCodec = 2,
+    // Other Codec resource types understood by the frameworks
     kGraphicMemory = 3,
     kCpuBoost = 4,
     kBattery = 5,
+    // DRM Session resource type
     kDrmSession = 6,
 }
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index a46d87a..b0db12b 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -46,6 +46,7 @@
         "libstatspull",
         "libstatssocket",
         "libactivitymanager_aidl",
+        "aconfig_mediacodec_flags_c_lib",
     ],
     fuzz_config: {
         cc: [
diff --git a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
index 6fa9831..643a4e5 100644
--- a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
@@ -208,9 +208,9 @@
         return nullptr;
     }
 
-    shared_ptr<ResourceManagerService> mService =
-        ::ndk::SharedRefBase::make<ResourceManagerService>(new TestProcessInfo(),
-                                                           new TestSystemCallback());
+    shared_ptr<ResourceManagerService> mService = ResourceManagerService::Create(
+            new TestProcessInfo(),
+            new TestSystemCallback());
     FuzzedDataProvider* mFuzzedDataProvider = nullptr;
 };
 
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index de24e1e..6a64823 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -12,7 +12,10 @@
     name: "ResourceManagerService_test",
     srcs: ["ResourceManagerService_test.cpp"],
     test_suites: ["device-tests"],
-    static_libs: ["libresourcemanagerservice"],
+    static_libs: [
+        "libresourcemanagerservice",
+        "aconfig_mediacodec_flags_c_lib",
+    ],
     shared_libs: [
         "libbinder",
         "libbinder_ndk",
@@ -20,11 +23,11 @@
         "libmedia",
         "libmediautils",
         "libutils",
-        "libmediautils",
         "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
         "libactivitymanager_aidl",
+        "server_configurable_flags",
     ],
     include_dirs: [
         "frameworks/av/include",
@@ -63,6 +66,7 @@
     static_libs: [
         "libresourcemanagerservice",
         "resourceobserver_aidl_interface-V1-ndk",
+        "aconfig_mediacodec_flags_c_lib",
     ],
     shared_libs: [
         "libbinder",
@@ -75,6 +79,7 @@
         "libstatspull",
         "libstatssocket",
         "libactivitymanager_aidl",
+        "server_configurable_flags",
     ],
     include_dirs: [
         "frameworks/av/include",
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 474ff0f..58dbb8d 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -198,8 +198,8 @@
         return static_cast<TestClient*>(testClient.get());
     }
 
-    ResourceManagerServiceTestBase() {
-        ALOGI("ResourceManagerServiceTestBase created");
+    ResourceManagerServiceTestBase(bool newRM = false) : mNewRM(newRM) {
+        ALOGI("ResourceManagerServiceTestBase created with %s RM", newRM ? "new" : "old");
     }
 
     void SetUp() override {
@@ -207,8 +207,11 @@
         // silently ignored.
         ABinderProcess_startThreadPool();
         mSystemCB = new TestSystemCallback();
-        mService = ::ndk::SharedRefBase::make<ResourceManagerService>(
-            new TestProcessInfo, mSystemCB);
+        if (mNewRM) {
+            mService = ResourceManagerService::CreateNew(new TestProcessInfo, mSystemCB);
+        } else {
+            mService = ResourceManagerService::Create(new TestProcessInfo, mSystemCB);
+        }
         mTestClient1 = ::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, mService);
         mTestClient2 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
         mTestClient3 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
@@ -245,6 +248,8 @@
         EXPECT_EQ(client, info.client);
         EXPECT_TRUE(isEqualResources(resources, info.resources));
     }
+
+    bool mNewRM = false;
 };
 
 } // namespace android
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index ae3faea..9d4beef 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -28,32 +28,32 @@
 private:
     static MediaResource createSecureVideoCodecResource(int amount = 1) {
         return MediaResource(MediaResource::Type::kSecureCodec,
-            MediaResource::SubType::kVideoCodec, amount);
+            MediaResource::SubType::kHwVideoCodec, amount);
     }
 
     static MediaResource createNonSecureVideoCodecResource(int amount = 1) {
         return MediaResource(MediaResource::Type::kNonSecureCodec,
-            MediaResource::SubType::kVideoCodec, amount);
+            MediaResource::SubType::kHwVideoCodec, amount);
     }
 
     static MediaResource createSecureAudioCodecResource(int amount = 1) {
         return MediaResource(MediaResource::Type::kSecureCodec,
-            MediaResource::SubType::kAudioCodec, amount);
+            MediaResource::SubType::kHwAudioCodec, amount);
     }
 
     static MediaResource createNonSecureAudioCodecResource(int amount = 1) {
         return MediaResource(MediaResource::Type::kNonSecureCodec,
-            MediaResource::SubType::kAudioCodec, amount);
+            MediaResource::SubType::kHwAudioCodec, amount);
     }
 
     static MediaResource createSecureImageCodecResource(int amount = 1) {
         return MediaResource(MediaResource::Type::kSecureCodec,
-            MediaResource::SubType::kImageCodec, amount);
+            MediaResource::SubType::kHwImageCodec, amount);
     }
 
     static MediaResource createNonSecureImageCodecResource(int amount = 1) {
         return MediaResource(MediaResource::Type::kNonSecureCodec,
-            MediaResource::SubType::kImageCodec, amount);
+            MediaResource::SubType::kHwImageCodec, amount);
     }
 
     static MediaResource createGraphicMemoryResource(int amount = 1) {
@@ -77,8 +77,20 @@
     }
 
 public:
-    ResourceManagerServiceTest() : ResourceManagerServiceTestBase() {}
+    ResourceManagerServiceTest(bool newRM = false) : ResourceManagerServiceTestBase(newRM) {}
 
+    void updateConfig(bool bSupportsMultipleSecureCodecs, bool bSupportsSecureWithNonSecureCodec) {
+        std::vector<MediaResourcePolicyParcel> policies;
+        policies.push_back(
+                MediaResourcePolicy(
+                        IResourceManagerService::kPolicySupportsMultipleSecureCodecs,
+                        bSupportsMultipleSecureCodecs ? "true" : "false"));
+        policies.push_back(
+                MediaResourcePolicy(
+                        IResourceManagerService::kPolicySupportsSecureWithNonSecureCodec,
+                        bSupportsSecureWithNonSecureCodec ? "true" : "false"));
+        mService->config(policies);
+    }
 
     // test set up
     // ---------------------------------------------------------------------------------
@@ -129,7 +141,7 @@
         resources3.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 100));
         mService->addResource(client3Info, mTestClient3, resources3);
 
-        const PidResourceInfosMap &map = mService->mMap;
+        const PidResourceInfosMap &map = mService->getResourceMap();
         EXPECT_EQ(2u, map.size());
         const auto& mapIndex1 = map.find(kTestPid1);
         EXPECT_TRUE(mapIndex1 != map.end());
@@ -159,7 +171,7 @@
         // Expected result:
         // 1) the client should have been added;
         // 2) both resource entries should have been rejected, resource list should be empty.
-        const PidResourceInfosMap &map = mService->mMap;
+        const PidResourceInfosMap &map = mService->getResourceMap();
         EXPECT_EQ(1u, map.size());
         const auto& mapIndex1 = map.find(kTestPid1);
         EXPECT_TRUE(mapIndex1 != map.end());
@@ -213,29 +225,11 @@
         EXPECT_TRUE(mService->mSupportsMultipleSecureCodecs);
         EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec);
 
-        std::vector<MediaResourcePolicyParcel> policies1;
-        policies1.push_back(
-                MediaResourcePolicy(
-                        IResourceManagerService::kPolicySupportsMultipleSecureCodecs,
-                        "true"));
-        policies1.push_back(
-                MediaResourcePolicy(
-                        IResourceManagerService::kPolicySupportsSecureWithNonSecureCodec,
-                        "false"));
-        mService->config(policies1);
+        updateConfig(true, false);
         EXPECT_TRUE(mService->mSupportsMultipleSecureCodecs);
         EXPECT_FALSE(mService->mSupportsSecureWithNonSecureCodec);
 
-        std::vector<MediaResourcePolicyParcel> policies2;
-        policies2.push_back(
-                MediaResourcePolicy(
-                        IResourceManagerService::kPolicySupportsMultipleSecureCodecs,
-                        "false"));
-        policies2.push_back(
-                MediaResourcePolicy(
-                        IResourceManagerService::kPolicySupportsSecureWithNonSecureCodec,
-                        "true"));
-        mService->config(policies2);
+        updateConfig(false, true);
         EXPECT_FALSE(mService->mSupportsMultipleSecureCodecs);
         EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec);
     }
@@ -254,7 +248,7 @@
         resources11.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
         mService->addResource(client1Info, mTestClient1, resources11);
 
-        const PidResourceInfosMap &map = mService->mMap;
+        const PidResourceInfosMap &map = mService->getResourceMap();
         EXPECT_EQ(1u, map.size());
         const auto& mapIndex1 = map.find(kTestPid1);
         EXPECT_TRUE(mapIndex1 != map.end());
@@ -272,13 +266,15 @@
 
         // test adding new types (including types that differs only in subType)
         resources11.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
-        resources11.push_back(MediaResource(MediaResource::Type::kSecureCodec, MediaResource::SubType::kVideoCodec, 1));
+        resources11.push_back(MediaResource(MediaResource::Type::kSecureCodec,
+                                            MediaResource::SubType::kHwVideoCodec, 1));
         mService->addResource(client1Info, mTestClient1, resources11);
 
         expected.clear();
         expected.push_back(MediaResource(MediaResource::Type::kSecureCodec, 2));
         expected.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
-        expected.push_back(MediaResource(MediaResource::Type::kSecureCodec, MediaResource::SubType::kVideoCodec, 1));
+        expected.push_back(MediaResource(MediaResource::Type::kSecureCodec,
+                                         MediaResource::SubType::kHwVideoCodec, 1));
         expected.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 500));
         expectEqResourceInfo(infos1.at(getId(mTestClient1)), kTestUid1, mTestClient1, expected);
     }
@@ -297,7 +293,7 @@
         resources11.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
         mService->addResource(client1Info, mTestClient1, resources11);
 
-        const PidResourceInfosMap &map = mService->mMap;
+        const PidResourceInfosMap &map = mService->getResourceMap();
         EXPECT_EQ(1u, map.size());
         const auto& mapIndex1 = map.find(kTestPid1);
         EXPECT_TRUE(mapIndex1 != map.end());
@@ -337,8 +333,7 @@
         // ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = false;
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(false, true);
 
             // priority too low to reclaim resource
             ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(kLowPriorityPid),
@@ -372,7 +367,7 @@
                                      .name = "none"};
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
@@ -400,7 +395,7 @@
 
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
@@ -426,7 +421,7 @@
 
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             mService->markClientForPendingRemoval(client2Info);
 
@@ -464,7 +459,7 @@
                                      .name = "none"};
         mService->removeClient(client2Info);
 
-        const PidResourceInfosMap &map = mService->mMap;
+        const PidResourceInfosMap &map = mService->getResourceMap();
         EXPECT_EQ(2u, map.size());
         const ResourceInfos &infos1 = map.at(kTestPid1);
         const ResourceInfos &infos2 = map.at(kTestPid2);
@@ -476,21 +471,25 @@
 
     void testGetAllClients() {
         addResource();
-        MediaResource::Type type = MediaResource::Type::kSecureCodec;
-        MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
 
-        std::vector<std::shared_ptr<IResourceManagerClient> > clients;
-        PidUidVector idList;
-        EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, subType, &idList, &clients));
+        std::vector<ClientInfo> targetClients;
+        MediaResource resource(MediaResource::Type::kSecureCodec,
+                               MediaResource::SubType::kUnspecifiedSubType,
+                               1);
+        ResourceRequestInfo requestInfoHigh { kHighPriorityPid, &resource};
+        ResourceRequestInfo requestInfoMid { kMidPriorityPid, &resource};
+        ResourceRequestInfo requestInfoLow { kLowPriorityPid, &resource};
+
+        EXPECT_FALSE(mService->getAllClients_l(requestInfoLow, targetClients));
         // some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
         // will fail.
-        EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, subType, &idList, &clients));
-        EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, subType, &idList, &clients));
+        EXPECT_FALSE(mService->getAllClients_l(requestInfoMid, targetClients));
+        EXPECT_TRUE(mService->getAllClients_l(requestInfoHigh, targetClients));
 
-        EXPECT_EQ(2u, clients.size());
+        EXPECT_EQ(2u, targetClients.size());
         // (OK to require ordering in clients[], as the pid map is sorted)
-        EXPECT_EQ(mTestClient3, clients[0]);
-        EXPECT_EQ(mTestClient1, clients[1]);
+        EXPECT_EQ(getId(mTestClient3), targetClients[0].mClientId);
+        EXPECT_EQ(getId(mTestClient1), targetClients[1].mClientId);
     }
 
     void testReclaimResourceSecure() {
@@ -514,8 +513,7 @@
         // ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = false;
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(false, true);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -540,8 +538,7 @@
         // ### secure codecs can't coexist and secure codec can't coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = false;
-            mService->mSupportsSecureWithNonSecureCodec = false;
+            updateConfig(false, false);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -561,8 +558,7 @@
         // ### secure codecs can coexist but secure codec can't coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = true;
-            mService->mSupportsSecureWithNonSecureCodec = false;
+            updateConfig(true, false);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -593,8 +589,7 @@
         // ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = true;
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -624,8 +619,7 @@
         // ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsMultipleSecureCodecs = true;
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
@@ -671,7 +665,7 @@
         // ### secure codec can't coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = false;
+            updateConfig(true, false);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -697,7 +691,7 @@
         // ### secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             // priority too low
             CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
@@ -727,7 +721,7 @@
         // ### secure codec can coexist with non-secure codec ###
         {
             addResource();
-            mService->mSupportsSecureWithNonSecureCodec = true;
+            updateConfig(true, true);
 
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
@@ -754,23 +748,22 @@
     }
 
     void testGetLowestPriorityBiggestClient() {
-        MediaResource::Type type = MediaResource::Type::kGraphicMemory;
-        MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
-        std::shared_ptr<IResourceManagerClient> client;
-        PidUidVector idList;
-        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, subType,
-                &idList, &client));
+        ClientInfo clientInfo;
+        MediaResource resource(MediaResource::Type::kGraphicMemory,
+                               MediaResource::SubType::kUnspecifiedSubType,
+                               1);
+        ResourceRequestInfo requestInfoHigh { kHighPriorityPid, &resource};
+        ResourceRequestInfo requestInfoLow { kLowPriorityPid, &resource};
+        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(requestInfoHigh, clientInfo));
 
         addResource();
 
-        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kLowPriorityPid, type, subType,
-                &idList, &client));
-        EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, subType,
-                &idList, &client));
+        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(requestInfoLow, clientInfo));
+        EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(requestInfoHigh, clientInfo));
 
         // kTestPid1 is the lowest priority process with MediaResource::Type::kGraphicMemory.
         // mTestClient1 has the largest MediaResource::Type::kGraphicMemory within kTestPid1.
-        EXPECT_EQ(mTestClient1, client);
+        EXPECT_EQ(getId(mTestClient1), clientInfo.mClientId);
     }
 
     void testGetLowestPriorityPid() {
@@ -811,7 +804,8 @@
 
         // new client request should cause VIDEO_ON
         std::vector<MediaResourceParcel> resources1;
-        resources1.push_back(MediaResource(MediaResource::Type::kBattery, MediaResource::SubType::kVideoCodec, 1));
+        resources1.push_back(MediaResource(MediaResource::Type::kBattery,
+                                           MediaResource::SubType::kHwVideoCodec, 1));
         ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
                                      .uid = static_cast<int32_t>(kTestUid1),
                                      .id = getId(mTestClient1),
@@ -826,7 +820,8 @@
 
         // new client request should cause VIDEO_ON
         std::vector<MediaResourceParcel> resources2;
-        resources2.push_back(MediaResource(MediaResource::Type::kBattery, MediaResource::SubType::kVideoCodec, 2));
+        resources2.push_back(MediaResource(MediaResource::Type::kBattery,
+                                           MediaResource::SubType::kHwVideoCodec, 2));
         ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
                                      .uid = static_cast<int32_t>(kTestUid2),
                                      .id = getId(mTestClient2),
@@ -1372,9 +1367,9 @@
                                        int64_t id,
                                        const ClientInfoParcel& clientInfo,
                                        ClientConfigParcel& clientConfig) {
-        clientConfig.codecType = MediaResource::SubType::kVideoCodec;
+        clientConfig.codecType = hw? MediaResource::SubType::kHwVideoCodec :
+                                     MediaResource::SubType::kSwVideoCodec;
         clientConfig.isEncoder = encoder;
-        clientConfig.isHardware = hw;
         clientConfig.width = width;
         clientConfig.height = height;
         clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
@@ -1505,6 +1500,11 @@
     }
 };
 
+class ResourceManagerServiceNewTest : public ResourceManagerServiceTest {
+public:
+    ResourceManagerServiceNewTest(bool newRM = true) : ResourceManagerServiceTest(newRM) {}
+};
+
 TEST_F(ResourceManagerServiceTest, config) {
     testConfig();
 }
@@ -1591,4 +1591,91 @@
     testConcurrentCodecs();
 }
 
+/////// test cases for ResourceManagerServiceNew ////
+TEST_F(ResourceManagerServiceNewTest, config) {
+    testConfig();
+}
+
+TEST_F(ResourceManagerServiceNewTest, addResource) {
+    addResource();
+}
+
+TEST_F(ResourceManagerServiceNewTest, combineResource) {
+    testCombineResource();
+}
+
+TEST_F(ResourceManagerServiceNewTest, combineResourceNegative) {
+    testCombineResourceWithNegativeValues();
+}
+
+TEST_F(ResourceManagerServiceNewTest, removeResource) {
+    testRemoveResource();
+}
+
+TEST_F(ResourceManagerServiceNewTest, removeClient) {
+    testRemoveClient();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimResource) {
+    testReclaimResourceSecure();
+    testReclaimResourceNonSecure();
+}
+
+TEST_F(ResourceManagerServiceNewTest, getAllClients_l) {
+    testGetAllClients();
+}
+
+TEST_F(ResourceManagerServiceNewTest, getLowestPriorityBiggestClient_l) {
+    testGetLowestPriorityBiggestClient();
+}
+
+TEST_F(ResourceManagerServiceNewTest, getLowestPriorityPid_l) {
+    testGetLowestPriorityPid();
+}
+
+TEST_F(ResourceManagerServiceNewTest, isCallingPriorityHigher_l) {
+    testIsCallingPriorityHigher();
+}
+
+TEST_F(ResourceManagerServiceNewTest, batteryStats) {
+    testBatteryStats();
+}
+
+TEST_F(ResourceManagerServiceNewTest, cpusetBoost) {
+    testCpusetBoost();
+}
+
+TEST_F(ResourceManagerServiceNewTest, overridePid) {
+    testOverridePid();
+}
+
+TEST_F(ResourceManagerServiceNewTest, markClientForPendingRemoval) {
+    testMarkClientForPendingRemoval();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimResources_withVideoCodec_reclaimsOnlyVideoCodec) {
+    testReclaimResources_withVideoCodec_reclaimsOnlyVideoCodec();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimResources_withAudioCodec_reclaimsOnlyAudioCodec) {
+    testReclaimResources_withAudioCodec_reclaimsOnlyAudioCodec();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimResources_withImageCodec_reclaimsOnlyImageCodec) {
+    testReclaimResources_withImageCodec_reclaimsOnlyImageCodec();
+}
+
+TEST_F(ResourceManagerServiceNewTest, reclaimResources_whenPartialResourceMatch_reclaims) {
+    testReclaimResources_whenPartialResourceMatch_reclaims();
+}
+
+TEST_F(ResourceManagerServiceNewTest,
+        reclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources) {
+    testReclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources();
+}
+
+TEST_F(ResourceManagerServiceNewTest, concurrentCodecs) {
+    testConcurrentCodecs();
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index 85769d5..3f8ed2a 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -118,22 +118,22 @@
 
 static MediaResource createSecureVideoCodecResource(int amount = 1) {
     return MediaResource(MediaResource::Type::kSecureCodec,
-        MediaResource::SubType::kVideoCodec, amount);
+        MediaResource::SubType::kHwVideoCodec, amount);
 }
 
 static MediaResource createNonSecureVideoCodecResource(int amount = 1) {
     return MediaResource(MediaResource::Type::kNonSecureCodec,
-        MediaResource::SubType::kVideoCodec, amount);
+        MediaResource::SubType::kHwVideoCodec, amount);
 }
 
 static MediaResource createSecureAudioCodecResource(int amount = 1) {
     return MediaResource(MediaResource::Type::kSecureCodec,
-        MediaResource::SubType::kAudioCodec, amount);
+        MediaResource::SubType::kHwAudioCodec, amount);
 }
 
 static MediaResource createNonSecureAudioCodecResource(int amount = 1) {
     return MediaResource(MediaResource::Type::kNonSecureCodec,
-        MediaResource::SubType::kAudioCodec, amount);
+        MediaResource::SubType::kHwAudioCodec, amount);
 }
 
 // Operators for GTest macros.
diff --git a/services/oboeservice/OWNERS b/services/oboeservice/OWNERS
index f4d51f9..3285bf3 100644
--- a/services/oboeservice/OWNERS
+++ b/services/oboeservice/OWNERS
@@ -1 +1,4 @@
+# Bug component: 48436
+jiabin@google.com
 philburk@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/tools/mainline_hook_partial.sh b/tools/mainline_hook_partial.sh
index cd3e579..63ae4c0 100755
--- a/tools/mainline_hook_partial.sh
+++ b/tools/mainline_hook_partial.sh
Binary files differ
diff --git a/tools/mainline_hook_project.sh b/tools/mainline_hook_project.sh
index 1cc3b2b..d58143e 100755
--- a/tools/mainline_hook_project.sh
+++ b/tools/mainline_hook_project.sh
@@ -16,8 +16,8 @@
 
 
 # tunables
-DEV_BRANCH=master
-MAINLINE_BRANCH=tm-mainline-prod
+DEV_BRANCH=main
+MAINLINE_BRANCH=udc-mainline-prod
 
 ###
 RED=$(tput setaf 1)