Merge "Add format change check rule" into main am: 718ccb5ef7 am: 2da7e2a6ea am: b4cef76cd7 am: d8f137b80b am: 42fa1a1537

Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/2711853

Change-Id: Ifdb31d54229e12a085cce9cf88cc407cabd65a38
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/camera/Android.bp b/camera/Android.bp
index a3fd7f9..e5ae954 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -43,6 +43,22 @@
     ],
 }
 
+aconfig_declarations {
+    name: "camera_platform_flags",
+    package: "com.android.internal.camera.flags",
+    srcs: ["camera_platform.aconfig"],
+}
+
+cc_aconfig_library {
+    name: "camera_platform_flags_c_lib",
+    aconfig_declarations: "camera_platform_flags",
+}
+
+java_aconfig_library {
+    name: "camera_platform_flags_java_lib",
+    aconfig_declarations: "camera_platform_flags",
+}
+
 cc_library_headers {
     name: "camera_headers",
     export_include_dirs: ["include"],
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 1af899d..6759f3b 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -104,7 +104,6 @@
 
 namespace {
     sp<::android::hardware::ICameraService> gCameraService;
-    const int                 kCameraServicePollDelay = 500000; // 0.5s
     const char*               kCameraServiceName      = "media.camera";
 
     Mutex                     gLock;
@@ -142,14 +141,10 @@
 
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(toString16(kCameraServiceName));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("CameraService not published, waiting...");
-            usleep(kCameraServicePollDelay);
-        } while(true);
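+        // waitForService() blocks until the "media.camera" service is available (it may still return null on failure).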
+        binder = sm->waitForService(toString16(kCameraServiceName));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (gDeathNotifier == NULL) {
             gDeathNotifier = new DeathNotifier();
         }
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
new file mode 100644
index 0000000..6200489
--- /dev/null
+++ b/camera/camera_platform.aconfig
@@ -0,0 +1,16 @@
+package: "com.android.internal.camera.flags"
+
+flag {
+     namespace: "camera_platform"
+     name: "initial_test_flag"
+     description: "Flag infrastructure test flag"
+     bug: "292631208"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "camera_hsum_permission"
+     description: "Camera access by headless system user"
+     bug: "273539631"
+}
+
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 8472562..13b705c 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -26,12 +26,15 @@
 
     srcs: ["main_cameraserver.cpp"],
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
+
     header_libs: [
         "libmedia_headers",
     ],
 
     shared_libs: [
-        "libcameraservice",
         "liblog",
         "libutils",
         "libui",
@@ -40,15 +43,13 @@
         "libbinder_ndk",
         "libhidlbase",
         "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
     ],
+    static_libs: [
+        "libcameraservice",
+    ],
     compile_multilib: "first",
     cflags: [
         "-Wall",
diff --git a/cmds/screenrecord/Overlay.cpp b/cmds/screenrecord/Overlay.cpp
index 17d7046..a19ef8e 100644
--- a/cmds/screenrecord/Overlay.cpp
+++ b/cmds/screenrecord/Overlay.cpp
@@ -90,9 +90,12 @@
 
 status_t Overlay::stop() {
     ALOGV("Overlay::stop");
-    Mutex::Autolock _l(mMutex);
-    mState = STOPPING;
-    mEventCond.signal();
+    {
+        Mutex::Autolock _l(mMutex);
+        mState = STOPPING;
+        mEventCond.signal();
+    }
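+    // join() runs after mMutex is released above so the overlay thread can acquire it while exiting.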
+    join();
     return NO_ERROR;
 }
 
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index 11e1704..ec99fac 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -37,10 +37,12 @@
     // will calculate frame buffer size if |hasData| is set to true.
     VideoFrame(uint32_t width, uint32_t height,
             uint32_t displayWidth, uint32_t displayHeight,
+            uint32_t displayLeft, uint32_t displayTop,
             uint32_t tileWidth, uint32_t tileHeight,
             uint32_t angle, uint32_t bpp, uint32_t bitDepth, bool hasData, size_t iccSize):
         mWidth(width), mHeight(height),
         mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
+        mDisplayLeft(displayLeft), mDisplayTop(displayTop),
         mTileWidth(tileWidth), mTileHeight(tileHeight), mDurationUs(0),
         mRotationAngle(angle), mBytesPerPixel(bpp), mIccSize(iccSize),
         mBitDepth(bitDepth) {
@@ -82,6 +84,8 @@
     uint32_t mHeight;          // Decoded image height before rotation
     uint32_t mDisplayWidth;    // Display width before rotation
     uint32_t mDisplayHeight;   // Display height before rotation
+    uint32_t mDisplayLeft;     // Display left (column coordinate) before rotation
+    uint32_t mDisplayTop;      // Display top (row coordinate) before rotation
     uint32_t mTileWidth;       // Tile width (0 if image doesn't have grid)
     uint32_t mTileHeight;      // Tile height (0 if image doesn't have grid)
     int64_t  mDurationUs;      // Frame duration in microseconds
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 828d861..2030dc7 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -25,21 +25,31 @@
         "libmediametrics_headers",
     ],
 
-    shared_libs: [
-        "packagemanager_aidl-cpp",
+    defaults: [
+        "libaaudioservice_dependencies",
+        "libaudioflinger_dependencies",
+        "libaudiopolicyservice_dependencies",
+        "latest_android_media_audio_common_types_cpp_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+    ],
+
+    static_libs: [
         "libaaudioservice",
-        "libaudioclient",
         "libaudioflinger",
         "libaudiopolicyservice",
+        "libmedialogservice",
+        "libnbaio",
+    ],
+
+    shared_libs: [
+        "libaudioclient",
         "libaudioprocessing",
         "libbinder",
         "libcutils",
         "libhidlbase",
         "liblog",
         "libmedia",
-        "libmedialogservice",
         "libmediautils",
-        "libnbaio",
         "libnblog",
         "libpowermanager",
         "libutils",
@@ -59,9 +69,9 @@
         "frameworks/av/services/audiopolicy/engine/interface",
         "frameworks/av/services/audiopolicy/service",
         "frameworks/av/services/medialog",
+        "frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
 
-        // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-        "frameworks/av/services/oboeservice",
+
     ],
 
     init_rc: ["audioserver.rc"],
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index d865ab2..721a12a 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -69,7 +69,7 @@
         addParameter(
                 DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
                 .withDefault(new C2StreamChannelCountInfo::input(0u, 1))
-                .withFields({C2F(mChannelCount, value).inRange(1, 6)})
+                .withFields({C2F(mChannelCount, value).inRange(1, kMaxChannelCount)})
                 .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
                 .build());
 
@@ -198,10 +198,17 @@
 }
 
 c2_status_t C2SoftAacEnc::onFlush_sm() {
+    if (mAACEncoder != nullptr) {
+        /* encoder's internal input buffer needs to be reset during flush */
+        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_CONTROL_STATE, AACENC_INIT_ALL)) {
+            ALOGE("Failed to reset AAC encoder");
+        }
+    }
     mSentCodecSpecificData = false;
     mInputSize = 0u;
     mNextFrameTimestampUs.reset();
     mLastFrameEndTimestampUs.reset();
+    mRemainderLen = 0;
     return C2_OK;
 }
 
@@ -562,6 +569,11 @@
                 inBufferSize[0] -= outargs.numInSamples * sizeof(int16_t);
                 inargs.numInSamples -= outargs.numInSamples;
             }
+        } else {
+            // In case of error in encode call, discard remaining input bytes.
+            inBuffer[0] = nullptr;
+            inBufferSize[0] = 0;
+            inargs.numInSamples = 0;
         }
         ALOGV("encoderErr = %d mInputSize = %zu "
               "inargs.numInSamples = %d, mNextFrameTimestampUs = %lld",
@@ -597,10 +609,19 @@
                            &outBufDesc,
                            &inargs,
                            &outargs);
+
+        // after flush, discard remaining input bytes.
+        inBuffer[0] = nullptr;
         inBufferSize[0] = 0;
     }
 
     if (inBufferSize[0] > 0) {
+        if (inBufferSize[0] > kRemainderBufSize) {
+            ALOGE("Remaining bytes %d greater than remainder buffer size %zu", inBufferSize[0],
+                    kRemainderBufSize);
+            work->result = C2_CORRUPTED;
+            return;
+        }
         for (size_t i = 0; i < inBufferSize[0]; ++i) {
             mRemainder[i] = static_cast<uint8_t *>(inBuffer[0])[i];
         }
diff --git a/media/codec2/components/aac/C2SoftAacEnc.h b/media/codec2/components/aac/C2SoftAacEnc.h
index 9a28280..c79526c 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.h
+++ b/media/codec2/components/aac/C2SoftAacEnc.h
@@ -47,6 +47,9 @@
             const std::shared_ptr<C2BlockPool> &pool) override;
 
 private:
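+    // mRemainder (below) holds at most one pending 16-bit sample per channel, i.e. kRemainderBufSize bytes.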
+    static constexpr size_t kMaxChannelCount = 6;
+    static constexpr size_t kRemainderBufSize = kMaxChannelCount * sizeof(int16_t);
+
     std::shared_ptr<IntfImpl> mIntf;
 
     HANDLE_AACENCODER mAACEncoder;
@@ -63,7 +66,7 @@
     std::atomic_uint64_t mOutIndex;
 
     // We support max 6 channels
-    uint8_t mRemainder[6 * sizeof(int16_t)];
+    uint8_t mRemainder[kRemainderBufSize];
     size_t mRemainderLen;
 
     status_t initEncoder();
diff --git a/media/codec2/components/dav1d/Android.bp b/media/codec2/components/dav1d/Android.bp
new file mode 100644
index 0000000..f7850ad
--- /dev/null
+++ b/media/codec2/components/dav1d/Android.bp
@@ -0,0 +1,37 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libcodec2_soft_av1dec_dav1d",
+    // TODO: b/277797541 - enable once ready
+    enabled: false,
+
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_all-defaults",
+    ],
+
+    cflags: [
+        "-DCODECNAME=\"c2.android.dav1d-av1.decoder\"",
+        "-Wno-unused-variable",
+    ],
+
+    srcs: ["C2SoftDav1dDec.cpp"],
+    static_libs: [
+        "libyuv_static",
+        "libdav1d_8bit",
+        "libdav1d_16bit",
+    ],
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
new file mode 100644
index 0000000..b0cef41
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -0,0 +1,1493 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftDav1dDec"
+#include <android-base/properties.h>
+#include <cutils/properties.h>
+#include <thread>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include <libyuv.h>
+#include <log/log.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include "C2SoftDav1dDec.h"
+
+// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
+#if LIBYUV_VERSION >= 1780
+#include <algorithm>
+#define HAVE_LIBYUV_I410_I210_TO_AB30 1
+#else
+#define HAVE_LIBYUV_I410_I210_TO_AB30 0
+#endif
+
+namespace android {
+
+// Flag to enable dumping the bitstream and the decoded pictures to files.
+static const bool ENABLE_DUMPING_FILES_DEFAULT = false;
+static const char ENABLE_DUMPING_FILES_PROPERTY[] = "debug.dav1d.enabledumping";
+
+// The number of frames to dump to a file
+static const int NUM_FRAMES_TO_DUMP_DEFAULT = INT_MAX;
+static const char NUM_FRAMES_TO_DUMP_PROPERTY[] = "debug.dav1d.numframestodump";
+
+// The number of threads used for the dav1d decoder.
+static const int NUM_THREADS_DAV1D_DEFAULT = 0;
+static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";
+
+// The codec name is set and passed in as a compile flag from Android.bp.
+constexpr char COMPONENT_NAME[] = CODECNAME;
+
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+
+class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
+
+        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                             .withConstValue(new C2ComponentAttributesSetting(
+                                     C2Component::ATTRIB_IS_TEMPORAL))
+                             .build());
+
+        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096),
+                                     C2F(mSize, height).inRange(2, 4096),
+                             })
+                             .withSetter(SizeSetter)
+                             .build());
+
+        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                             .withDefault(new C2StreamProfileLevelInfo::input(
+                                     0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
+                             .withFields({C2F(mProfileLevel, profile)
+                                                  .oneOf({C2Config::PROFILE_AV1_0,
+                                                          C2Config::PROFILE_AV1_1}),
+                                          C2F(mProfileLevel, level)
+                                                  .oneOf({
+                                                          C2Config::LEVEL_AV1_2,
+                                                          C2Config::LEVEL_AV1_2_1,
+                                                          C2Config::LEVEL_AV1_2_2,
+                                                          C2Config::LEVEL_AV1_2_3,
+                                                          C2Config::LEVEL_AV1_3,
+                                                          C2Config::LEVEL_AV1_3_1,
+                                                          C2Config::LEVEL_AV1_3_2,
+                                                          C2Config::LEVEL_AV1_3_3,
+                                                          C2Config::LEVEL_AV1_4,
+                                                          C2Config::LEVEL_AV1_4_1,
+                                                          C2Config::LEVEL_AV1_4_2,
+                                                          C2Config::LEVEL_AV1_4_3,
+                                                          C2Config::LEVEL_AV1_5,
+                                                          C2Config::LEVEL_AV1_5_1,
+                                                          C2Config::LEVEL_AV1_5_2,
+                                                          C2Config::LEVEL_AV1_5_3,
+                                                  })})
+                             .withSetter(ProfileLevelSetter, mSize)
+                             .build());
+
+        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoInput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoInput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoInputSetter)
+                             .build());
+
+        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoOutput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoOutput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoOutputSetter)
+                             .build());
+
+        // default static info
+        C2HdrStaticMetadataStruct defaultStaticInfo{};
+        helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+        addParameter(
+                DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+                        .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
+                        .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
+                                     C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
+                                     C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
+                                     C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
+                        .withSetter(HdrStaticInfoSetter)
+                        .build());
+
+        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+                             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 2048, 2),
+                                     C2F(mSize, height).inRange(2, 2048, 2),
+                             })
+                             .withSetter(MaxPictureSizeSetter, mSize)
+                             .build());
+
+        addParameter(
+                DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                        .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
+                        .withFields({
+                                C2F(mMaxInputSize, value).any(),
+                        })
+                        .calculatedAs(MaxInputSizeSetter, mMaxSize)
+                        .build());
+
+        C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
+        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+                C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+                {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+                             .withConstValue(defaultColorInfo)
+                             .build());
+
+        addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsTuning::output(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mDefaultColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mDefaultColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mDefaultColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mDefaultColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(DefaultColorAspectsSetter)
+                             .build());
+
+        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::input(
+                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mCodedColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mCodedColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mCodedColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mCodedColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(CodedColorAspectsSetter)
+                             .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                        .withDefault(new C2StreamColorAspectsInfo::output(
+                                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                        .withFields(
+                                {C2F(mColorAspects, range)
+                                         .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                                 C2F(mColorAspects, primaries)
+                                         .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                  C2Color::PRIMARIES_OTHER),
+                                 C2F(mColorAspects, transfer)
+                                         .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                  C2Color::TRANSFER_OTHER),
+                                 C2F(mColorAspects, matrix)
+                                         .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                  C2Color::MATRIX_OTHER)})
+                        .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+                        .build());
+
+        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        // If the surface color format isn't added to the supported formats, there is no way to
+        // know when the color format is configured to surface. This is necessary to be able to
+        // choose a 10-bit format while decoding 10-bit clips in surface mode.
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        // TODO: support more formats?
+        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                             .withDefault(new C2StreamPixelFormatInfo::output(
+                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                             .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                             .build());
+    }
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::output>& me) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+            me.set().width = oldMe.v.width;
+        }
+        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+            me.set().height = oldMe.v.height;
+        }
+        return res;
+    }
+
+    static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
+                                    const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        // TODO: get max width/height from the size's field helpers vs.
+        // hardcoding
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+        return C2R::Ok();
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
+                                  const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
+        (void)mayBlock;
+        // assume compression ratio of 2, but enforce a floor
+        me.set().value =
+                c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
+                       kMinInputBufferSize);
+        return C2R::Ok();
+    }
+
+    static C2R DefaultColorAspectsSetter(bool mayBlock,
+                                         C2P<C2StreamColorAspectsTuning::output>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                  const C2P<C2StreamColorAspectsTuning::output>& def,
+                                  const C2P<C2StreamColorAspectsInfo::input>& coded) {
+        (void)mayBlock;
+        // take default values for all unspecified fields, and coded values for specified ones
+        me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+        me.set().primaries =
+                coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
+        me.set().transfer =
+                coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
+        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+        return C2R::Ok();
+    }
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
+                                  const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        (void)size;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+        return mDefaultColorAspects;
+    }
+
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }
+
+    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    // unsafe getters
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
+    static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
+        (void)mayBlock;
+        if (me.v.mastering.red.x > 1) {
+            me.set().mastering.red.x = 1;
+        }
+        if (me.v.mastering.red.y > 1) {
+            me.set().mastering.red.y = 1;
+        }
+        if (me.v.mastering.green.x > 1) {
+            me.set().mastering.green.x = 1;
+        }
+        if (me.v.mastering.green.y > 1) {
+            me.set().mastering.green.y = 1;
+        }
+        if (me.v.mastering.blue.x > 1) {
+            me.set().mastering.blue.x = 1;
+        }
+        if (me.v.mastering.blue.y > 1) {
+            me.set().mastering.blue.y = 1;
+        }
+        if (me.v.mastering.white.x > 1) {
+            me.set().mastering.white.x = 1;
+        }
+        if (me.v.mastering.white.y > 1) {
+            me.set().mastering.white.y = 1;
+        }
+        if (me.v.mastering.maxLuminance > 65535.0) {
+            me.set().mastering.maxLuminance = 65535.0;
+        }
+        if (me.v.mastering.minLuminance > 6.5535) {
+            me.set().mastering.minLuminance = 6.5535;
+        }
+        if (me.v.maxCll > 65535.0) {
+            me.set().maxCll = 65535.0;
+        }
+        if (me.v.maxFall > 65535.0) {
+            me.set().maxFall = 65535.0;
+        }
+        return C2R::Ok();
+    }
+
+  private:
+    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+    std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+};
+
+C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
+                               const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl) {
+    mTimeStart = mTimeEnd = systemTime();
+}
+
+C2SoftDav1dDec::~C2SoftDav1dDec() {
+    onRelease();
+}
+
+c2_status_t C2SoftDav1dDec::onInit() {
+    return initDecoder() ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftDav1dDec::onStop() {
+    // TODO: b/277797541 - investigate if the decoder needs to be flushed.
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    return C2_OK;
+}
+
+void C2SoftDav1dDec::onReset() {
+    (void)onStop();
+    c2_status_t err = onFlush_sm();
+    if (err != C2_OK) {
+        ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
+        destroyDecoder();
+        if (!initDecoder()) {
+            ALOGE("Hard reset failed.");
+        }
+    }
+}
+
+void C2SoftDav1dDec::flushDav1d() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+
+        while (mDecodedPictures.size() > 0) {
+            p = mDecodedPictures.front();
+            mDecodedPictures.pop_front();
+
+            dav1d_picture_unref(&p);
+        }
+
+        int res = 0;
+        while (true) {
+            memset(&p, 0, sizeof(p));
+
+            if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
+                if (res != DAV1D_ERR(EAGAIN)) {
+                    ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
+                    break;
+                } else {
+                    res = 0;
+                    break;
+                }
+            } else {
+                dav1d_picture_unref(&p);
+            }
+        }
+
+        dav1d_flush(mDav1dCtx);
+    }
+}
+
+void C2SoftDav1dDec::onRelease() {
+    destroyDecoder();
+}
+
+c2_status_t C2SoftDav1dDec::onFlush_sm() {
+    flushDav1d();
+
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+
+    return C2_OK;
+}
+
+static int GetCPUCoreCount() {
+    int cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+    // _SC_NPROC_ONLN must be defined...
+    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+    CHECK(cpuCoreCount >= 1);
+    ALOGV("Number of CPU cores: %d", cpuCoreCount);
+    return cpuCoreCount;
+}
+
+bool C2SoftDav1dDec::initDecoder() {
+    nsecs_t now = systemTime();
+#ifdef FILE_DUMP_ENABLE
+    snprintf(mInDataFileName, 256, "%s_%" PRId64 "d.%s", DUMP_FILE_PATH, now, INPUT_DATA_DUMP_EXT);
+    snprintf(mInSizeFileName, 256, "%s_%" PRId64 "d.%s", DUMP_FILE_PATH, now, INPUT_SIZE_DUMP_EXT);
+    snprintf(mDav1dOutYuvFileName, 256, "%s_%" PRId64 "dx.%s", DUMP_FILE_PATH, now,
+             OUTPUT_YUV_DUMP_EXT);
+
+    bool enableDumping = android::base::GetBoolProperty(ENABLE_DUMPING_FILES_PROPERTY,
+                                                        ENABLE_DUMPING_FILES_DEFAULT);
+
+    num_frames_to_dump =
+            android::base::GetIntProperty(NUM_FRAMES_TO_DUMP_PROPERTY, NUM_FRAMES_TO_DUMP_DEFAULT);
+
+    if (enableDumping) {
+        ALOGD("enableDumping = %d, num_frames_to_dump = %d", enableDumping, num_frames_to_dump);
+
+        mInDataFile = fopen(mInDataFileName, "wb");
+        if (mInDataFile == nullptr) {
+            ALOGD("Could not open file %s", mInDataFileName);
+        }
+
+        mInSizeFile = fopen(mInSizeFileName, "wb");
+        if (mInSizeFile == nullptr) {
+            ALOGD("Could not open file %s", mInSizeFileName);
+        }
+
+        mDav1dOutYuvFile = fopen(mDav1dOutYuvFileName, "wb");
+        if (mDav1dOutYuvFile == nullptr) {
+            ALOGD("Could not open file %s", mDav1dOutYuvFileName);
+        }
+    }
+#endif
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        mPixelFormatInfo = mIntf->getPixelFormat_l();
+    }
+
+    const char* version = dav1d_version();
+
+    Dav1dSettings lib_settings;
+    dav1d_default_settings(&lib_settings);
+    int cpu_count = GetCPUCoreCount();
+    lib_settings.n_threads = std::max(cpu_count / 2, 1);  // use up to half the cores by default.
+
+    int32_t numThreads =
+            android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
+    if (numThreads > 0) lib_settings.n_threads = numThreads;
+
+    int res = 0;
+    if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
+        ALOGE("dav1d_open failed. status: %d.", res);
+        return false;
+    } else {
+        ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
+    }
+
+    return true;
+}
+
+void C2SoftDav1dDec::destroyDecoder() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+        while (mDecodedPictures.size() > 0) {
+            memset(&p, 0, sizeof(p));
+            p = mDecodedPictures.front();
+            mDecodedPictures.pop_front();
+
+            dav1d_picture_unref(&p);
+        }
+
+        dav1d_close(&mDav1dCtx);
+        mDav1dCtx = nullptr;
+        mOutputBufferIndex = 0;
+        mInputBufferIndex = 0;
+    }
+#ifdef FILE_DUMP_ENABLE
+    if (mInDataFile != nullptr) {
+        fclose(mInDataFile);
+        mInDataFile = nullptr;
+    }
+
+    if (mInSizeFile != nullptr) {
+        fclose(mInSizeFile);
+        mInSizeFile = nullptr;
+    }
+
+    if (mDav1dOutYuvFile != nullptr) {
+        fclose(mDav1dOutYuvFile);
+        mDav1dOutYuvFile = nullptr;
+    }
+#endif
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+    uint32_t flags = 0;
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+        ALOGV("signalling end_of_stream.");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
+void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                                const std::shared_ptr<C2GraphicBlock>& block) {
+    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        buffer->setInfo(mIntf->getColorAspects_l());
+    }
+    auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
+        uint32_t flags = 0;
+        if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
+            (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
+            flags |= C2FrameData::FLAG_END_OF_STREAM;
+            ALOGV("signalling end_of_stream.");
+        }
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+        fillWork(work);
+    } else {
+        finish(index, fillWork);
+    }
+}
+
+void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
+                             const std::shared_ptr<C2BlockPool>& pool) {
+    work->result = C2_OK;
+    work->workletsProcessed = 0u;
+    work->worklets.front()->output.configUpdate.clear();
+    work->worklets.front()->output.flags = work->input.flags;
+    if (mSignalledError || mSignalledOutputEos) {
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+    size_t inOffset = 0u;
+    size_t inSize = 0u;
+    C2ReadView rView = mDummyReadView;
+    if (!work->input.buffers.empty()) {
+        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+        inSize = rView.capacity();
+        if (inSize && rView.error()) {
+            ALOGE("read view map failed %d", rView.error());
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    }
+
+    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
+    if (inSize) {
+        mInputBufferIndex = in_frameIndex;
+
+        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
+
+        mTimeStart = systemTime();
+        nsecs_t delay = mTimeStart - mTimeEnd;
+
+        // Send the bitstream data (inputBuffer) to dav1d.
+        if (mDav1dCtx) {
+            int i_ret = 0;
+
+            Dav1dSequenceHeader seq;
+            int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
+            if (res == 0) {
+                ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
+                      seq.max_height, (long)in_frameIndex);
+            }
+
+            // insert OBU TD if it is not present.
+            // TODO: b/286852962
+            uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
+            Dav1dData data;
+
+            uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
+                                                      : dav1d_data_create(&data, inSize + 2);
+            if (ptr == nullptr) {
+                ALOGE("dav1d_data_create failed!");
+                i_ret = -1;
+
+            } else {
+                data.m.timestamp = in_frameIndex;
+
+                int new_Size;
+                if (obu_type != DAV1D_OBU_TD) {
+                    new_Size = (int)(inSize + 2);
+
+                    // OBU TD
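+                    // 0x12 = OBU header with type 2 (temporal delimiter) and has_size_field set;
+                    // the following byte is its zero payload size.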
+                    ptr[0] = 0x12;
+                    ptr[1] = 0;
+
+                    memcpy(ptr + 2, bitstream, inSize);
+                } else {
+                    new_Size = (int)(inSize);
+                    // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
+                    // avoid memcopy operations.
+                    memcpy(ptr, bitstream, new_Size);
+                }
+
+                // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
+                //       "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
+                //       inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
+                //       ptr[3], ptr[4]);
+
+                // Dump the bitstream data (inputBuffer) if dumping is enabled.
+#ifdef FILE_DUMP_ENABLE
+                if (mInDataFile) {
+                    int ret = fwrite(ptr, 1, new_Size, mInDataFile);
+
+                    if (ret != new_Size) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mInDataFileName,
+                              new_Size, ret);
+                    }
+                }
+
+                // Dump the size per inputBuffer if dumping is enabled.
+                if (mInSizeFile) {
+                    int ret = fwrite(&new_Size, 1, 4, mInSizeFile);
+
+                    if (ret != 4) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mInSizeFileName, 4,
+                              ret);
+                    }
+                }
+#endif
+
+                bool b_draining = false;
+                int res;
+
+                do {
+                    res = dav1d_send_data(mDav1dCtx, &data);
+                    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
+                        ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
+                        /* bitstream decoding errors (typically DAV1D_ERR(EINVAL)) are assumed
+                         * to be recoverable. Other errors returned from this function are
+                         * either unexpected, or considered critical failures.
+                         */
+                        i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
+                        break;
+                    }
+
+                    bool b_output_error = false;
+
+                    do {
+                        Dav1dPicture img;
+                        memset(&img, 0, sizeof(img));
+
+                        res = dav1d_get_picture(mDav1dCtx, &img);
+                        if (res == 0) {
+                            mDecodedPictures.push_back(img);
+
+                            if (!end_of_stream) break;
+                        } else if (res == DAV1D_ERR(EAGAIN)) {
+                            /* the decoder needs more data to be able to output something.
+                             * if there is more data pending, continue the loop below or
+                             * otherwise break */
+                            if (data.sz != 0) res = 0;
+                            break;
+                        } else {
+                            ALOGE("warning! Decoder error %d!", res);
+                            b_output_error = true;
+                            break;
+                        }
+                    } while (res == 0);
+
+                    if (b_output_error) break;
+
+                    /* on drain, we must ignore the 1st EAGAIN */
+                    if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
+                        (end_of_stream)) {
+                        b_draining = true;
+                        res = 0;
+                    }
+                } while (res == 0 && ((data.sz != 0) || b_draining));
+
+                if (data.sz > 0) {
+                    ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
+                    dav1d_data_unref(&data);
+                }
+            }
+
+            mTimeEnd = systemTime();
+            nsecs_t decodeTime = mTimeEnd - mTimeStart;
+            // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
+
+            if (i_ret != 0) {
+                ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
+                work->result = C2_CORRUPTED;
+                work->workletsProcessed = 1u;
+                mSignalledError = true;
+                return;
+            }
+        }
+    }
+
+    (void)outputBuffer(pool, work);
+
+    if (end_of_stream) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        mSignalledOutputEos = true;
+    } else if (!inSize) {
+        fillEmptyWork(work);
+    }
+}
+
+void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
+                                        const std::unique_ptr<C2Work>& work) {
+    C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
+    bool infoPresent = false;
+
+    if (picture != nullptr) {
+        if (picture->mastering_display != nullptr) {
+            hdrStaticMetadataInfo.mastering.red.x =
+                    picture->mastering_display->primaries[0][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.red.y =
+                    picture->mastering_display->primaries[0][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.green.x =
+                    picture->mastering_display->primaries[1][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.green.y =
+                    picture->mastering_display->primaries[1][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.blue.x =
+                    picture->mastering_display->primaries[2][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.blue.y =
+                    picture->mastering_display->primaries[2][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.white.x =
+                    picture->mastering_display->white_point[0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.white.y =
+                    picture->mastering_display->white_point[1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.maxLuminance =
+                    picture->mastering_display->max_luminance / 256.0;
+            hdrStaticMetadataInfo.mastering.minLuminance =
+                    picture->mastering_display->min_luminance / 16384.0;
+
+            infoPresent = true;
+        }
+
+        if (picture->content_light != nullptr) {
+            hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
+            hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
+            infoPresent = true;
+        }
+    }
+
+    // if (infoPresent) {
+    //   ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
+    //   mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
+    //   mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
+    //   hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
+    //   hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
+    //   hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
+    //   hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
+    //   hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
+    //   hdrStaticMetadataInfo.maxCll,
+    //   hdrStaticMetadataInfo.maxFall,
+    //   mOutputBufferIndex);
+    // }
+
+    // config if static info has changed
+    if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
+        mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
+        work->worklets.front()->output.configUpdate.push_back(
+                C2Param::Copy(mHdrStaticMetadataInfo));
+    }
+}
+
+void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (picture != nullptr) {
+        if (picture->itut_t35 != nullptr) {
+            std::vector<uint8_t> payload;
+            size_t payloadSize = picture->itut_t35->payload_size;
+            if (payloadSize > 0) {
+                payload.push_back(picture->itut_t35->country_code);
+                if (picture->itut_t35->country_code == 0xFF) {
+                    payload.push_back(picture->itut_t35->country_code_extension_byte);
+                }
+                payload.insert(payload.end(), picture->itut_t35->payload,
+                               picture->itut_t35->payload + picture->itut_t35->payload_size);
+            }
+
+            std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
+                    C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
+            if (!hdr10PlusInfo) {
+                ALOGE("Hdr10PlusInfo allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
+
+            // ALOGD("Received a hdr10PlusInfo from picture->itut_t32
+            // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
+            // picture->itut_t35->payload_size,
+            // picture->itut_t35->country_code,
+            // mOutputBufferIndex);
+
+            // config if hdr10Plus info has changed
+            if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
+                mHdr10PlusInfo = std::move(hdr10PlusInfo);
+                work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
+            }
+        }
+    }
+}
+
+void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
+    VuiColorAspects vuiColorAspects;
+
+    if (picture) {
+        vuiColorAspects.primaries = picture->seq_hdr->pri;
+        vuiColorAspects.transfer = picture->seq_hdr->trc;
+        vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
+        vuiColorAspects.fullRange = picture->seq_hdr->color_range;
+
+        // ALOGD("Received a vuiColorAspects from dav1d (primaries=%d, transfer=%d, coeffs=%d, "
+        //       "fullRange=%d) at mOutputBufferIndex=%d, out_frameIndex=%ld.",
+        //       vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+        //       vuiColorAspects.fullRange, mOutputBufferIndex, picture->m.timestamp);
+    }
+
+    // convert vui aspects to C2 values if changed
+    if (!(vuiColorAspects == mBitstreamColorAspects)) {
+        mBitstreamColorAspects = vuiColorAspects;
+        ColorAspects sfAspects;
+        C2StreamColorAspectsInfo::input codedAspects = {0u};
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+                vuiColorAspects.fullRange, sfAspects);
+        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+        }
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+    }
+}
+
+void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
+    mSignalledError = true;
+    work->result = error;
+    work->workletsProcessed = 1u;
+}
+
+bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
+    if (size > mTmpFrameBufferSize) {
+        mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
+        if (mTmpFrameBuffer == nullptr) {
+            mTmpFrameBufferSize = 0;
+            return false;
+        }
+        mTmpFrameBufferSize = size;
+    }
+    return true;
+}
+
+#ifdef FILE_DUMP_ENABLE
+void C2SoftDav1dDec::writeDav1dOutYuvFile(const Dav1dPicture& p) {
+    if (mDav1dOutYuvFile != NULL) {
+        uint8_t* ptr;
+        const int hbd = p.p.bpc > 8;
+
+        ptr = (uint8_t*)p.data[0];
+        for (int y = 0; y < p.p.h; y++) {
+            int iSize = p.p.w << hbd;
+            int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
+            if (ret != iSize) {
+                ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName, iSize,
+                      ret);
+                break;
+            }
+
+            ptr += p.stride[0];
+        }
+
+        if (p.p.layout != DAV1D_PIXEL_LAYOUT_I400) {
+            // u/v
+            const int ss_ver = p.p.layout == DAV1D_PIXEL_LAYOUT_I420;
+            const int ss_hor = p.p.layout != DAV1D_PIXEL_LAYOUT_I444;
+            const int cw = (p.p.w + ss_hor) >> ss_hor;
+            const int ch = (p.p.h + ss_ver) >> ss_ver;
+            for (int pl = 1; pl <= 2; pl++) {
+                ptr = (uint8_t*)p.data[pl];
+                for (int y = 0; y < ch; y++) {
+                    int iSize = cw << hbd;
+                    int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
+                    if (ret != iSize) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName,
+                              iSize, ret);
+                        break;
+                    }
+                    ptr += p.stride[1];
+                }
+            }
+        }
+    }
+}
+#endif
+
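+// Retrieves one decoded picture (from the internal deque or directly from dav1d), converts it to
+// the negotiated output pixel format in a newly fetched graphic block, and finishes the work.
+// Returns true if a picture was output, false otherwise.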
+bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                                  const std::unique_ptr<C2Work>& work) {
+    if (!(work && pool)) return false;
+    if (mDav1dCtx == nullptr) return false;
+
+    // Get a decoded picture from dav1d if it is enabled.
+    Dav1dPicture img;
+    memset(&img, 0, sizeof(img));
+
+    int res = 0;
+    if (mDecodedPictures.size() > 0) {
+        img = mDecodedPictures.front();
+        mDecodedPictures.pop_front();
+        // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
+        // outputBuffer.",img.m.timestamp,img.m.timestamp);
+    } else {
+        res = dav1d_get_picture(mDav1dCtx, &img);
+        if (res == 0) {
+            // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
+            // outputBuffer.",img.m.timestamp,img.m.timestamp);
+        } else {
+            ALOGE("failed to get a picture from dav1d for outputBuffer.");
+        }
+    }
+
+    if (res == DAV1D_ERR(EAGAIN)) {
+        ALOGD("Not enough data to output a picture.");
+        return false;
+    }
+    if (res != 0) {
+        ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
+        return false;
+    }
+
+    const int width = img.p.w;
+    const int height = img.p.h;
+    if (width != mWidth || height != mHeight) {
+        mWidth = width;
+        mHeight = height;
+
+        C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
+        } else {
+            ALOGE("Config update size failed");
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
+            return false;
+        }
+    }
+
+    getVuiParams(&img);
+    getHDRStaticParams(&img, work);
+    getHDR10PlusInfoData(&img, work);
+
+    // out_frameIndex that the decoded picture returns from dav1d.
+    int64_t out_frameIndex = img.m.timestamp;
+
+#if LIBYUV_VERSION < 1779
+    if (img.p.layout != DAV1D_PIXEL_LAYOUT_I400 && img.p.layout != DAV1D_PIXEL_LAYOUT_I420) {
+        ALOGE("image_format %d not supported", img.p.layout);
+        mSignalledError = true;
+        work->workletsProcessed = 1u;
+        work->result = C2_CORRUPTED;
+        return false;
+    }
+#endif
+
+    const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;
+
+    int bitdepth = img.p.bpc;
+
+    std::shared_ptr<C2GraphicBlock> block;
+    uint32_t format = HAL_PIXEL_FORMAT_YV12;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
+    if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+        IntfImpl::Lock lock = mIntf->lock();
+        codedColorAspects = mIntf->getColorAspects_l();
+        bool allowRGBA1010102 = false;
+        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+            allowRGBA1010102 = true;
+        }
+        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+#if !HAVE_LIBYUV_I410_I210_TO_AB30
+        if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
+            (img.p.layout != DAV1D_PIXEL_LAYOUT_I420)) {
+            ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+            mSignalledError = true;
+            work->result = C2_OMITTED;
+            work->workletsProcessed = 1u;
+            return false;
+        }
+#endif
+    }
+
+    if (mHalPixelFormat != format) {
+        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
+        } else {
+            ALOGE("Config update pixelFormat failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return false;
+        }
+        mHalPixelFormat = format;
+    }
+
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+    // We always create a graphic block that is width aligned to 16 and height
+    // aligned to 2. We set the correct "crop" value of the image in the call to
+    // createGraphicBuffer() by setting the correct image dimensions.
+    c2_status_t err =
+            pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
+
+    if (err != C2_OK) {
+        ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+        work->result = err;
+        return false;
+    }
+
+    C2GraphicView wView = block->map().get();
+
+    if (wView.error()) {
+        ALOGE("graphic view map failed %d", wView.error());
+        work->result = C2_CORRUPTED;
+        return false;
+    }
+
+    // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
+    //       block->height(), mWidth, mHeight, (int)out_frameIndex);
+
+    mOutputBufferIndex = out_frameIndex;
+
+    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
+
+    C2PlanarLayout layout = wView.layout();
+    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    if (bitdepth == 10) {
+        // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
+        // decompression to avoid color conversion.
+        const uint16_t* srcY = (const uint16_t*)img.data[0];
+        const uint16_t* srcU = (const uint16_t*)img.data[1];
+        const uint16_t* srcV = (const uint16_t*)img.data[2];
+        size_t srcYStride = img.stride[0] / 2;
+        size_t srcUStride = img.stride[1] / 2;
+        size_t srcVStride = img.stride[1] / 2;
+
+        if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+            bool processed = false;
+#if HAVE_LIBYUV_I410_I210_TO_AB30
+            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                                         dstYStride, &libyuv::kYuvV2020Constants, mWidth, mHeight);
+                processed = true;
+            } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
+                libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                                         dstYStride, &libyuv::kYuvV2020Constants, mWidth, mHeight);
+                processed = true;
+            }
+#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
+            if (!processed) {
+                if (isMonochrome) {
+                    const size_t tmpSize = mWidth;
+                    const bool needFill = tmpSize > mTmpFrameBufferSize;
+                    if (!allocTmpFrameBuffer(tmpSize)) {
+                        ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                        setError(work, C2_NO_MEMORY);
+                        return false;
+                    }
+                    srcU = srcV = mTmpFrameBuffer.get();
+                    srcUStride = srcVStride = 0;
+                    if (needFill) {
+                        std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
+                    }
+                }
+                convertYUV420Planar16ToY410OrRGBA1010102(
+                        (uint32_t*)dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                        dstYStride / sizeof(uint32_t), mWidth, mHeight,
+                        std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+            }
+        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+            dstYStride /= 2;
+            dstUStride /= 2;
+            dstVStride /= 2;
+#if LIBYUV_VERSION >= 1779
+            if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
+                (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
+                // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010
+                // and libyuv::I210ToP010 when they are available. Note it may be safe to alias dstY
+                // in I010ToP010, but the libyuv API doesn't make any guarantees.
+                const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                uint16_t* const tmpY = mTmpFrameBuffer.get();
+                uint16_t* const tmpU = tmpY + dstYStride * mHeight;
+                uint16_t* const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+                if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                    libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                                       dstYStride, tmpU, dstUStride, tmpV, dstUStride, mWidth,
+                                       mHeight);
+                } else {
+                    libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                                       dstYStride, tmpU, dstUStride, tmpV, dstUStride, mWidth,
+                                       mHeight);
+                }
+                libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
+                                   (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride, mWidth,
+                                   mHeight);
+            } else {
+                convertYUV420Planar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+                                            srcYStride, srcUStride, srcVStride, dstYStride,
+                                            dstUStride, mWidth, mHeight, isMonochrome);
+            }
+#else   // LIBYUV_VERSION < 1779
+            convertYUV420Planar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+                                        srcYStride, srcUStride, srcVStride, dstYStride, dstUStride,
+                                        mWidth, mHeight, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+        } else {
+#if LIBYUV_VERSION >= 1779
+            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420
+                // when it's available.
+                const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                uint16_t* const tmpY = mTmpFrameBuffer.get();
+                uint16_t* const tmpU = tmpY + dstYStride * mHeight;
+                uint16_t* const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+                libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                                   dstYStride, tmpU, dstUStride, tmpV, dstVStride, mWidth, mHeight);
+                libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride, dstY,
+                                   dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+            } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
+                libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                                   dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+            } else {
+                convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                            srcUStride, srcVStride, dstYStride, dstUStride, mWidth,
+                                            mHeight, isMonochrome);
+            }
+#else   // LIBYUV_VERSION < 1779
+            convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                        srcVStride, dstYStride, dstUStride, mWidth, mHeight,
+                                        isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+        }
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        FILE* fp_out = mDav1dOutYuvFile;
+
+        // if(mOutputBufferIndex % 100 == 0)
+        ALOGV("output a 10bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        if (fp_out && mOutputBufferIndex <= num_frames_to_dump) {
+            for (int i = 0; i < mHeight; i++) {
+                int ret = fwrite((uint8_t*)srcY + i * srcYStride * 2, 1, mWidth * 2, fp_out);
+                if (ret != mWidth * 2) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth * 2, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcU + i * srcUStride * 2, 1, mWidth, fp_out);
+                if (ret != mWidth) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcV + i * srcVStride * 2, 1, mWidth, fp_out);
+                if (ret != mWidth) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
+                    break;
+                }
+            }
+        }
+#endif
+    } else {
+        const uint8_t* srcY = (const uint8_t*)img.data[0];
+        const uint8_t* srcU = (const uint8_t*)img.data[1];
+        const uint8_t* srcV = (const uint8_t*)img.data[2];
+
+        size_t srcYStride = img.stride[0];
+        size_t srcUStride = img.stride[1];
+        size_t srcVStride = img.stride[1];
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        FILE* fp_out = mDav1dOutYuvFile;
+        // if(mOutputBufferIndex % 100 == 0)
+        ALOGV("output a 8bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        if (fp_out && mOutputBufferIndex <= num_frames_to_dump) {
+            for (int i = 0; i < mHeight; i++) {
+                int ret = fwrite((uint8_t*)srcY + i * srcYStride, 1, mWidth, fp_out);
+                if (ret != mWidth) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcU + i * srcUStride, 1, mWidth / 2, fp_out);
+                if (ret != mWidth / 2) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth / 2, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcV + i * srcVStride, 1, mWidth / 2, fp_out);
+                if (ret != mWidth / 2) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth / 2, ret);
+                    break;
+                }
+            }
+        }
+#endif
+        if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+            libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                               dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+        } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
+            libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                               dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+        } else {
+            convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                       srcVStride, dstYStride, dstUStride, dstVStride, mWidth,
+                                       mHeight, isMonochrome);
+        }
+    }
+
+    dav1d_picture_unref(&img);
+
+    finishWork(out_frameIndex, work, std::move(block));
+    block = nullptr;
+    return true;
+}
+
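+// Drains any remaining decoded pictures into output works. For an EOS drain, an empty work is
+// filled if no output was produced for it.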
+c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
+                                          const std::shared_ptr<C2BlockPool>& pool,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
+    while (outputBuffer(pool, work)) {
+    }
+
+    if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
+        fillEmptyWork(work);
+    }
+
+    return C2_OK;
+}
+
+c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftDav1dFactory : public C2ComponentFactory {
+  public:
+    C2SoftDav1dFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftDav1dDec(COMPONENT_NAME, id,
+                                   std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftDav1dFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftDav1dFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
new file mode 100644
index 0000000..5201456
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_DAV1D_DEC_H_
+#define ANDROID_C2_SOFT_DAV1D_DEC_H_
+
+#include <inttypes.h>
+
+#include <memory>
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <C2Config.h>
+#include <SimpleC2Component.h>
+
+#include <dav1d/dav1d.h>
+#include <deque>
+
+//#define FILE_DUMP_ENABLE 1
+#define DUMP_FILE_PATH "/data/local/tmp/dump"
+#define INPUT_DATA_DUMP_EXT "av1"
+#define INPUT_SIZE_DUMP_EXT "size"
+#define OUTPUT_YUV_DUMP_EXT "yuv"
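+// When FILE_DUMP_ENABLE is defined, the decoder dumps the input AV1 bitstream, the input sizes,
+// and the decoded YUV output under DUMP_FILE_PATH (debug use only).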
+
+namespace android {
+
+struct C2SoftDav1dDec : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftDav1dDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    ~C2SoftDav1dDec();
+
+    // Begin SimpleC2Component overrides.
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+    // End SimpleC2Component overrides.
+
+  private:
+    std::shared_ptr<IntfImpl> mIntf;
+
+    int mInputBufferIndex = 0;
+    int mOutputBufferIndex = 0;
+
+    Dav1dContext* mDav1dCtx = nullptr;
+    std::deque<Dav1dPicture> mDecodedPictures;
+
+    // configurations used by component in process
+    // (TODO: keep this in intf but make them internal only)
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+    uint32_t mHalPixelFormat;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    bool mSignalledOutputEos;
+    bool mSignalledError;
+    // Used during 10-bit I444/I422 to 10-bit P010 & 8-bit I420 conversions.
+    std::unique_ptr<uint16_t[]> mTmpFrameBuffer;
+    size_t mTmpFrameBufferSize = 0;
+
+    C2StreamHdrStaticMetadataInfo::output mHdrStaticMetadataInfo;
+    std::unique_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfo = nullptr;
+
+    // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+    // converting them to C2 values for each frame
+    struct VuiColorAspects {
+        uint8_t primaries;
+        uint8_t transfer;
+        uint8_t coeffs;
+        uint8_t fullRange;
+
+        // default color aspects
+        VuiColorAspects()
+            : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+              transfer(C2Color::TRANSFER_UNSPECIFIED),
+              coeffs(C2Color::MATRIX_UNSPECIFIED),
+              fullRange(C2Color::RANGE_UNSPECIFIED) {}
+
+        bool operator==(const VuiColorAspects& o) {
+            return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs &&
+                   fullRange == o.fullRange;
+        }
+    } mBitstreamColorAspects;
+
+    nsecs_t mTimeStart = 0;  // Time at the start of decode()
+    nsecs_t mTimeEnd = 0;    // Time at the end of decode()
+
+    bool initDecoder();
+    void getHDRStaticParams(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getHDR10PlusInfoData(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getVuiParams(Dav1dPicture* picture);
+    void destroyDecoder();
+    void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2GraphicBlock>& block);
+    // Sets mSignalledError and |work->result| to |error|.
+    void setError(const std::unique_ptr<C2Work>& work, c2_status_t error);
+    bool allocTmpFrameBuffer(size_t size);
+    bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                      const std::unique_ptr<C2Work>& work);
+
+    c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
+
+    void flushDav1d();
+
+#ifdef FILE_DUMP_ENABLE
+    char mInDataFileName[256];
+    char mInSizeFileName[256];
+    char mDav1dOutYuvFileName[256];
+
+    FILE* mInDataFile = nullptr;
+    FILE* mInSizeFile = nullptr;
+    FILE* mDav1dOutYuvFile = nullptr;
+
+    void writeDav1dOutYuvFile(const Dav1dPicture& p);
+
+    int num_frames_to_dump = 0;
+#endif
+
+    C2_DO_NOT_COPY(C2SoftDav1dDec);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_DAV1D_DEC_H_
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 3e4247b..5f5f05d 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -752,6 +752,19 @@
     return true;
 }
 
+bool C2SoftGav1Dec::fillMonochromeRow(int value) {
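+    // |value| is the neutral chroma level for the current bit depth: 512 for 10-bit input,
+    // 2048 for 12-bit input.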
+    const size_t tmpSize = mWidth;
+    const bool needFill = tmpSize > mTmpFrameBufferSize;
+    if (!allocTmpFrameBuffer(tmpSize)) {
+        ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+        return false;
+    }
+    if (needFill) {
+        std::fill_n(mTmpFrameBuffer.get(), tmpSize, value);
+    }
+    return true;
+}
+
 bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                  const std::unique_ptr<C2Work> &work) {
   if (!(work && pool)) return false;
@@ -773,6 +786,16 @@
     return false;
   }
 
+#if LIBYUV_VERSION < 1871
+  if (buffer->bitdepth > 10) {
+    ALOGE("bitdepth %d is not supported", buffer->bitdepth);
+    mSignalledError = true;
+    work->workletsProcessed = 1u;
+    work->result = C2_CORRUPTED;
+    return false;
+  }
+#endif
+
   const int width = buffer->displayed_width[0];
   const int height = buffer->displayed_height[0];
   if (width != mWidth || height != mHeight) {
@@ -816,7 +839,7 @@
   std::shared_ptr<C2GraphicBlock> block;
   uint32_t format = HAL_PIXEL_FORMAT_YV12;
   std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
-  if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+  if (buffer->bitdepth >= 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
     IntfImpl::Lock lock = mIntf->lock();
     codedColorAspects = mIntf->getColorAspects_l();
     bool allowRGBA1010102 = false;
@@ -828,8 +851,9 @@
     format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
 #if !HAVE_LIBYUV_I410_I210_TO_AB30
     if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
-        (buffer->image_format != libgav1::kImageFormatYuv420)) {
-        ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+        (buffer->image_format != libgav1::kImageFormatYuv420) &&
+        (buffer->bitdepth == 10)) {
+        ALOGE("Only YUV420 output is supported for 10-bit when targeting RGBA_1010102");
       mSignalledError = true;
       work->result = C2_OMITTED;
       work->workletsProcessed = 1u;
@@ -837,6 +861,18 @@
     }
 #endif
   }
+  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_RGBA_1010102 &&
+      (buffer->image_format == libgav1::kImageFormatYuv422 ||
+       buffer->image_format == libgav1::kImageFormatYuv444)) {
+      // There are no 12-bit color conversion functions from YUV422/YUV444 to
+      // RGBA_1010102. Use 8-bit YV12 in this case.
+      format = HAL_PIXEL_FORMAT_YV12;
+  }
+  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+      // There are no 12-bit color conversion functions to P010. Use 8-bit YV12
+      // in this case.
+      format = HAL_PIXEL_FORMAT_YV12;
+  }
 
   if (mHalPixelFormat != format) {
     C2StreamPixelFormatInfo::output pixelFormat(0u, format);
@@ -890,7 +926,41 @@
   size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
   size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
-  if (buffer->bitdepth == 10) {
+  if (buffer->bitdepth == 12) {
+#if LIBYUV_VERSION >= 1871
+      const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
+      const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
+      const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
+      size_t srcYStride = buffer->stride[0] / 2;
+      size_t srcUStride = buffer->stride[1] / 2;
+      size_t srcVStride = buffer->stride[2] / 2;
+      if (isMonochrome) {
+          if (!fillMonochromeRow(2048)) {
+              setError(work, C2_NO_MEMORY);
+              return false;
+          }
+          srcU = srcV = mTmpFrameBuffer.get();
+          srcUStride = srcVStride = 0;
+      }
+      if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+          libyuv::I012ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                   dstY, dstYStride, &libyuv::kYuvV2020Constants,
+                                   mWidth, mHeight);
+      } else if (isMonochrome || buffer->image_format == libgav1::kImageFormatYuv420) {
+          libyuv::I012ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      } else if (buffer->image_format == libgav1::kImageFormatYuv444) {
+          libyuv::I412ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      } else {
+          libyuv::I212ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      }
+#endif  // LIBYUV_VERSION >= 1871
+  } else if (buffer->bitdepth == 10) {
     const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
     const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
     const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
@@ -915,18 +985,12 @@
 #endif  // HAVE_LIBYUV_I410_I210_TO_AB30
         if (!processed) {
             if (isMonochrome) {
-                const size_t tmpSize = mWidth;
-                const bool needFill = tmpSize > mTmpFrameBufferSize;
-                if (!allocTmpFrameBuffer(tmpSize)) {
-                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                if (!fillMonochromeRow(512)) {
                     setError(work, C2_NO_MEMORY);
                     return false;
                 }
                 srcU = srcV = mTmpFrameBuffer.get();
                 srcUStride = srcVStride = 0;
-                if (needFill) {
-                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
-                }
             }
             convertYUV420Planar16ToY410OrRGBA1010102(
                     (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index c3b27ea..0e09fcc 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -105,6 +105,7 @@
   // Sets |work->result| and mSignalledError. Returns false.
   void setError(const std::unique_ptr<C2Work> &work, c2_status_t error);
   bool allocTmpFrameBuffer(size_t size);
+  bool fillMonochromeRow(int value);
   bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
                     const std::unique_ptr<C2Work>& work);
   c2_status_t drainInternal(uint32_t drainMode,
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index e3f8b1c..e7dd7c2 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -16,8 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2Client"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <android-base/logging.h>
-
+#include <utils/Trace.h>
 #include <codec2/hidl/client.h>
 #include <C2Debug.h>
 #include <C2BufferPriv.h>
@@ -2321,6 +2322,7 @@
         const C2ConstGraphicBlock& block,
         const QueueBufferInput& input,
         QueueBufferOutput* output) {
+    ScopedTrace trace(ATRACE_TAG,"Codec2Client::Component::queueToOutputSurface");
     return mOutputBufferQueue->outputBuffer(block, input, output);
 }
 
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 4eebd1c..2f9773e 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -16,7 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2-OutputBufferQueue"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <android-base/logging.h>
+#include <utils/Trace.h>
 
 #include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 #include <codec2/hidl/output.h>
@@ -388,6 +390,7 @@
     uint32_t generation;
     uint64_t bqId;
     int32_t bqSlot;
+    ScopedTrace trace(ATRACE_TAG,"Codec2-OutputBufferQueue::outputBuffer");
     bool display = V1_0::utils::displayBufferQueueBlock(block);
     if (!getBufferQueueAssignment(block, &generation, &bqId, &bqSlot) ||
         bqId == 0) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 38fe2ed..490f957 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -600,6 +600,8 @@
     size_t bufferSize = 0;
     c2_status_t blockRes = C2_OK;
     bool copied = false;
+    ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+            "CCodecBufferChannel::decrypt(%s)", mName).c_str());
     if (mSendEncryptedInfoBuffer) {
         static const C2MemoryUsage kDefaultReadWriteUsage{
             C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 045a25f..6800107 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -62,8 +62,8 @@
     void setCrypto(const sp<ICrypto> &crypto) override;
     void setDescrambler(const sp<IDescrambler> &descrambler) override;
 
-    virtual status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
-    virtual status_t queueSecureInputBuffer(
+    status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    status_t queueSecureInputBuffer(
             const sp<MediaCodecBuffer> &buffer,
             bool secure,
             const uint8_t *key,
@@ -73,10 +73,10 @@
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
             AString *errorDetailMsg) override;
-    virtual status_t attachBuffer(
+    status_t attachBuffer(
             const std::shared_ptr<C2Buffer> &c2Buffer,
             const sp<MediaCodecBuffer> &buffer) override;
-    virtual status_t attachEncryptedBuffer(
+    status_t attachEncryptedBuffer(
             const sp<hardware::HidlMemory> &memory,
             bool secure,
             const uint8_t *key,
@@ -88,12 +88,12 @@
             size_t numSubSamples,
             const sp<MediaCodecBuffer> &buffer,
             AString* errorDetailMsg) override;
-    virtual status_t renderOutputBuffer(
+    status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
-    virtual void pollForRenderedBuffers() override;
-    virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
-    virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
-    virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+    void pollForRenderedBuffers() override;
+    status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+    void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
 
     // Methods below are interface for CCodec to use.
 
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 9004bcf..261fd05 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -621,8 +621,8 @@
     uint8_t maxLvlChroma =  colorRange == C2Color::RANGE_FULL ? 255 : 240;
 
 #define CLIP3(min,v,max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
-    for (size_t y = 0; y < src.height(); ++y) {
-        for (size_t x = 0; x < src.width(); ++x) {
+    for (size_t y = 0; y < src.crop().height; ++y) {
+        for (size_t x = 0; x < src.crop().width; ++x) {
             uint8_t r = *pRed;
             uint8_t g = *pGreen;
             uint8_t b = *pBlue;
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 018e269..a56a216 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -16,7 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2Buffer"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <list>
 #include <map>
@@ -33,6 +35,7 @@
 
 namespace {
 
+using android::ScopedTrace;
 using android::C2AllocatorBlob;
 using android::C2AllocatorGralloc;
 using android::C2AllocatorIon;
@@ -1159,6 +1162,7 @@
         uint32_t capacity,
         C2MemoryUsage usage,
         std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
+    ScopedTrace trace(ATRACE_TAG,"C2PooledBlockPool::fetchLinearBlock");
     if (mBufferPoolVer == VER_HIDL && mImpl) {
         return mImpl->fetchLinearBlock(capacity, usage, block);
     }
@@ -1174,6 +1178,7 @@
         uint32_t format,
         C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block) {
+    ScopedTrace trace(ATRACE_TAG,"C2PooledBlockPool::fetchGraphicBlock");
     if (mBufferPoolVer == VER_HIDL && mImpl) {
         return mImpl->fetchGraphicBlock(width, height, format, usage, block);
     }
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 5fb0c8f..960fa79 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -16,8 +16,10 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2BqBuffer"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <android/hardware_buffer.h>
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <ui/BufferQueueDefs.h>
 #include <ui/GraphicBuffer.h>
@@ -37,6 +39,7 @@
 #include <map>
 #include <mutex>
 
+using ::android::ScopedTrace;
 using ::android::BufferQueueDefs::NUM_BUFFER_SLOTS;
 using ::android::C2AllocatorGralloc;
 using ::android::C2AndroidMemoryUsage;
@@ -1063,6 +1066,7 @@
         uint32_t format,
         C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+    ScopedTrace trace(ATRACE_TAG,"C2BufferQueueBlockPool::fetchGraphicBlock");
     if (mImpl) {
         return mImpl->fetchGraphicBlock(width, height, format, usage, block, nullptr);
     }
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 7648c76..c3b32e6 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -539,6 +539,22 @@
      * Available since API level 29.
      */
     AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE = 10,
+
+    /**
+     * Use this preset for an echo canceller to capture the reference signal.
+     * Reserved for system components.
+     * Requires CAPTURE_AUDIO_OUTPUT permission.
+     * Available since API level 35.
+     */
+    AAUDIO_INPUT_PRESET_SYSTEM_ECHO_REFERENCE = 1997,
+
+    /**
+     * Use this preset for preemptible, low-priority software hotword detection.
+     * Reserved for system components.
+     * Requires CAPTURE_AUDIO_HOTWORD permission.
+     * Available since API level 35.
+     */
+    AAUDIO_INPUT_PRESET_SYSTEM_HOTWORD = 1999,
 };
 typedef int32_t aaudio_input_preset_t;
 
@@ -623,6 +639,11 @@
  * (e.g. a USB audio interface, a DAC connected to headphones) to
  * specify allowable configurations of a particular device.
  *
+ * Channel masks are for input only, output only, or both input and output.
+ * These channel masks are different from those defined in AudioFormat.java.
+ * If an app gets a channel mask from the Java API and wants to use it in AAudio,
+ * the app must perform the conversion itself.
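+ * For example, AAUDIO_CHANNEL_STEREO below is 0x3, while AudioFormat.CHANNEL_OUT_STEREO in Java is 0xC.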
+ *
  * Added in API level 32.
  */
 enum {
@@ -630,10 +651,6 @@
      * Invalid channel mask
      */
     AAUDIO_CHANNEL_INVALID = -1,
-
-    /**
-     * Output audio channel mask
-     */
     AAUDIO_CHANNEL_FRONT_LEFT = 1 << 0,
     AAUDIO_CHANNEL_FRONT_RIGHT = 1 << 1,
     AAUDIO_CHANNEL_FRONT_CENTER = 1 << 2,
@@ -661,62 +678,112 @@
     AAUDIO_CHANNEL_FRONT_WIDE_LEFT = 1 << 24,
     AAUDIO_CHANNEL_FRONT_WIDE_RIGHT = 1 << 25,
 
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_MONO = AAUDIO_CHANNEL_FRONT_LEFT,
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_STEREO = AAUDIO_CHANNEL_FRONT_LEFT |
                             AAUDIO_CHANNEL_FRONT_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_2POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
                              AAUDIO_CHANNEL_FRONT_RIGHT |
                              AAUDIO_CHANNEL_LOW_FREQUENCY,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_TRI = AAUDIO_CHANNEL_FRONT_LEFT |
                          AAUDIO_CHANNEL_FRONT_RIGHT |
                          AAUDIO_CHANNEL_FRONT_CENTER,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_TRI_BACK = AAUDIO_CHANNEL_FRONT_LEFT |
                               AAUDIO_CHANNEL_FRONT_RIGHT |
                               AAUDIO_CHANNEL_BACK_CENTER,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_3POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
                              AAUDIO_CHANNEL_FRONT_RIGHT |
                              AAUDIO_CHANNEL_FRONT_CENTER |
                              AAUDIO_CHANNEL_LOW_FREQUENCY,
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_2POINT0POINT2 = AAUDIO_CHANNEL_FRONT_LEFT |
                                    AAUDIO_CHANNEL_FRONT_RIGHT |
                                    AAUDIO_CHANNEL_TOP_SIDE_LEFT |
                                    AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_2POINT1POINT2 = AAUDIO_CHANNEL_2POINT0POINT2 |
                                    AAUDIO_CHANNEL_LOW_FREQUENCY,
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_3POINT0POINT2 = AAUDIO_CHANNEL_FRONT_LEFT |
                                    AAUDIO_CHANNEL_FRONT_RIGHT |
                                    AAUDIO_CHANNEL_FRONT_CENTER |
                                    AAUDIO_CHANNEL_TOP_SIDE_LEFT |
                                    AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_3POINT1POINT2 = AAUDIO_CHANNEL_3POINT0POINT2 |
                                    AAUDIO_CHANNEL_LOW_FREQUENCY,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_QUAD = AAUDIO_CHANNEL_FRONT_LEFT |
                           AAUDIO_CHANNEL_FRONT_RIGHT |
                           AAUDIO_CHANNEL_BACK_LEFT |
                           AAUDIO_CHANNEL_BACK_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_QUAD_SIDE = AAUDIO_CHANNEL_FRONT_LEFT |
                                AAUDIO_CHANNEL_FRONT_RIGHT |
                                AAUDIO_CHANNEL_SIDE_LEFT |
                                AAUDIO_CHANNEL_SIDE_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_SURROUND = AAUDIO_CHANNEL_FRONT_LEFT |
                               AAUDIO_CHANNEL_FRONT_RIGHT |
                               AAUDIO_CHANNEL_FRONT_CENTER |
                               AAUDIO_CHANNEL_BACK_CENTER,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_PENTA = AAUDIO_CHANNEL_QUAD |
                            AAUDIO_CHANNEL_FRONT_CENTER,
-    // aka 5POINT1_BACK
+    /**
+     * Supported for Input and Output. Also known as 5POINT1_BACK.
+     */
     AAUDIO_CHANNEL_5POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
                              AAUDIO_CHANNEL_FRONT_RIGHT |
                              AAUDIO_CHANNEL_FRONT_CENTER |
                              AAUDIO_CHANNEL_LOW_FREQUENCY |
                              AAUDIO_CHANNEL_BACK_LEFT |
                              AAUDIO_CHANNEL_BACK_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_5POINT1_SIDE = AAUDIO_CHANNEL_FRONT_LEFT |
                                   AAUDIO_CHANNEL_FRONT_RIGHT |
                                   AAUDIO_CHANNEL_FRONT_CENTER |
                                   AAUDIO_CHANNEL_LOW_FREQUENCY |
                                   AAUDIO_CHANNEL_SIDE_LEFT |
                                   AAUDIO_CHANNEL_SIDE_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_6POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
                              AAUDIO_CHANNEL_FRONT_RIGHT |
                              AAUDIO_CHANNEL_FRONT_CENTER |
@@ -724,32 +791,55 @@
                              AAUDIO_CHANNEL_BACK_LEFT |
                              AAUDIO_CHANNEL_BACK_RIGHT |
                              AAUDIO_CHANNEL_BACK_CENTER,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_7POINT1 = AAUDIO_CHANNEL_5POINT1 |
                              AAUDIO_CHANNEL_SIDE_LEFT |
                              AAUDIO_CHANNEL_SIDE_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_5POINT1POINT2 = AAUDIO_CHANNEL_5POINT1 |
                                    AAUDIO_CHANNEL_TOP_SIDE_LEFT |
                                    AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_5POINT1POINT4 = AAUDIO_CHANNEL_5POINT1 |
                                    AAUDIO_CHANNEL_TOP_FRONT_LEFT |
                                    AAUDIO_CHANNEL_TOP_FRONT_RIGHT |
                                    AAUDIO_CHANNEL_TOP_BACK_LEFT |
                                    AAUDIO_CHANNEL_TOP_BACK_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_7POINT1POINT2 = AAUDIO_CHANNEL_7POINT1 |
                                    AAUDIO_CHANNEL_TOP_SIDE_LEFT |
                                    AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_7POINT1POINT4 = AAUDIO_CHANNEL_7POINT1 |
                                    AAUDIO_CHANNEL_TOP_FRONT_LEFT |
                                    AAUDIO_CHANNEL_TOP_FRONT_RIGHT |
                                    AAUDIO_CHANNEL_TOP_BACK_LEFT |
                                    AAUDIO_CHANNEL_TOP_BACK_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_9POINT1POINT4 = AAUDIO_CHANNEL_7POINT1POINT4 |
                                    AAUDIO_CHANNEL_FRONT_WIDE_LEFT |
                                    AAUDIO_CHANNEL_FRONT_WIDE_RIGHT,
+    /**
+     * Supported for Output only
+     */
     AAUDIO_CHANNEL_9POINT1POINT6 = AAUDIO_CHANNEL_9POINT1POINT4 |
                                    AAUDIO_CHANNEL_TOP_SIDE_LEFT |
                                    AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
-
+    /**
+     * Supported for Input only
+     */
     AAUDIO_CHANNEL_FRONT_BACK = AAUDIO_CHANNEL_FRONT_CENTER |
                                 AAUDIO_CHANNEL_BACK_CENTER,
 };
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.cpp b/media/libaaudio/src/client/AAudioFlowGraph.cpp
index 5444565..b7e0ae6 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.cpp
+++ b/media/libaaudio/src/client/AAudioFlowGraph.cpp
@@ -39,18 +39,21 @@
 
 aaudio_result_t AAudioFlowGraph::configure(audio_format_t sourceFormat,
                           int32_t sourceChannelCount,
+                          int32_t sourceSampleRate,
                           audio_format_t sinkFormat,
                           int32_t sinkChannelCount,
+                          int32_t sinkSampleRate,
                           bool useMonoBlend,
+                          bool useVolumeRamps,
                           float audioBalance,
-                          bool isExclusive) {
+                          aaudio::resampler::MultiChannelResampler::Quality resamplerQuality) {
     FlowGraphPortFloatOutput *lastOutput = nullptr;
 
-    // TODO change back to ALOGD
-    ALOGI("%s() source format = 0x%08x, channels = %d, sink format = 0x%08x, channels = %d, "
-          "useMonoBlend = %d, audioBalance = %f, isExclusive %d",
-          __func__, sourceFormat, sourceChannelCount, sinkFormat, sinkChannelCount,
-          useMonoBlend, audioBalance, isExclusive);
+    ALOGD("%s() source format = 0x%08x, channels = %d, sample rate = %d, "
+          "sink format = 0x%08x, channels = %d, sample rate = %d, "
+          "useMonoBlend = %d, audioBalance = %f, useVolumeRamps %d",
+          __func__, sourceFormat, sourceChannelCount, sourceSampleRate, sinkFormat,
+          sinkChannelCount, sinkSampleRate, useMonoBlend, audioBalance, useVolumeRamps);
 
     switch (sourceFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
@@ -85,6 +88,15 @@
         lastOutput = &mLimiter->output;
     }
 
+    if (sourceSampleRate != sinkSampleRate) {
+        mResampler.reset(aaudio::resampler::MultiChannelResampler::make(sourceChannelCount,
+                sourceSampleRate, sinkSampleRate, resamplerQuality));
+        mRateConverter = std::make_unique<SampleRateConverter>(sourceChannelCount,
+                                                               *mResampler);
+        lastOutput->connect(&mRateConverter->input);
+        lastOutput = &mRateConverter->output;
+    }
+
     // Expand the number of channels if required.
     if (sourceChannelCount == 1 && sinkChannelCount > 1) {
         mChannelConverter = std::make_unique<MonoToMultiConverter>(sinkChannelCount);
@@ -95,8 +107,7 @@
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
 
-    // Apply volume ramps for only exclusive streams.
-    if (isExclusive) {
+    if (useVolumeRamps) {
         // Apply volume ramps to set the left/right audio balance and target volumes.
         // The signals will be decoupled, volume ramps will be applied, before the signals are
         // combined again.
@@ -137,9 +148,14 @@
     return AAUDIO_OK;
 }
 
-void AAudioFlowGraph::process(const void *source, void *destination, int32_t numFrames) {
-    mSource->setData(source, numFrames);
-    mSink->read(destination, numFrames);
+int32_t AAudioFlowGraph::pull(void *destination, int32_t targetFramesToRead) {
+    return mSink->read(destination, targetFramesToRead);
+}
+
+int32_t AAudioFlowGraph::process(const void *source, int32_t numFramesToWrite, void *destination,
+                    int32_t targetFramesToRead) {
+    mSource->setData(source, numFramesToWrite);
+    return mSink->read(destination, targetFramesToRead);
 }
 
 /**
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.h b/media/libaaudio/src/client/AAudioFlowGraph.h
index 35fef37..e1d517e 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.h
+++ b/media/libaaudio/src/client/AAudioFlowGraph.h
@@ -30,6 +30,7 @@
 #include <flowgraph/MonoToMultiConverter.h>
 #include <flowgraph/MultiToManyConverter.h>
 #include <flowgraph/RampLinear.h>
+#include <flowgraph/SampleRateConverter.h>
 
 class AAudioFlowGraph {
 public:
@@ -38,23 +39,57 @@
      *
      * @param sourceFormat
      * @param sourceChannelCount
+     * @param sourceSampleRate
      * @param sinkFormat
      * @param sinkChannelCount
+     * @param sinkSampleRate
      * @param useMonoBlend
+     * @param useVolumeRamps
      * @param audioBalance
-     * @param channelMask
-     * @param isExclusive
+     * @param resamplerQuality
      * @return
      */
     aaudio_result_t configure(audio_format_t sourceFormat,
                               int32_t sourceChannelCount,
+                              int32_t sourceSampleRate,
                               audio_format_t sinkFormat,
                               int32_t sinkChannelCount,
+                              int32_t sinkSampleRate,
                               bool useMonoBlend,
+                              bool useVolumeRamps,
                               float audioBalance,
-                              bool isExclusive);
+                              aaudio::resampler::MultiChannelResampler::Quality resamplerQuality);
 
-    void process(const void *source, void *destination, int32_t numFrames);
+    /**
+     * Attempt to read targetFramesToRead from the flowgraph.
+     * This function returns the number of frames actually read.
+     *
+     * This function does nothing if process() was not called before.
+     *
+     * @param destination
+     * @param targetFramesToRead
+     * @return numFramesRead
+     */
+    int32_t pull(void *destination, int32_t targetFramesToRead);
+
+    /**
+     * Set numFramesToWrite frames from the source into the flowgraph.
+     * Then, attempt to read targetFramesToRead from the flowgraph.
+     * This function returns the number of frames actually read.
+     *
+     * There may be data still in the flowgraph if targetFramesToRead is not large enough.
+     * Before calling process() again, pull() must be called until all the data is consumed.
+     *
+     * TODO: b/289510598 - Calculate the exact number of input frames needed for Y output frames.
+     *
+     * @param source
+     * @param numFramesToWrite
+     * @param destination
+     * @param targetFramesToRead
+     * @return numFramesRead
+     */
+    int32_t process(const void *source, int32_t numFramesToWrite, void *destination,
+                    int32_t targetFramesToRead);
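+    // Illustrative call pattern (buffer names are hypothetical):
+    //   int32_t framesRead = flowgraph.process(src, framesToWrite, dst, framesWanted);
+    //   // If the flowgraph still holds data, drain it with pull() before the next process():
+    //   framesRead = flowgraph.pull(dst, framesWanted);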
 
     /**
      * @param volume between 0.0 and 1.0
@@ -73,6 +108,8 @@
 
 private:
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::FlowGraphSourceBuffered> mSource;
+    std::unique_ptr<RESAMPLER_OUTER_NAMESPACE::resampler::MultiChannelResampler> mResampler;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::SampleRateConverter> mRateConverter;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoBlend> mMonoBlend;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::Limiter> mLimiter;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoToMultiConverter> mChannelConverter;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 84c715f..9b1ad72 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -63,6 +63,8 @@
 
 #define LOG_TIMESTAMPS            0
 
+#define ENABLE_SAMPLE_RATE_CONVERTER 1
+
 AudioStreamInternal::AudioStreamInternal(AAudioServiceInterface  &serviceInterface, bool inService)
         : AudioStream()
         , mClockModel()
@@ -132,12 +134,6 @@
 
     request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
 
-    request.getConfiguration().setHardwareSamplesPerFrame(builder.getHardwareSamplesPerFrame());
-    request.getConfiguration().setHardwareSampleRate(builder.getHardwareSampleRate());
-    request.getConfiguration().setHardwareFormat(builder.getHardwareFormat());
-
-    mDeviceChannelCount = getSamplesPerFrame(); // Assume it will be the same. Update if not.
-
     mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
     if (getServiceHandle() < 0
             && (request.getConfiguration().getSamplesPerFrame() == 1
@@ -181,9 +177,6 @@
         setChannelMask(configurationOutput.getChannelMask());
     }
 
-    mDeviceChannelCount = configurationOutput.getSamplesPerFrame();
-
-    setSampleRate(configurationOutput.getSampleRate());
     setDeviceId(configurationOutput.getDeviceId());
     setSessionId(configurationOutput.getSessionId());
     setSharingMode(configurationOutput.getSharingMode());
@@ -194,8 +187,21 @@
     setIsContentSpatialized(configurationOutput.isContentSpatialized());
     setInputPreset(configurationOutput.getInputPreset());
 
+    setDeviceSampleRate(configurationOutput.getSampleRate());
+
+    if (getSampleRate() == AAUDIO_UNSPECIFIED) {
+        setSampleRate(configurationOutput.getSampleRate());
+    }
+
+#if !ENABLE_SAMPLE_RATE_CONVERTER
+    if (getSampleRate() != getDeviceSampleRate()) {
+        goto error;
+    }
+#endif
+
     // Save device format so we can do format conversion and volume scaling together.
     setDeviceFormat(configurationOutput.getFormat());
+    setDeviceSamplesPerFrame(configurationOutput.getSamplesPerFrame());
 
     setHardwareSamplesPerFrame(configurationOutput.getHardwareSamplesPerFrame());
     setHardwareSampleRate(configurationOutput.getHardwareSampleRate());
@@ -233,39 +239,46 @@
 }
 
 aaudio_result_t AudioStreamInternal::configureDataInformation(int32_t callbackFrames) {
-    int32_t framesPerHardwareBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
+    int32_t deviceFramesPerBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
 
     // Scale up the burst size to meet the minimum equivalent in microseconds.
     // This is to avoid waking the CPU too often when the HW burst is very small
-    // or at high sample rates.
-    int32_t framesPerBurst = framesPerHardwareBurst;
+    // or at high sample rates. The actual number of frames that we call back to
+    // the app with will be 0 < N <= framesPerBurst, so round up the division.
+    int32_t framesPerBurst = (static_cast<int64_t>(deviceFramesPerBurst) * getSampleRate() +
+             getDeviceSampleRate() - 1) / getDeviceSampleRate();
     int32_t burstMicros = 0;
     const int32_t burstMinMicros = android::AudioSystem::getAAudioHardwareBurstMinUsec();
     do {
         if (burstMicros > 0) {  // skip first loop
+            deviceFramesPerBurst *= 2;
             framesPerBurst *= 2;
         }
         burstMicros = framesPerBurst * static_cast<int64_t>(1000000) / getSampleRate();
     } while (burstMicros < burstMinMicros);
     ALOGD("%s() original HW burst = %d, minMicros = %d => SW burst = %d\n",
-          __func__, framesPerHardwareBurst, burstMinMicros, framesPerBurst);
+          __func__, deviceFramesPerBurst, burstMinMicros, framesPerBurst);
 
     // Validate final burst size.
     if (framesPerBurst < MIN_FRAMES_PER_BURST || framesPerBurst > MAX_FRAMES_PER_BURST) {
         ALOGE("%s - framesPerBurst out of range = %d", __func__, framesPerBurst);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
+    setDeviceFramesPerBurst(deviceFramesPerBurst);
     setFramesPerBurst(framesPerBurst); // only save good value
 
-    mBufferCapacityInFrames = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
+    mDeviceBufferCapacityInFrames = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
+
+    mBufferCapacityInFrames = static_cast<int64_t>(mDeviceBufferCapacityInFrames)
+            * getSampleRate() / getDeviceSampleRate();
     if (mBufferCapacityInFrames < getFramesPerBurst()
             || mBufferCapacityInFrames > MAX_BUFFER_CAPACITY_IN_FRAMES) {
         ALOGE("%s - bufferCapacity out of range = %d", __func__, mBufferCapacityInFrames);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
 
-    mClockModel.setSampleRate(getSampleRate());
-    mClockModel.setFramesPerBurst(framesPerHardwareBurst);
+    mClockModel.setSampleRate(getDeviceSampleRate());
+    mClockModel.setFramesPerBurst(deviceFramesPerBurst);
 
     if (isDataCallbackSet()) {
         mCallbackFrames = callbackFrames;
@@ -315,7 +328,8 @@
         mTimeOffsetNanos = offsetMicros * AAUDIO_NANOS_PER_MICROSECOND;
     }
 
-    setBufferSize(mBufferCapacityInFrames / 2); // Default buffer size to match Q
+    // Default buffer size to match Q
+    setBufferSize(mBufferCapacityInFrames / 2);
     return AAUDIO_OK;
 }
 
@@ -374,9 +388,9 @@
     // Cache the buffer size which may be from client.
     const int32_t previousBufferSize = mBufferSizeInFrames;
     // Copy all available data from current data queue.
-    uint8_t buffer[getBufferCapacity() * getBytesPerFrame()];
-    android::fifo_frames_t fullFramesAvailable =
-            mAudioEndpoint->read(buffer, getBufferCapacity());
+    uint8_t buffer[getDeviceBufferCapacity() * getBytesPerFrame()];
+    android::fifo_frames_t fullFramesAvailable = mAudioEndpoint->read(buffer,
+            getDeviceBufferCapacity());
     mEndPointParcelable.closeDataFileDescriptor();
     aaudio_result_t result = mServiceInterface.exitStandby(
             mServiceStreamHandleInfo, endpointParcelable);
@@ -408,7 +422,7 @@
         goto exit;
     }
     // Write data from previous data buffer to new endpoint.
-    if (android::fifo_frames_t framesWritten =
+    if (const android::fifo_frames_t framesWritten =
                 mAudioEndpoint->write(buffer, fullFramesAvailable);
             framesWritten != fullFramesAvailable) {
         ALOGW("Some data lost after exiting standby, frames written: %d, "
@@ -448,7 +462,7 @@
         ALOGD("requestStart() but DISCONNECTED");
         return AAUDIO_ERROR_DISCONNECTED;
     }
-    aaudio_stream_state_t originalState = getState();
+    const aaudio_stream_state_t originalState = getState();
     setState(AAUDIO_STREAM_STATE_STARTING);
 
     // Clear any stale timestamps from the previous run.
@@ -605,7 +619,11 @@
     // Generated in server and passed to client. Return latest.
     if (mAtomicInternalTimestamp.isValid()) {
         Timestamp timestamp = mAtomicInternalTimestamp.read();
-        int64_t position = timestamp.getPosition() + mFramesOffsetFromService;
+        // This should not overflow as timestamp.getPosition() should be a position in a buffer and
+        // not the actual timestamp. timestamp.getNanoseconds() below uses the actual timestamp.
+        // At 48000 Hz we can run for over 100 years before overflowing the int64_t.
+        int64_t position = (timestamp.getPosition() + mFramesOffsetFromService) * getSampleRate() /
+                getDeviceSampleRate();
         if (position >= 0) {
             *framePosition = position;
             *timeNanoseconds = timestamp.getNanoseconds();
@@ -889,7 +907,8 @@
         adjustedFrames = maximumSize;
     } else {
         // Round to the next highest burst size.
-        int32_t numBursts = (adjustedFrames + getFramesPerBurst() - 1) / getFramesPerBurst();
+        int32_t numBursts = (static_cast<int64_t>(adjustedFrames) + getFramesPerBurst() - 1) /
+                getFramesPerBurst();
         adjustedFrames = numBursts * getFramesPerBurst();
         // Clip just in case maximumSize is not a multiple of getFramesPerBurst().
         adjustedFrames = std::min(maximumSize, adjustedFrames);
@@ -897,23 +916,32 @@
 
     if (mAudioEndpoint) {
         // Clip against the actual size from the endpoint.
-        int32_t actualFrames = 0;
+        int32_t actualFramesDevice = 0;
+        int32_t maximumFramesDevice = (static_cast<int64_t>(maximumSize) * getDeviceSampleRate()
+                + getSampleRate() - 1) / getSampleRate();
         // Set to maximum size so we can write extra data when ready in order to reduce glitches.
         // The amount we keep in the buffer is controlled by mBufferSizeInFrames.
-        mAudioEndpoint->setBufferSizeInFrames(maximumSize, &actualFrames);
+        mAudioEndpoint->setBufferSizeInFrames(maximumFramesDevice, &actualFramesDevice);
+        int32_t actualFrames = (static_cast<int64_t>(actualFramesDevice) * getSampleRate() +
+                 getDeviceSampleRate() - 1) / getDeviceSampleRate();
         // actualFrames should be <= actual maximum size of endpoint
         adjustedFrames = std::min(actualFrames, adjustedFrames);
     }
 
-    if (adjustedFrames != mBufferSizeInFrames) {
+    const int32_t bufferSizeInFrames = adjustedFrames;
+    const int32_t deviceBufferSizeInFrames = static_cast<int64_t>(bufferSizeInFrames) *
+            getDeviceSampleRate() / getSampleRate();
+
+    if (deviceBufferSizeInFrames != mDeviceBufferSizeInFrames) {
         android::mediametrics::LogItem(mMetricsId)
                 .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETBUFFERSIZE)
-                .set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, adjustedFrames)
+                .set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, deviceBufferSizeInFrames)
                 .set(AMEDIAMETRICS_PROP_UNDERRUN, (int32_t) getXRunCount())
                 .record();
     }
 
-    mBufferSizeInFrames = adjustedFrames;
+    mBufferSizeInFrames = bufferSizeInFrames;
+    mDeviceBufferSizeInFrames = deviceBufferSizeInFrames;
     ALOGV("%s(%d) returns %d", __func__, requestedFrames, adjustedFrames);
     return (aaudio_result_t) adjustedFrames;
 }
@@ -922,10 +950,18 @@
     return mBufferSizeInFrames;
 }
 
+int32_t AudioStreamInternal::getDeviceBufferSize() const {
+    return mDeviceBufferSizeInFrames;
+}
+
 int32_t AudioStreamInternal::getBufferCapacity() const {
     return mBufferCapacityInFrames;
 }
 
+int32_t AudioStreamInternal::getDeviceBufferCapacity() const {
+    return mDeviceBufferCapacityInFrames;
+}
+
 bool AudioStreamInternal::isClockModelInControl() const {
     return isActive() && mAudioEndpoint->isFreeRunning() && mClockModel.isRunning();
 }
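The repeated (frames * rate + otherRate - 1) / otherRate expressions in this file are all the same 64-bit ceiling division between device-rate and app-rate frame counts. For example, a 96-frame device burst at 48000 Hz with a 44100 Hz app stream becomes ceil(96 * 44100 / 48000) = 89 app frames per burst. A hypothetical helper capturing that pattern (not part of the patch):

    #include <cstdint>

    // Scale a frame count from one sample rate to another, rounding up.
    static int32_t scaleFramesCeil(int32_t frames, int32_t fromRate, int32_t toRate) {
        // Widen to 64 bits first so frames * toRate cannot overflow.
        return static_cast<int32_t>(
                (static_cast<int64_t>(frames) * toRate + fromRate - 1) / fromRate);
    }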
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 9c06121..a5981b1 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -22,8 +22,9 @@
 
 #include "binding/AudioEndpointParcelable.h"
 #include "binding/AAudioServiceInterface.h"
-#include "client/IsochronousClockModel.h"
+#include "client/AAudioFlowGraph.h"
 #include "client/AudioEndpoint.h"
+#include "client/IsochronousClockModel.h"
 #include "core/AudioStream.h"
 #include "utility/AudioClock.h"
 
@@ -56,8 +57,12 @@
 
     int32_t getBufferSize() const override;
 
+    int32_t getDeviceBufferSize() const;
+
     int32_t getBufferCapacity() const override;
 
+    int32_t getDeviceBufferCapacity() const override;
+
     int32_t getXRunCount() const override {
         return mXRunCount;
     }
@@ -133,8 +138,6 @@
     // Calculate timeout for an operation involving framesPerOperation.
     int64_t calculateReasonableTimeout(int32_t framesPerOperation);
 
-    int32_t getDeviceChannelCount() const { return mDeviceChannelCount; }
-
     /**
      * @return true if running in audio service, versus in app process
      */
@@ -177,6 +180,8 @@
     int64_t                  mLastFramesWritten = 0;
     int64_t                  mLastFramesRead = 0;
 
+    AAudioFlowGraph          mFlowGraph;
+
 private:
     /*
      * Asynchronous write with data conversion.
@@ -206,13 +211,10 @@
 
     int64_t                  mServiceLatencyNanos = 0;
 
-    // Sometimes the hardware is operating with a different channel count from the app.
-    // Then we require conversion in AAudio.
-    int32_t                  mDeviceChannelCount = 0;
-
     int32_t                  mBufferSizeInFrames = 0; // local threshold to control latency
+    int32_t                  mDeviceBufferSizeInFrames = 0;
     int32_t                  mBufferCapacityInFrames = 0;
-
+    int32_t                  mDeviceBufferCapacityInFrames = 0;
 
 };
 
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index f5cc2be..7d7b4ef 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -47,6 +47,27 @@
 
 }
 
+aaudio_result_t AudioStreamInternalCapture::open(const AudioStreamBuilder &builder) {
+    aaudio_result_t result = AudioStreamInternal::open(builder);
+    if (result == AAUDIO_OK) {
+        result = mFlowGraph.configure(getDeviceFormat(),
+                             getDeviceSamplesPerFrame(),
+                             getDeviceSampleRate(),
+                             getFormat(),
+                             getSamplesPerFrame(),
+                             getSampleRate(),
+                             getRequireMonoBlend(),
+                             false /* useVolumeRamps */,
+                             getAudioBalance(),
+                             aaudio::resampler::MultiChannelResampler::Quality::Medium);
+
+        if (result != AAUDIO_OK) {
+            safeReleaseClose();
+        }
+    }
+    return result;
+}
+
 void AudioStreamInternalCapture::advanceClientToMatchServerPosition(int32_t serverMargin) {
     int64_t readCounter = mAudioEndpoint->getDataReadCounter();
     int64_t writeCounter = mAudioEndpoint->getDataWriteCounter() + serverMargin;
@@ -149,7 +170,8 @@
                 // Calculate frame position based off of the readCounter because
                 // the writeCounter might have just advanced in the background,
                 // causing us to sleep until a later burst.
-                int64_t nextPosition = mAudioEndpoint->getDataReadCounter() + getFramesPerBurst();
+                const int64_t nextPosition = mAudioEndpoint->getDataReadCounter() +
+                        getDeviceFramesPerBurst();
                 wakeTime = mClockModel.convertPositionToLatestTime(nextPosition);
             }
                 break;
@@ -166,42 +188,73 @@
 
 aaudio_result_t AudioStreamInternalCapture::readNowWithConversion(void *buffer,
                                                                 int32_t numFrames) {
-    // ALOGD("readNowWithConversion(%p, %d)",
-    //              buffer, numFrames);
     WrappingBuffer wrappingBuffer;
-    uint8_t *destination = (uint8_t *) buffer;
-    int32_t framesLeft = numFrames;
+    uint8_t *byteBuffer = (uint8_t *) buffer;
+    int32_t framesLeftInByteBuffer = numFrames;
+
+    if (framesLeftInByteBuffer > 0) {
+        // Pull data from the flowgraph in case there is residual data.
+        const int32_t framesActuallyWrittenToByteBuffer = mFlowGraph.pull(
+                (void *)byteBuffer,
+                framesLeftInByteBuffer);
+
+        const int32_t numBytesActuallyWrittenToByteBuffer =
+                framesActuallyWrittenToByteBuffer * getBytesPerFrame();
+        byteBuffer += numBytesActuallyWrittenToByteBuffer;
+        framesLeftInByteBuffer -= framesActuallyWrittenToByteBuffer;
+    }
 
     mAudioEndpoint->getFullFramesAvailable(&wrappingBuffer);
 
-    // Read data in one or two parts.
-    for (int partIndex = 0; framesLeft > 0 && partIndex < WrappingBuffer::SIZE; partIndex++) {
-        int32_t framesToProcess = framesLeft;
-        const int32_t framesAvailable = wrappingBuffer.numFrames[partIndex];
-        if (framesAvailable <= 0) break;
+    // Process the endpoint data in one or two parts.
+    int partIndex = 0;
+    int framesReadFromAudioEndpoint = 0;
+    while (framesLeftInByteBuffer > 0 && partIndex < WrappingBuffer::SIZE) {
+        const int32_t totalFramesInWrappingBuffer = wrappingBuffer.numFrames[partIndex];
+        int32_t framesAvailableInWrappingBuffer = totalFramesInWrappingBuffer;
+        uint8_t *currentWrappingBuffer = (uint8_t *) wrappingBuffer.data[partIndex];
 
-        if (framesToProcess > framesAvailable) {
-            framesToProcess = framesAvailable;
+        // Put data from the wrapping buffer into the flowgraph 8 frames at a time.
+        // Continuously pull as much data as possible from the flowgraph into the byte buffer.
+        // The return value of mFlowGraph.process is the number of frames actually pulled.
+        while (framesAvailableInWrappingBuffer > 0 && framesLeftInByteBuffer > 0) {
+            const int32_t framesToReadFromWrappingBuffer = std::min(flowgraph::kDefaultBufferSize,
+                    framesAvailableInWrappingBuffer);
+
+            const int32_t numBytesToReadFromWrappingBuffer = getBytesPerDeviceFrame() *
+                    framesToReadFromWrappingBuffer;
+
+            // If framesActuallyWrittenToByteBuffer < framesLeftInByteBuffer, it is guaranteed
+            // that all the data is pulled. If there is no more space in the byteBuffer, the
+            // remaining data will be pulled in the following readNowWithConversion().
+            const int32_t framesActuallyWrittenToByteBuffer = mFlowGraph.process(
+                    (void *)currentWrappingBuffer,
+                    framesToReadFromWrappingBuffer,
+                    (void *)byteBuffer,
+                    framesLeftInByteBuffer);
+
+            const int32_t numBytesActuallyWrittenToByteBuffer =
+                    framesActuallyWrittenToByteBuffer * getBytesPerFrame();
+            byteBuffer += numBytesActuallyWrittenToByteBuffer;
+            framesLeftInByteBuffer -= framesActuallyWrittenToByteBuffer;
+            currentWrappingBuffer += numBytesToReadFromWrappingBuffer;
+            framesAvailableInWrappingBuffer -= framesToReadFromWrappingBuffer;
+
+            //ALOGD("%s() numBytesActuallyWrittenToByteBuffer %d, framesLeftInByteBuffer %d"
+            //      "framesAvailableInWrappingBuffer %d, framesReadFromAudioEndpoint %d"
+            //      , __func__, numBytesActuallyWrittenToByteBuffer, framesLeftInByteBuffer,
+            //      framesAvailableInWrappingBuffer, framesReadFromAudioEndpoint);
         }
-
-        const int32_t numBytes = getBytesPerFrame() * framesToProcess;
-        const int32_t numSamples = framesToProcess * getSamplesPerFrame();
-
-        const audio_format_t sourceFormat = getDeviceFormat();
-        const audio_format_t destinationFormat = getFormat();
-
-        memcpy_by_audio_format(destination, destinationFormat,
-                wrappingBuffer.data[partIndex], sourceFormat, numSamples);
-
-        destination += numBytes;
-        framesLeft -= framesToProcess;
+        framesReadFromAudioEndpoint += totalFramesInWrappingBuffer -
+                framesAvailableInWrappingBuffer;
+        partIndex++;
     }
 
-    int32_t framesProcessed = numFrames - framesLeft;
-    mAudioEndpoint->advanceReadIndex(framesProcessed);
+    // Advance the endpoint's read index by the number of frames read from the wrapping buffer.
+    mAudioEndpoint->advanceReadIndex(framesReadFromAudioEndpoint);
 
-    //ALOGD("readNowWithConversion() returns %d", framesProcessed);
-    return framesProcessed;
+    // The internal code should use the number of frames written to the app's buffer.
+    return numFrames - framesLeftInByteBuffer;
 }
 
 int64_t AudioStreamInternalCapture::getFramesWritten() {
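For reference, the new capture loop above feeds the endpoint's wrapping buffer into the flowgraph in fixed chunks of flowgraph::kDefaultBufferSize (8) frames and pulls whatever fits into the app buffer after each chunk. A stripped-down sketch of that shape (hypothetical names, not part of the patch):

    #include <algorithm>
    #include <cstdint>

    int32_t feedInChunks(AAudioFlowGraph &graph,
                         const uint8_t *deviceData, int32_t deviceFrames,
                         uint8_t *appData, int32_t appFrames,
                         int32_t bytesPerDeviceFrame, int32_t bytesPerAppFrame) {
        constexpr int32_t kChunkFrames = 8;  // assumed equal to flowgraph::kDefaultBufferSize
        int32_t appFramesFilled = 0;
        while (deviceFrames > 0 && appFramesFilled < appFrames) {
            const int32_t toFeed = std::min(kChunkFrames, deviceFrames);
            const int32_t got = graph.process(
                    deviceData, toFeed,
                    appData + static_cast<size_t>(appFramesFilled) * bytesPerAppFrame,
                    appFrames - appFramesFilled);
            deviceData += toFeed * bytesPerDeviceFrame;
            deviceFrames -= toFeed;
            appFramesFilled += got;
        }
        return appFramesFilled;  // frames delivered to the app buffer
    }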
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.h b/media/libaaudio/src/client/AudioStreamInternalCapture.h
index 87017de..10e247d 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.h
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.h
@@ -32,6 +32,8 @@
                                         bool inService = false);
     virtual ~AudioStreamInternalCapture() = default;
 
+    aaudio_result_t open(const AudioStreamBuilder &builder) override;
+
     aaudio_result_t read(void *buffer,
                          int32_t numFrames,
                          int64_t timeoutNanoseconds) override;
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 89dd8ff..ac927ae 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -48,14 +48,18 @@
 
 aaudio_result_t AudioStreamInternalPlay::open(const AudioStreamBuilder &builder) {
     aaudio_result_t result = AudioStreamInternal::open(builder);
+    const bool useVolumeRamps = (getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE);
     if (result == AAUDIO_OK) {
         result = mFlowGraph.configure(getFormat(),
                              getSamplesPerFrame(),
+                             getSampleRate(),
                              getDeviceFormat(),
-                             getDeviceChannelCount(),
+                             getDeviceSamplesPerFrame(),
+                             getDeviceSampleRate(),
                              getRequireMonoBlend(),
+                             useVolumeRamps,
                              getAudioBalance(),
-                             (getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE));
+                             aaudio::resampler::MultiChannelResampler::Quality::Medium);
 
         if (result != AAUDIO_OK) {
             safeReleaseClose();
@@ -186,7 +190,7 @@
     // Sleep if there is too much data in the buffer.
     // Calculate an ideal time to wake up.
     if (wakeTimePtr != nullptr
-            && (mAudioEndpoint->getFullFramesAvailable() >= getBufferSize())) {
+            && (mAudioEndpoint->getFullFramesAvailable() >= getDeviceBufferSize())) {
         // By default wake up a few milliseconds from now.  // TODO review
         int64_t wakeTime = currentNanoTime + (1 * AAUDIO_NANOS_PER_MILLISECOND);
         aaudio_stream_state_t state = getState();
@@ -206,12 +210,12 @@
                 // If the appBufferSize is smaller than the endpointBufferSize then
                 // we will have room to write data beyond the appBufferSize.
                 // That is a technique used to reduce glitches without adding latency.
-                const int32_t appBufferSize = getBufferSize();
+                const int64_t appBufferSize = getDeviceBufferSize();
                 // The endpoint buffer size is set to the maximum that can be written.
                 // If we use it then we must carve out some room to write data when we wake up.
-                const int32_t endBufferSize = mAudioEndpoint->getBufferSizeInFrames()
-                        - getFramesPerBurst();
-                const int32_t bestBufferSize = std::min(appBufferSize, endBufferSize);
+                const int64_t endBufferSize = mAudioEndpoint->getBufferSizeInFrames()
+                        - getDeviceFramesPerBurst();
+                const int64_t bestBufferSize = std::min(appBufferSize, endBufferSize);
                 int64_t targetReadPosition = mAudioEndpoint->getDataWriteCounter() - bestBufferSize;
                 wakeTime = mClockModel.convertPositionToTime(targetReadPosition);
             }
@@ -232,37 +236,78 @@
                                                             int32_t numFrames) {
     WrappingBuffer wrappingBuffer;
     uint8_t *byteBuffer = (uint8_t *) buffer;
-    int32_t framesLeft = numFrames;
+    int32_t framesLeftInByteBuffer = numFrames;
 
     mAudioEndpoint->getEmptyFramesAvailable(&wrappingBuffer);
 
     // Write data in one or two parts.
     int partIndex = 0;
-    while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
-        int32_t framesToWrite = framesLeft;
-        int32_t framesAvailable = wrappingBuffer.numFrames[partIndex];
-        if (framesAvailable > 0) {
-            if (framesToWrite > framesAvailable) {
-                framesToWrite = framesAvailable;
-            }
+    int framesWrittenToAudioEndpoint = 0;
+    while (framesLeftInByteBuffer > 0 && partIndex < WrappingBuffer::SIZE) {
+        int32_t framesAvailableInWrappingBuffer = wrappingBuffer.numFrames[partIndex];
+        uint8_t *currentWrappingBuffer = (uint8_t *) wrappingBuffer.data[partIndex];
 
-            int32_t numBytes = getBytesPerFrame() * framesToWrite;
+        if (framesAvailableInWrappingBuffer > 0) {
+            // Pull data from the flowgraph in case there is residual data.
+            const int32_t framesActuallyWrittenToWrappingBuffer = mFlowGraph.pull(
+                (void*) currentWrappingBuffer,
+                framesAvailableInWrappingBuffer);
 
-            mFlowGraph.process((void *)byteBuffer,
-                               wrappingBuffer.data[partIndex],
-                               framesToWrite);
+            const int32_t numBytesActuallyWrittenToWrappingBuffer =
+                framesActuallyWrittenToWrappingBuffer * getBytesPerDeviceFrame();
+            currentWrappingBuffer += numBytesActuallyWrittenToWrappingBuffer;
+            framesAvailableInWrappingBuffer -= framesActuallyWrittenToWrappingBuffer;
+            framesWrittenToAudioEndpoint += framesActuallyWrittenToWrappingBuffer;
+        }
 
-            byteBuffer += numBytes;
-            framesLeft -= framesToWrite;
-        } else {
-            break;
+        // Put data from byteBuffer into the flowgraph one buffer (8 frames) at a time.
+        // Continuously pull as much data as possible from the flowgraph into the wrapping buffer.
+        // The return value of mFlowGraph.process is the number of frames actually pulled.
+        while (framesAvailableInWrappingBuffer > 0 && framesLeftInByteBuffer > 0) {
+            const int32_t framesToWriteFromByteBuffer = std::min(flowgraph::kDefaultBufferSize,
+                    framesLeftInByteBuffer);
+
+            const int32_t numBytesToWriteFromByteBuffer = getBytesPerFrame() *
+                    framesToWriteFromByteBuffer;
+
+            //ALOGD("%s() framesLeftInByteBuffer %d, framesAvailableInWrappingBuffer %d"
+            //      "framesToWriteFromByteBuffer %d, numBytesToWriteFromByteBuffer %d"
+            //      , __func__, framesLeftInByteBuffer, framesAvailableInWrappingBuffer,
+            //      framesToWriteFromByteBuffer, numBytesToWriteFromByteBuffer);
+
+            const int32_t framesActuallyWrittenToWrappingBuffer = mFlowGraph.process(
+                    (void *)byteBuffer,
+                    framesToWriteFromByteBuffer,
+                    (void *)currentWrappingBuffer,
+                    framesAvailableInWrappingBuffer);
+
+            byteBuffer += numBytesToWriteFromByteBuffer;
+            framesLeftInByteBuffer -= framesToWriteFromByteBuffer;
+            const int32_t numBytesActuallyWrittenToWrappingBuffer =
+                    framesActuallyWrittenToWrappingBuffer * getBytesPerDeviceFrame();
+            currentWrappingBuffer += numBytesActuallyWrittenToWrappingBuffer;
+            framesAvailableInWrappingBuffer -= framesActuallyWrittenToWrappingBuffer;
+            framesWrittenToAudioEndpoint += framesActuallyWrittenToWrappingBuffer;
+
+            //ALOGD("%s() numBytesActuallyWrittenToWrappingBuffer %d, framesLeftInByteBuffer %d"
+            //      "framesActuallyWrittenToWrappingBuffer %d, numBytesToWriteFromByteBuffer %d"
+            //      "framesWrittenToAudioEndpoint %d"
+            //      , __func__, numBytesActuallyWrittenToWrappingBuffer, framesLeftInByteBuffer,
+            //      framesActuallyWrittenToWrappingBuffer, numBytesToWriteFromByteBuffer,
+            //      framesWrittenToAudioEndpoint);
         }
         partIndex++;
     }
-    int32_t framesWritten = numFrames - framesLeft;
-    mAudioEndpoint->advanceWriteIndex(framesWritten);
+    //ALOGD("%s() framesWrittenToAudioEndpoint %d, numFrames %d"
+    //              "framesLeftInByteBuffer %d"
+    //              , __func__, framesWrittenToAudioEndpoint, numFrames,
+    //              framesLeftInByteBuffer);
 
-    return framesWritten;
+    // The audio endpoint should reference the number of frames written to the wrapping buffer.
+    mAudioEndpoint->advanceWriteIndex(framesWrittenToAudioEndpoint);
+
+    // The internal code should use the number of frames read from the app.
+    return numFrames - framesLeftInByteBuffer;
 }
 
 int64_t AudioStreamInternalPlay::getFramesRead() {
@@ -284,7 +329,6 @@
     return mLastFramesWritten;
 }
 
-
 // Render audio in the application callback and then write the data to the stream.
 void *AudioStreamInternalPlay::callbackLoop() {
     ALOGD("%s() entering >>>>>>>>>>>>>>>", __func__);
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index e761807..b51b5d0 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -21,7 +21,6 @@
 #include <aaudio/AAudio.h>
 
 #include "binding/AAudioServiceInterface.h"
-#include "client/AAudioFlowGraph.h"
 #include "client/AudioStreamInternal.h"
 
 using android::sp;
@@ -89,13 +88,11 @@
      * Asynchronous write with data conversion.
      * @param buffer
      * @param numFrames
-     * @return fdrames written or negative error
+     * @return frames written or negative error
      */
     aaudio_result_t writeNowWithConversion(const void *buffer,
                                            int32_t numFrames);
 
-    AAudioFlowGraph          mFlowGraph;
-
 };
 
 } /* namespace aaudio */
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 8a13a6f..1e27a81 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -571,13 +571,15 @@
 AAUDIO_API int64_t AAudioStream_getFramesWritten(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
-    return audioStream->getFramesWritten();
+    return audioStream->getFramesWritten() * audioStream->getSampleRate() /
+            audioStream->getDeviceSampleRate();
 }
 
 AAUDIO_API int64_t AAudioStream_getFramesRead(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
-    return audioStream->getFramesRead();
+    return audioStream->getFramesRead() * audioStream->getSampleRate() /
+            audioStream->getDeviceSampleRate();
 }
 
 AAUDIO_API aaudio_result_t AAudioStream_getTimestamp(AAudioStream* stream,
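Because AAudioStream_getFramesWritten() and AAudioStream_getFramesRead() now report app-rate frames, the conversion above is a straight 64-bit scale. For example, a device-side counter of 480000 frames on a 48000 Hz device stream with a 44100 Hz app rate is reported as 480000 * 44100 / 48000 = 441000 frames. A hypothetical sketch of that conversion (not part of the patch):

    #include <cstdint>

    static int64_t deviceToAppFrames(int64_t deviceFrames,
                                     int32_t appSampleRate, int32_t deviceSampleRate) {
        // Truncating integer division, matching the expressions above.
        return deviceFrames * appSampleRate / deviceSampleRate;
    }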
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index f305e46..1db62f3 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -184,6 +184,8 @@
         case AAUDIO_INPUT_PRESET_VOICE_RECOGNITION:
         case AAUDIO_INPUT_PRESET_UNPROCESSED:
         case AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE:
+        case AAUDIO_INPUT_PRESET_SYSTEM_ECHO_REFERENCE:
+        case AAUDIO_INPUT_PRESET_SYSTEM_HOTWORD:
             break; // valid
         default:
             ALOGD("input preset not valid = %d", mInputPreset);
@@ -317,4 +319,4 @@
     ALOGD("mHardwareSamplesPerFrame = %6d", mHardwareSamplesPerFrame);
     ALOGD("mHardwareSampleRate   = %6d", mHardwareSampleRate);
     ALOGD("mHardwareAudioFormat  = %6d", (int)mHardwareAudioFormat);
-}
\ No newline at end of file
+}
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 56ef1e6..e0fd325 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -134,7 +134,8 @@
             .set(AMEDIAMETRICS_PROP_ENCODINGHARDWARE,
                     android::toString(getHardwareFormat()).c_str())
             .set(AMEDIAMETRICS_PROP_CHANNELCOUNTHARDWARE, (int32_t)getHardwareSamplesPerFrame())
-            .set(AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE, (int32_t)getHardwareSampleRate());
+            .set(AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE, (int32_t)getHardwareSampleRate())
+            .set(AMEDIAMETRICS_PROP_SAMPLERATECLIENT, (int32_t)getSampleRate());
 
         if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
             item.set(AMEDIAMETRICS_PROP_PLAYERIID, mPlayerBase->getPlayerIId());
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 9b4b734..f2f5cac 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -204,10 +204,18 @@
         return mBufferCapacity;
     }
 
+    virtual int32_t getDeviceBufferCapacity() const {
+        return mDeviceBufferCapacity;
+    }
+
     virtual int32_t getFramesPerBurst() const {
         return mFramesPerBurst;
     }
 
+    virtual int32_t getDeviceFramesPerBurst() const {
+        return mDeviceFramesPerBurst;
+    }
+
     virtual int32_t getXRunCount() const {
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
@@ -224,6 +232,10 @@
         return mSampleRate;
     }
 
+    aaudio_result_t getDeviceSampleRate() const {
+        return mDeviceSampleRate;
+    }
+
     aaudio_result_t getHardwareSampleRate() const {
         return mHardwareSampleRate;
     }
@@ -240,6 +252,10 @@
         return mSamplesPerFrame;
     }
 
+    aaudio_result_t getDeviceSamplesPerFrame() const {
+        return mDeviceSamplesPerFrame;
+    }
+
     aaudio_result_t getHardwareSamplesPerFrame() const {
         return mHardwareSamplesPerFrame;
     }
@@ -322,10 +338,10 @@
     }
 
     /**
-     * This is only valid after setChannelMask() and setDeviceFormat() have been called.
+     * This is only valid after setDeviceSamplesPerFrame() and setDeviceFormat() have been called.
      */
     int32_t getBytesPerDeviceFrame() const {
-        return getSamplesPerFrame() * audio_bytes_per_sample(getDeviceFormat());
+        return getDeviceSamplesPerFrame() * audio_bytes_per_sample(getDeviceFormat());
     }
 
     virtual int64_t getFramesWritten() = 0;
@@ -365,6 +381,11 @@
         mSamplesPerFrame = AAudioConvert_channelMaskToCount(channelMask);
     }
 
+    void setDeviceSamplesPerFrame(int32_t deviceSamplesPerFrame) {
+        mDeviceSamplesPerFrame = deviceSamplesPerFrame;
+    }
+
+
     /**
      * @return true if data callback has been specified
      */
@@ -542,6 +563,11 @@
     }
 
     // This should not be called after the open() call.
+    void setDeviceSampleRate(int32_t deviceSampleRate) {
+        mDeviceSampleRate = deviceSampleRate;
+    }
+
+    // This should not be called after the open() call.
     void setHardwareSampleRate(int32_t hardwareSampleRate) {
         mHardwareSampleRate = hardwareSampleRate;
     }
@@ -552,11 +578,21 @@
     }
 
     // This should not be called after the open() call.
+    void setDeviceFramesPerBurst(int32_t deviceFramesPerBurst) {
+        mDeviceFramesPerBurst = deviceFramesPerBurst;
+    }
+
+    // This should not be called after the open() call.
     void setBufferCapacity(int32_t bufferCapacity) {
         mBufferCapacity = bufferCapacity;
     }
 
     // This should not be called after the open() call.
+    void setDeviceBufferCapacity(int32_t deviceBufferCapacity) {
+        mDeviceBufferCapacity = deviceBufferCapacity;
+    }
+
+    // This should not be called after the open() call.
     void setSharingMode(aaudio_sharing_mode_t sharingMode) {
         mSharingMode = sharingMode;
     }
@@ -721,9 +757,11 @@
 
     // These do not change after open().
     int32_t                     mSamplesPerFrame = AAUDIO_UNSPECIFIED;
+    int32_t                     mDeviceSamplesPerFrame = AAUDIO_UNSPECIFIED;
     int32_t                     mHardwareSamplesPerFrame = AAUDIO_UNSPECIFIED;
     aaudio_channel_mask_t       mChannelMask = AAUDIO_UNSPECIFIED;
     int32_t                     mSampleRate = AAUDIO_UNSPECIFIED;
+    int32_t                     mDeviceSampleRate = AAUDIO_UNSPECIFIED;
     int32_t                     mHardwareSampleRate = AAUDIO_UNSPECIFIED;
     int32_t                     mDeviceId = AAUDIO_UNSPECIFIED;
     aaudio_sharing_mode_t       mSharingMode = AAUDIO_SHARING_MODE_SHARED;
@@ -732,7 +770,9 @@
     audio_format_t              mHardwareFormat = AUDIO_FORMAT_DEFAULT;
     aaudio_performance_mode_t   mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
     int32_t                     mFramesPerBurst = 0;
+    int32_t                     mDeviceFramesPerBurst = 0;
     int32_t                     mBufferCapacity = 0;
+    int32_t                     mDeviceBufferCapacity = 0;
 
     aaudio_usage_t              mUsage           = AAUDIO_UNSPECIFIED;
     aaudio_content_type_t       mContentType     = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/core/VersionExperiment.txt b/media/libaaudio/src/core/VersionExperiment.txt
deleted file mode 100644
index 071239b..0000000
--- a/media/libaaudio/src/core/VersionExperiment.txt
+++ /dev/null
@@ -1,55 +0,0 @@
-
-// TODO Experiment with versioning. This may be removed or changed dramatically.
-// Please ignore for now. Do not review.
-#define OBOE_VERSION_EXPERIMENT  0
-#if OBOE_VERSION_EXPERIMENT
-
-#define OBOE_EARLIEST_SUPPORTED_VERSION  1
-#define OBOE_CURRENT_VERSION  2
-
-typedef struct OboeInterface_s {
-    int32_t size; // do not use size_t because its size can vary
-    int32_t version;
-    int32_t reserved1;
-    void *  reserved2;
-    oboe_result_t (*createStreamBuilder)(OboeStreamBuilder *);
-} OboeInterface_t;
-
-OboeInterface_t s_oboe_template = {
-        .size = sizeof(OboeInterface_t),
-        .version = OBOE_CURRENT_VERSION,
-        .reserved1 = 0,
-        .reserved2 = NULL,
-        .createStreamBuilder = Oboe_createStreamBuilder
-};
-
-oboe_result_t Oboe_Unimplemented(OboeInterface_t *oboe) {
-    (void) oboe;
-    return OBOE_ERROR_UNIMPLEMENTED;
-}
-
-typedef oboe_result_t (*OboeFunction_t)(OboeInterface_t *oboe);
-
-int32_t Oboe_Initialize(OboeInterface_t *oboe, uint32_t flags) {
-    if (oboe->version < OBOE_EARLIEST_SUPPORTED_VERSION) {
-        return OBOE_ERROR_INCOMPATIBLE;
-    }
-    // Fill in callers vector table.
-    uint8_t *start = (uint8_t*)&oboe->reserved1;
-    uint8_t *end;
-    if (oboe->size <= s_oboe_template.size) {
-        end = ((uint8_t *)oboe) + oboe->size;
-    } else {
-        end = ((uint8_t *)oboe) + s_oboe_template.size;
-        // Assume the rest of the structure is vectors.
-        // Point them all to OboeInternal_Unimplemented()
-        // Point to first vector past end of the known structure.
-        OboeFunction_t *next = (OboeFunction_t*)end;
-        while ((((uint8_t *)next) - ((uint8_t *)oboe)) < oboe->size) {
-            *next++ = Oboe_Unimplemented;
-        }
-    }
-    memcpy(&oboe->reserved1, &s_oboe_template.reserved1, end - start);
-    return OBOE_OK;
-}
-#endif /* OBOE_VERSION_EXPERIMENT -------------------------- */
diff --git a/media/libaaudio/src/fifo/FifoControllerBase.cpp b/media/libaaudio/src/fifo/FifoControllerBase.cpp
index ad6d041..e79bf96 100644
--- a/media/libaaudio/src/fifo/FifoControllerBase.cpp
+++ b/media/libaaudio/src/fifo/FifoControllerBase.cpp
@@ -21,7 +21,8 @@
 #include <stdint.h>
 #include "FifoControllerBase.h"
 
-using namespace android;  // TODO just import names needed
+using android::FifoControllerBase;
+using android::fifo_frames_t;
 
 FifoControllerBase::FifoControllerBase(fifo_frames_t capacity, fifo_frames_t threshold)
         : mCapacity(capacity)
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
index a3ce58c..611ddcd 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
@@ -135,10 +135,9 @@
     int coefficientIndex = 0;
     double phase = 0.0; // ranges from 0.0 to 1.0, fraction between samples
     // Stretch the sinc function for low pass filtering.
-    const float cutoffScaler = normalizedCutoff *
-            ((outputRate < inputRate)
-             ? ((float)outputRate / inputRate)
-             : ((float)inputRate / outputRate));
+    const float cutoffScaler = (outputRate < inputRate)
+             ? (normalizedCutoff * (float)outputRate / inputRate)
+             : 1.0f; // Do not filter when upsampling.
     const int numTapsHalf = getNumTaps() / 2; // numTaps must be even.
     const float numTapsHalfInverse = 1.0f / numTapsHalf;
     for (int i = 0; i < numRows; i++) {
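As a worked example of the cutoffScaler change above: downsampling 48000 Hz to 44100 Hz with the default normalizedCutoff of 0.70 now gives cutoffScaler = 0.70 * 44100 / 48000 ≈ 0.643, while any upsampling case (for example 44100 Hz to 48000 Hz) uses cutoffScaler = 1.0, so the anti-aliasing cutoff is no longer narrowed when upsampling.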
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
index 717f3fd..9e47335 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
@@ -111,6 +111,9 @@
          * Set lower to reduce aliasing.
          * Default is 0.70.
          *
+         * Note that this value is ignored when upsampling, which is when
+         * the outputRate is higher than the inputRate.
+         *
          * @param normalizedCutoff anti-aliasing filter cutoff
          * @return address of this builder for chaining calls
          */
@@ -227,6 +230,10 @@
 
     /**
      * Generate the filter coefficients in optimal order.
+     *
+     * Note that normalizedCutoff is ignored when upsampling, which is when
+     * the outputRate is higher than the inputRate.
+     *
      * @param inputRate sample rate of the input stream
      * @param outputRate  sample rate of the output stream
      * @param numRows number of rows in the array that contain a set of tap coefficients
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index e760dab..fe4bf2c 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -208,6 +208,12 @@
     setBufferCapacity(getBufferCapacityFromDevice());
     setFramesPerBurst(getFramesPerBurstFromDevice());
 
+    // Use the same values for device values.
+    setDeviceSamplesPerFrame(getSamplesPerFrame());
+    setDeviceSampleRate(mAudioRecord->getSampleRate());
+    setDeviceBufferCapacity(getBufferCapacityFromDevice());
+    setDeviceFramesPerBurst(getFramesPerBurstFromDevice());
+
     setHardwareSamplesPerFrame(mAudioRecord->getHalChannelCount());
     setHardwareSampleRate(mAudioRecord->getHalSampleRate());
     setHardwareFormat(mAudioRecord->getHalFormat());
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 67ee42e..59fdabc 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -203,6 +203,12 @@
     setBufferCapacity(getBufferCapacityFromDevice());
     setFramesPerBurst(getFramesPerBurstFromDevice());
 
+    // Use the same values for device values.
+    setDeviceSamplesPerFrame(getSamplesPerFrame());
+    setDeviceSampleRate(mAudioTrack->getSampleRate());
+    setDeviceBufferCapacity(getBufferCapacityFromDevice());
+    setDeviceFramesPerBurst(getFramesPerBurstFromDevice());
+
     setHardwareSamplesPerFrame(mAudioTrack->getHalChannelCount());
     setHardwareSampleRate(mAudioTrack->getHalSampleRate());
     setHardwareFormat(mAudioTrack->getHalFormat());
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index e8324a8..0cbf79d 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -383,11 +383,10 @@
                 return AUDIO_CHANNEL_OUT_7POINT1POINT2;
             case AAUDIO_CHANNEL_7POINT1POINT4:
                 return AUDIO_CHANNEL_OUT_7POINT1POINT4;
-            // TODO: add 9point1point4 and 9point1point6 when they are added in audio-hal-enums.h
-            // case AAUDIO_CHANNEL_9POINT1POINT4:
-            //     return AUDIO_CHANNEL_OUT_9POINT1POINT4;
-            // case AAUDIO_CHANNEL_9POINT1POINT6:
-            //     return AUDIO_CHANNEL_OUT_9POINT1POINT6;
+            case AAUDIO_CHANNEL_9POINT1POINT4:
+                return AUDIO_CHANNEL_OUT_9POINT1POINT4;
+            case AAUDIO_CHANNEL_9POINT1POINT6:
+                return AUDIO_CHANNEL_OUT_9POINT1POINT6;
             default:
                 ALOGE("%s() %#x unrecognized", __func__, channelMask);
                 return AUDIO_CHANNEL_INVALID;
@@ -465,11 +464,10 @@
                 return AAUDIO_CHANNEL_7POINT1POINT2;
             case AUDIO_CHANNEL_OUT_7POINT1POINT4:
                 return AAUDIO_CHANNEL_7POINT1POINT4;
-            // TODO: add 9point1point4 and 9point1point6 when they are added in audio-hal-enums.h
-            // case AUDIO_CHANNEL_OUT_9POINT1POINT4:
-            //     return AAUDIO_CHANNEL_9POINT1POINT4;
-            // case AUDIO_CHANNEL_OUT_9POINT1POINT6:
-            //     return AAUDIO_CHANNEL_9POINT1POINT6;
+            case AUDIO_CHANNEL_OUT_9POINT1POINT4:
+                return AAUDIO_CHANNEL_9POINT1POINT4;
+            case AUDIO_CHANNEL_OUT_9POINT1POINT6:
+                return AAUDIO_CHANNEL_9POINT1POINT6;
             default:
                 ALOGE("%s() %#x unrecognized", __func__, channelMask);
                 return AAUDIO_CHANNEL_INVALID;
diff --git a/media/libaaudio/src/utility/AudioClock.h b/media/libaaudio/src/utility/AudioClock.h
index d5d4ef4..37f5b39 100644
--- a/media/libaaudio/src/utility/AudioClock.h
+++ b/media/libaaudio/src/utility/AudioClock.h
@@ -33,7 +33,7 @@
 public:
     static int64_t getNanoseconds(clockid_t clockId = CLOCK_MONOTONIC) {
         struct timespec time;
-        int result = clock_gettime(clockId, &time);
+        const int result = clock_gettime(clockId, &time);
         if (result < 0) {
             return -errno;
         }
@@ -56,7 +56,7 @@
             time.tv_sec = nanoTime / AAUDIO_NANOS_PER_SECOND;
             // Calculate the fractional nanoseconds. Avoids expensive % operation.
             time.tv_nsec = nanoTime - (time.tv_sec * AAUDIO_NANOS_PER_SECOND);
-            int err = clock_nanosleep(clockId, TIMER_ABSTIME, &time, nullptr);
+            const int err = clock_nanosleep(clockId, TIMER_ABSTIME, &time, nullptr);
             switch (err) {
             case EINTR:
                 return 1;
@@ -86,7 +86,7 @@
             // Calculate the fractional nanoseconds. Avoids expensive % operation.
             time.tv_nsec = nanoseconds - (time.tv_sec * AAUDIO_NANOS_PER_SECOND);
             const int flags = 0; // documented as relative sleep
-            int err = clock_nanosleep(clockId, flags, &time, nullptr);
+            const int err = clock_nanosleep(clockId, flags, &time, nullptr);
             switch (err) {
             case EINTR:
                 return 1;
diff --git a/media/libaaudio/src/utility/MonotonicCounter.h b/media/libaaudio/src/utility/MonotonicCounter.h
index 51eb69b..b58634f 100644
--- a/media/libaaudio/src/utility/MonotonicCounter.h
+++ b/media/libaaudio/src/utility/MonotonicCounter.h
@@ -104,7 +104,7 @@
      */
     void roundUp64(int32_t period) {
         if (period > 0) {
-            int64_t numPeriods = (mCounter64 + period - 1) / period;
+            const int64_t numPeriods = (mCounter64 + period - 1) / period;
             mCounter64 = numPeriods * period;
         }
     }
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 24041bc..0cfdfb2 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -145,6 +145,7 @@
     srcs: ["test_flowgraph.cpp"],
     shared_libs: [
         "libaaudio_internal",
+        "libaudioutils",
         "libbinder",
         "libcutils",
         "libutils",
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index 6f75f5a..7eb8b0d 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -25,6 +25,8 @@
 
 #include <gtest/gtest.h>
 
+#include <aaudio/AAudio.h>
+#include "client/AAudioFlowGraph.h"
 #include "flowgraph/ClipToRange.h"
 #include "flowgraph/Limiter.h"
 #include "flowgraph/MonoBlend.h"
@@ -37,8 +39,18 @@
 #include "flowgraph/SinkI32.h"
 #include "flowgraph/SourceI16.h"
 #include "flowgraph/SourceI24.h"
+#include "flowgraph/resampler/IntegerRatio.h"
 
 using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+using TestFlowgraphResamplerParams = std::tuple<int32_t, int32_t, MultiChannelResampler::Quality>;
+
+enum {
+    PARAM_SOURCE_SAMPLE_RATE = 0,
+    PARAM_SINK_SAMPLE_RATE,
+    PARAM_RESAMPLER_QUALITY
+};
 
 constexpr int kBytesPerI24Packed = 3;
 
@@ -394,3 +406,240 @@
         EXPECT_NEAR(expected[i], output[i], tolerance);
     }
 }
+
+TEST(test_flowgraph, module_sinki16_multiple_reads) {
+    static constexpr int kNumSamples = 8;
+    std::array<int16_t, kNumSamples + 10> output; // larger than input
+
+    SourceFloat sourceFloat{1};
+    SinkI16 sinkI16{1};
+
+    sourceFloat.setData(kInputFloat.data(), kNumSamples);
+    sourceFloat.output.connect(&sinkI16.input);
+
+    output.fill(777);
+
+    // Read the first half of the data
+    int32_t numRead = sinkI16.read(output.data(), kNumSamples / 2);
+    ASSERT_EQ(kNumSamples / 2, numRead);
+    for (int i = 0; i < numRead; i++) {
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
+    }
+
+    // Read the rest of the data
+    numRead = sinkI16.read(output.data(), output.size());
+    ASSERT_EQ(kNumSamples / 2, numRead);
+    for (int i = 0; i < numRead; i++) {
+        EXPECT_EQ(kExpectedI16.at(i + kNumSamples / 2), output.at(i)) << ", i = " << i;
+    }
+}
+
+void checkSampleRateConversionVariedSizes(int32_t sourceSampleRate,
+                    int32_t sinkSampleRate,
+                    MultiChannelResampler::Quality resamplerQuality) {
+    AAudioFlowGraph flowgraph;
+    aaudio_result_t result = flowgraph.configure(AUDIO_FORMAT_PCM_FLOAT /* sourceFormat */,
+            1 /* sourceChannelCount */,
+            sourceSampleRate,
+            AUDIO_FORMAT_PCM_FLOAT /* sinkFormat */,
+            1 /* sinkChannelCount */,
+            sinkSampleRate,
+            false /* useMonoBlend */,
+            false /* useVolumeRamps */,
+            0.0f /* audioBalance */,
+            resamplerQuality);
+
+    IntegerRatio ratio(sourceSampleRate, sinkSampleRate);
+    ratio.reduce();
+
+    ASSERT_EQ(AAUDIO_OK, result);
+
+    const int inputSize = ratio.getNumerator();
+    const int outputSize = ratio.getDenominator();
+    float input[inputSize];
+    float output[outputSize];
+
+    for (int i = 0; i < inputSize; i++) {
+        input[i] = i * 1.0f / inputSize;
+    }
+
+    int inputUsed = 0;
+    int outputRead = 0;
+    int curInputSize = 1;
+
+    // Process the data with larger and larger input buffer sizes.
+    while (inputUsed < inputSize) {
+        outputRead += flowgraph.process((void *) (input + inputUsed),
+                curInputSize,
+                (void *) (output + outputRead),
+                outputSize - outputRead);
+        inputUsed += curInputSize;
+        curInputSize = std::min(curInputSize + 5, inputSize - inputUsed);
+    }
+
+    ASSERT_EQ(outputSize, outputRead);
+
+    for (int i = 1; i < outputSize; i++) {
+        // The first values of the flowgraph will be close to zero.
+        // Besides those, the values should be strictly increasing.
+        if (output[i - 1] > 0.01f) {
+            EXPECT_GT(output[i], output[i - 1]);
+        }
+    }
+}
+
+TEST(test_flowgraph, flowgraph_varied_sizes_all) {
+    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
+    const MultiChannelResampler::Quality qualities[] =
+    {
+        MultiChannelResampler::Quality::Fastest,
+        MultiChannelResampler::Quality::Low,
+        MultiChannelResampler::Quality::Medium,
+        MultiChannelResampler::Quality::High,
+        MultiChannelResampler::Quality::Best
+    };
+    for (int srcRate : rates) {
+        for (int destRate : rates) {
+            for (auto quality : qualities) {
+                if (srcRate != destRate) {
+                    checkSampleRateConversionVariedSizes(srcRate, destRate, quality);
+                }
+            }
+        }
+    }
+}
+
+void checkSampleRateConversionPullLater(int32_t sourceSampleRate,
+                    int32_t sinkSampleRate,
+                    MultiChannelResampler::Quality resamplerQuality) {
+    AAudioFlowGraph flowgraph;
+    aaudio_result_t result = flowgraph.configure(AUDIO_FORMAT_PCM_FLOAT /* sourceFormat */,
+            1 /* sourceChannelCount */,
+            sourceSampleRate,
+            AUDIO_FORMAT_PCM_FLOAT /* sinkFormat */,
+            1 /* sinkChannelCount */,
+            sinkSampleRate,
+            false /* useMonoBlend */,
+            false /* useVolumeRamps */,
+            0.0f /* audioBalance */,
+            resamplerQuality);
+
+    IntegerRatio ratio(sourceSampleRate, sinkSampleRate);
+    ratio.reduce();
+
+    ASSERT_EQ(AAUDIO_OK, result);
+
+    const int inputSize = ratio.getNumerator();
+    const int outputSize = ratio.getDenominator();
+    float input[inputSize];
+    float output[outputSize];
+
+    for (int i = 0; i < inputSize; i++) {
+        input[i] = i * 1.0f / inputSize;
+    }
+
+    // Read half the data with process.
+    int outputRead = flowgraph.process((void *) input,
+            inputSize,
+            (void *) output,
+            outputSize / 2);
+
+    ASSERT_EQ(outputSize / 2, outputRead);
+
+    // Now read the other half of the data with pull.
+    outputRead += flowgraph.pull(
+            (void *) (output + outputRead),
+            outputSize - outputRead);
+
+    ASSERT_EQ(outputSize, outputRead);
+    for (int i = 1; i < outputSize; i++) {
+        // The first values of the flowgraph will be close to zero.
+        // Besides those, the values should be strictly increasing.
+        if (output[i - 1] > 0.01f) {
+            EXPECT_GT(output[i], output[i - 1]);
+        }
+    }
+}
+
+// TODO: b/289508408 - Remove non-parameterized tests if they get noisy.
+TEST(test_flowgraph, flowgraph_pull_later_all) {
+    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
+    const MultiChannelResampler::Quality qualities[] =
+    {
+        MultiChannelResampler::Quality::Fastest,
+        MultiChannelResampler::Quality::Low,
+        MultiChannelResampler::Quality::Medium,
+        MultiChannelResampler::Quality::High,
+        MultiChannelResampler::Quality::Best
+    };
+    for (int srcRate : rates) {
+        for (int destRate : rates) {
+            for (auto quality : qualities) {
+                if (srcRate != destRate) {
+                    checkSampleRateConversionPullLater(srcRate, destRate, quality);
+                }
+            }
+        }
+    }
+}
+
+class TestFlowgraphSampleRateConversion : public ::testing::Test,
+                        public ::testing::WithParamInterface<TestFlowgraphResamplerParams> {
+};
+
+const char* resamplerQualityToString(MultiChannelResampler::Quality quality) {
+    switch (quality) {
+        case MultiChannelResampler::Quality::Fastest: return "FASTEST";
+        case MultiChannelResampler::Quality::Low: return "LOW";
+        case MultiChannelResampler::Quality::Medium: return "MEDIUM";
+        case MultiChannelResampler::Quality::High: return "HIGH";
+        case MultiChannelResampler::Quality::Best: return "BEST";
+    }
+    return "UNKNOWN";
+}
+
+static std::string getTestName(
+        const ::testing::TestParamInfo<TestFlowgraphResamplerParams>& info) {
+    return std::string()
+            + std::to_string(std::get<PARAM_SOURCE_SAMPLE_RATE>(info.param))
+            + "__" + std::to_string(std::get<PARAM_SINK_SAMPLE_RATE>(info.param))
+            + "__" + resamplerQualityToString(std::get<PARAM_RESAMPLER_QUALITY>(info.param));
+}
+
+TEST_P(TestFlowgraphSampleRateConversion, test_flowgraph_pull_later) {
+    checkSampleRateConversionPullLater(std::get<PARAM_SOURCE_SAMPLE_RATE>(GetParam()),
+            std::get<PARAM_SINK_SAMPLE_RATE>(GetParam()),
+            std::get<PARAM_RESAMPLER_QUALITY>(GetParam()));
+}
+
+TEST_P(TestFlowgraphSampleRateConversion, test_flowgraph_varied_sizes) {
+    checkSampleRateConversionVariedSizes(std::get<PARAM_SOURCE_SAMPLE_RATE>(GetParam()),
+            std::get<PARAM_SINK_SAMPLE_RATE>(GetParam()),
+            std::get<PARAM_RESAMPLER_QUALITY>(GetParam()));
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        test_flowgraph,
+        TestFlowgraphSampleRateConversion,
+        ::testing::Values(
+                TestFlowgraphResamplerParams({8000, 11025, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({8000, 48000, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({8000, 44100, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({11025, 24000, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({11025, 48000,
+                        MultiChannelResampler::Quality::Fastest}),
+                TestFlowgraphResamplerParams({11025, 48000, MultiChannelResampler::Quality::Low}),
+                TestFlowgraphResamplerParams({11025, 48000,
+                        MultiChannelResampler::Quality::Medium}),
+                TestFlowgraphResamplerParams({11025, 48000, MultiChannelResampler::Quality::High}),
+                TestFlowgraphResamplerParams({11025, 48000, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({11025, 44100, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({11025, 88200, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({16000, 48000, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({44100, 48000, MultiChannelResampler::Quality::Low}),
+                TestFlowgraphResamplerParams({44100, 48000, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({48000, 11025, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({48000, 44100, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({44100, 11025, MultiChannelResampler::Quality::Best})),
+        &getTestName
+);
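
The parameterized suite above relies on standard GTest value parameterization. As a reference for readers less familiar with that machinery, here is a minimal, self-contained sketch (illustrative names only, not AOSP code) of the same pattern: a tuple parameter, a custom name generator, and INSTANTIATE_TEST_SUITE_P, which is what yields test names such as 8000__48000__BEST above.

```
#include <gtest/gtest.h>
#include <string>
#include <tuple>

// Illustrative stand-in for TestFlowgraphResamplerParams: (sourceRate, sinkRate).
using RatePair = std::tuple<int, int>;

class RatePairTest : public ::testing::TestWithParam<RatePair> {};

TEST_P(RatePairTest, ratesDiffer) {
    const auto [srcRate, dstRate] = GetParam();
    EXPECT_NE(srcRate, dstRate);  // stands in for checkSampleRateConversion*()
}

// Custom name generator, analogous to getTestName() above.
static std::string rateName(const ::testing::TestParamInfo<RatePair>& info) {
    return std::to_string(std::get<0>(info.param)) + "__" +
           std::to_string(std::get<1>(info.param));
}

INSTANTIATE_TEST_SUITE_P(rates, RatePairTest,
        ::testing::Values(RatePair{8000, 48000}, RatePair{44100, 48000}),
        &rateName);
```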
diff --git a/media/libaaudio/tests/test_resampler.cpp b/media/libaaudio/tests/test_resampler.cpp
index 1e4f59c..13e4a20 100644
--- a/media/libaaudio/tests/test_resampler.cpp
+++ b/media/libaaudio/tests/test_resampler.cpp
@@ -101,14 +101,20 @@
         }
     }
 
+    // Flush out remaining frames from the flowgraph
+    while (!mcResampler->isWriteNeeded()) {
+        mcResampler->readNextFrame(output);
+        output++;
+        numRead++;
+    }
+
     ASSERT_LE(numRead, kNumOutputSamples);
     // Some frames are lost priming the FIR filter.
-    const int kMaxAlgorithmicFrameLoss = 16;
+    const int kMaxAlgorithmicFrameLoss = 5;
     EXPECT_GT(numRead, kNumOutputSamples - kMaxAlgorithmicFrameLoss);
 
     int sinkZeroCrossingCount = countZeroCrossingsWithHysteresis(outputBuffer.get(), numRead);
-    // Some cycles may get chopped off at the end.
-    const int kMaxZeroCrossingDelta = 3;
+    const int kMaxZeroCrossingDelta = std::max(sinkRate / sourceRate / 2, 1);
     EXPECT_LE(abs(sourceZeroCrossingCount - sinkZeroCrossingCount), kMaxZeroCrossingDelta);
 
     // Detect glitches by looking for spikes in the second derivative.
@@ -136,8 +142,7 @@
 
 
 TEST(test_resampler, resampler_scan_all) {
-    // TODO Add 64000, 88200, 96000 when they work. Failing now.
-    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000};
+    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
     const MultiChannelResampler::Quality qualities[] =
     {
         MultiChannelResampler::Quality::Fastest,
@@ -193,10 +198,9 @@
     checkResampler(11025, 44100, MultiChannelResampler::Quality::Best);
 }
 
-// TODO This fails because the output is very low.
-//TEST(test_resampler, resampler_11025_88200_best) {
-//    checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
-//}
+TEST(test_resampler, resampler_11025_88200_best) {
+    checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
+}
 
 TEST(test_resampler, resampler_16000_48000_best) {
     checkResampler(16000, 48000, MultiChannelResampler::Quality::Best);
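
The tolerance change above replaces a fixed delta of 3 with one derived from the rate ratio, so upsampling gets a looser bound while downsampling clamps to 1. A small sketch of the same arithmetic (integer division is intentional), checked against two of the rate pairs exercised by these tests:

```
#include <algorithm>

// Mirrors kMaxZeroCrossingDelta in the test above; integer division is intentional.
constexpr int maxZeroCrossingDelta(int sourceRate, int sinkRate) {
    return std::max(sinkRate / sourceRate / 2, 1);
}

static_assert(maxZeroCrossingDelta(8000, 48000) == 3);   // upsampling: looser bound
static_assert(maxZeroCrossingDelta(48000, 44100) == 1);  // downsampling: clamps to 1
```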
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 01e3d53..d35708c 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -336,6 +336,7 @@
     srcs: [
         "aidl/android/media/AudioAttributesEx.aidl",
         "aidl/android/media/AudioMix.aidl",
+        "aidl/android/media/AudioMixUpdate.aidl",
         "aidl/android/media/AudioMixerAttributesInternal.aidl",
         "aidl/android/media/AudioMixerBehavior.aidl",
         "aidl/android/media/AudioMixCallbackFlag.aidl",
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 6616197..5bfdd5f 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -20,6 +20,7 @@
 #include <utils/Log.h>
 
 #include <android/media/IAudioPolicyService.h>
+#include <android/media/AudioMixUpdate.h>
 #include <android/media/BnCaptureStateListener.h>
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
@@ -108,7 +109,7 @@
 }
 
 // establish binder interface to AudioFlinger service
-const sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
+const sp<IAudioFlinger> AudioSystem::getAudioFlingerImpl(bool canStartThreadPool = true) {
     sp<IAudioFlinger> af;
     sp<AudioFlingerClient> afc;
     bool reportNoError = false;
@@ -132,12 +133,10 @@
                 binder = gAudioFlingerBinder;
             } else {
                 sp<IServiceManager> sm = defaultServiceManager();
-                do {
-                    binder = sm->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
-                    if (binder != nullptr) break;
-                    ALOGW("AudioFlinger not published, waiting...");
-                    usleep(500000); // 0.5 s
-                } while (true);
+                binder = sm->waitForService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+                if (binder == nullptr) {
+                    return nullptr;
+                }
             }
             binder->linkToDeath(gAudioFlingerClient);
             const auto afs = interface_cast<media::IAudioFlingerService>(binder);
@@ -147,7 +146,9 @@
         afc = gAudioFlingerClient;
         af = gAudioFlinger;
         // Make sure callbacks can be received by gAudioFlingerClient
-        ProcessState::self()->startThreadPool();
+        if (canStartThreadPool) {
+            ProcessState::self()->startThreadPool();
+        }
     }
     const int64_t token = IPCThreadState::self()->clearCallingIdentity();
     af->registerClient(afc);
@@ -156,6 +157,14 @@
     return af;
 }
 
+const sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
+    return getAudioFlingerImpl();
+}
+
+const sp<IAudioFlinger> AudioSystem::get_audio_flinger_for_fuzzer() {
+    return getAudioFlingerImpl(false);
+}
+
 const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
     // calling get_audio_flinger() will initialize gAudioFlingerClient if needed
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
@@ -870,14 +879,10 @@
         Mutex::Autolock _l(gLockAPS);
         if (gAudioPolicyService == 0) {
             sp<IServiceManager> sm = defaultServiceManager();
-            sp<IBinder> binder;
-            do {
-                binder = sm->getService(String16("media.audio_policy"));
-                if (binder != 0)
-                    break;
-                ALOGW("AudioPolicyService not published, waiting...");
-                usleep(500000); // 0.5 s
-            } while (true);
+            sp<IBinder> binder = sm->waitForService(String16("media.audio_policy"));
+            if (binder == nullptr) {
+                return nullptr;
+            }
             if (gAudioPolicyServiceClient == NULL) {
                 gAudioPolicyServiceClient = new AudioPolicyServiceClient();
             }
@@ -1838,6 +1843,27 @@
     return statusTFromBinderStatus(aps->registerPolicyMixes(mixesAidl, registration));
 }
 
+status_t AudioSystem::updatePolicyMixes(
+        const std::vector<std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>&
+                mixesWithUpdates) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+
+    std::vector<media::AudioMixUpdate> updatesAidl;
+    updatesAidl.reserve(mixesWithUpdates.size());
+
+    for (const auto& update : mixesWithUpdates) {
+        media::AudioMixUpdate updateAidl;
+        updateAidl.audioMix = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioMix(update.first));
+        RETURN_STATUS_IF_ERROR(convertRange(update.second.begin(), update.second.end(),
+                                            std::back_inserter(updateAidl.newCriteria),
+                                            legacy2aidl_AudioMixMatchCriterion));
+        updatesAidl.emplace_back(updateAidl);
+    }
+
+    return statusTFromBinderStatus(aps->updatePolicyMixes(updatesAidl));
+}
+
 status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
@@ -2093,8 +2119,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>
-            & aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<AudioFormatDescription> formatsAidl;
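
The new AudioSystem::updatePolicyMixes() batches criterion updates for already-registered mixes into a single binder call. A hedged usage sketch, assuming the caller already has a registered AudioMix and its replacement criteria (the include set is an assumption; AudioMix and AudioMixMatchCriterion are declared in media/AudioPolicy.h):

```
#include <utility>
#include <vector>

#include <media/AudioPolicy.h>   // AudioMix, AudioMixMatchCriterion (assumed include path)
#include <media/AudioSystem.h>   // declares updatePolicyMixes(), see the header diff below

// Sketch only: `mix` and `newCriteria` are assumed to come from the caller.
android::status_t updateOneMix(
        const android::AudioMix& mix,
        const std::vector<android::AudioMixMatchCriterion>& newCriteria) {
    std::vector<std::pair<android::AudioMix,
                          std::vector<android::AudioMixMatchCriterion>>> updates;
    updates.emplace_back(mix, newCriteria);
    return android::AudioSystem::updatePolicyMixes(updates);
}
```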
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index f3539a1..8b8f7cd 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -817,7 +817,7 @@
     (void) updateAndGetPosition_l();
 
     // save start timestamp
-    if (isOffloadedOrDirect_l()) {
+    if (isAfTrackOffloadedOrDirect_l()) {
         if (getTimestamp_l(mStartTs) != OK) {
             mStartTs.mPosition = 0;
         }
@@ -838,7 +838,7 @@
         mTimestampStaleTimeReported = false;
         mPreviousLocation = ExtendedTimestamp::LOCATION_INVALID;
 
-        if (!isOffloadedOrDirect_l()
+        if (!isAfTrackOffloadedOrDirect_l()
                 && mStartEts.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] > 0) {
             // Server side has consumed something, but is it finished consuming?
             // It is possible since flush and stop are asynchronous that the server
@@ -1917,6 +1917,7 @@
     mAfChannelCount = audio_channel_count_from_out_mask(output.afChannelMask);
     mAfFormat = output.afFormat;
     mAfLatency = output.afLatencyMs;
+    mAfTrackFlags = output.afTrackFlags;
 
     mLatency = mAfLatency + (1000LL * mFrameCount) / mSampleRate;
 
@@ -3182,7 +3183,7 @@
     // To avoid a race, read the presented frames first.  This ensures that presented <= consumed.
 
     status_t status;
-    if (isOffloadedOrDirect_l()) {
+    if (isAfTrackOffloadedOrDirect_l()) {
         // use Binder to get timestamp
         media::AudioTimestampInternal ts;
         mAudioTrack->getTimestamp(&ts, &status);
@@ -3294,7 +3295,7 @@
         ALOGV_IF(status != WOULD_BLOCK, "%s(%d): getTimestamp error:%#x", __func__, mPortId, status);
         return status;
     }
-    if (isOffloadedOrDirect_l()) {
+    if (isAfTrackOffloadedOrDirect_l()) {
         if (isOffloaded_l() && (mState == STATE_PAUSED || mState == STATE_PAUSED_STOPPING)) {
             // use cached paused position in case another offloaded track is running.
             timestamp.mPosition = mPausedPosition;
@@ -3740,7 +3741,7 @@
     // This is conservatively figured - if we encounter an unexpected error
     // then we will not wait.
     bool wait = false;
-    if (isOffloadedOrDirect_l()) {
+    if (isAfTrackOffloadedOrDirect_l()) {
         AudioTimestamp ts;
         status_t status = getTimestamp_l(ts);
         if (status == WOULD_BLOCK) {
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 4bd12b8..515e708 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -116,6 +116,8 @@
             legacy2aidl_audio_channel_mask_t_AudioChannelLayout(afChannelMask, false /*isInput*/));
     aidl.afFormat = VALUE_OR_RETURN(
             legacy2aidl_audio_format_t_AudioFormatDescription(afFormat));
+    aidl.afTrackFlags = VALUE_OR_RETURN(
+            legacy2aidl_audio_output_flags_t_int32_t_mask(afTrackFlags));
     aidl.outputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(outputId));
     aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
     aidl.audioTrack = audioTrack;
@@ -144,6 +146,8 @@
                                                                 false /*isInput*/));
     legacy.afFormat = VALUE_OR_RETURN(
             aidl2legacy_AudioFormatDescription_audio_format_t(aidl.afFormat));
+    legacy.afTrackFlags = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_output_flags_t_mask(aidl.afTrackFlags));
     legacy.outputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.outputId));
     legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
     legacy.audioTrack = aidl.audioTrack;
diff --git a/media/libaudioclient/aidl/android/media/AudioMixUpdate.aidl b/media/libaudioclient/aidl/android/media/AudioMixUpdate.aidl
new file mode 100644
index 0000000..d481b1c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixUpdate.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioMix;
+import android.media.AudioMixMatchCriterion;
+
+
+/**
+ * {@hide}
+ */
+parcelable AudioMixUpdate {
+    // Audio mix to update.
+    AudioMix audioMix;
+    // Updated audio mixing rule.
+    AudioMixMatchCriterion[] newCriteria;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
index 42e0bb4..ab60461 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -43,6 +43,7 @@
     AudioChannelLayout afChannelMask;
     AudioFormatDescription afFormat;
     int afLatencyMs;
+    int afTrackFlags;
     /** Interpreted as audio_io_handle_t. */
     int outputId;
     /** Interpreted as audio_port_handle_t. */
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 3e9b27f..52c8da0 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -20,6 +20,7 @@
 
 import android.media.AudioDirectMode;
 import android.media.AudioMix;
+import android.media.AudioMixUpdate;
 import android.media.AudioMixerAttributesInternal;
 import android.media.AudioOffloadMode;
 import android.media.AudioPatchFw;
@@ -262,6 +263,8 @@
 
     void registerPolicyMixes(in AudioMix[] mixes, boolean registration);
 
+    void updatePolicyMixes(in AudioMixUpdate[] updates);
+
     void setUidDeviceAffinities(int /* uid_t */ uid, in AudioDevice[] devices);
 
     void removeUidDeviceAffinities(int /* uid_t */ uid);
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
new file mode 100644
index 0000000..67258d9
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_defaults {
+    name: "libaudioclient_aidl_fuzzer_defaults",
+    static_libs: [
+        "android.hardware.audio.common@7.0-enums",
+        "effect-aidl-cpp",
+        "liblog",
+        "libcgrouprc",
+        "libcgrouprc_format",
+        "libjsoncpp",
+        "libmediametricsservice",
+        "libmedia_helper",
+        "libprocessgroup",
+        "shared-file-region-aidl-cpp",
+        "libfakeservicemanager"
+    ],
+    shared_libs: [
+        "libaudioclient",
+        "libaudioflinger",
+        "libmediautils",
+        "libnblog",
+        "libaudioprocessing",
+        "libnbaio",
+        "libpowermanager",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
+        "android.hardware.audio.common-util",
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "capture_state_listener-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
+        "libaudiomanager",
+        "libaudiopolicy",
+        "libaudioutils",
+        "libdl",
+        "libxml2",
+        "mediametricsservice-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libvndksupport",
+        "libmediametrics",
+        "libfakeservicemanager",
+        "libactivitymanager_aidl",
+        "libheadtracking",
+        "libaudiopolicyservice",
+        "libsensorprivacy",
+        "libaudiopolicymanagerdefault",
+        "libaudiohal",
+        "libhidlbase",
+        "libpermission",
+        "libaudiohal@7.0",
+    ],
+    header_libs: [
+        "libaudiopolicymanager_interface_headers",
+        "libbinder_headers",
+        "libaudiofoundation_headers",
+        "libmedia_headers",
+        "libaudiohal_headers",
+        "libaudioflinger_headers",
+        "mediautils_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libaudioflinger",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
+}
+
+cc_fuzz {
+    name: "audioflinger_aidl_fuzzer",
+    srcs: ["audioflinger_aidl_fuzzer.cpp"],
+    defaults: [
+        "libaudioclient_aidl_fuzzer_defaults",
+        "service_fuzzer_defaults"
+    ],
+}
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
new file mode 100644
index 0000000..fac5f53
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -0,0 +1,218 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <AudioFlinger.h>
+#include <ISchedulingPolicyService.h>
+#include <fakeservicemanager/FakeServiceManager.h>
+#include <android-base/logging.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_process.h>
+#include <android/media/IAudioPolicyService.h>
+#include <binder/IActivityManager.h>
+#include <binder/IPermissionController.h>
+#include <binder/IServiceManager.h>
+#include <binder/PermissionController.h>
+#include <fuzzbinder/libbinder_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/IAudioFlinger.h>
+#include <mediautils/SchedulingPolicyService.h>
+#include <sensorprivacy/SensorPrivacyManager.h>
+#include <service/AudioPolicyService.h>
+
+using namespace android;
+using namespace android::binder;
+using android::fuzzService;
+
+static sp<media::IAudioFlingerService> gAudioFlingerService;
+
+class FuzzerSchedulingPolicyService : public BnInterface<ISchedulingPolicyService> {
+    int32_t requestPriority(int32_t /*pid_t*/, int32_t /*tid*/, int32_t /*prio*/, bool /*isForApp*/,
+                            bool /*asynchronous*/) {
+        return 0;
+    }
+
+    int32_t requestCpusetBoost(bool /*enable*/, const sp<IBinder>& /*client*/) { return 0; }
+};
+
+class FuzzerPermissionController : public BnInterface<IPermissionController> {
+  public:
+    bool checkPermission(const String16& /*permission*/, int32_t /*pid*/, int32_t /*uid*/) {
+        return true;
+    }
+    int32_t noteOp(const String16& /*op*/, int32_t /*uid*/, const String16& /*packageName*/) {
+        return 0;
+    }
+    void getPackagesForUid(const uid_t /*uid*/, Vector<String16>& /*packages*/) {}
+    bool isRuntimePermission(const String16& /*permission*/) { return true; }
+    int32_t getPackageUid(const String16& /*package*/, int /*flags*/) { return 0; }
+};
+
+class FuzzerSensorPrivacyManager : public BnInterface<hardware::ISensorPrivacyManager> {
+  public:
+    Status supportsSensorToggle(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status addSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status addToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status removeSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status removeToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status isSensorPrivacyEnabled(bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status isCombinedToggleSensorPrivacyEnabled(int32_t /*sensor*/,
+                                                bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status isToggleSensorPrivacyEnabled(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                        bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setSensorPrivacy(bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setToggleSensorPrivacy(int32_t /*userId*/, int32_t /*source*/, int32_t /*sensor*/,
+                                  bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setToggleSensorPrivacyForProfileGroup(int32_t /*userId*/, int32_t /*source*/,
+                                                 int32_t /*sensor*/, bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+};
+
+class FuzzerActivityManager : public BnInterface<IActivityManager> {
+  public:
+    int32_t openContentUri(const String16& /*stringUri*/) override { return 0; }
+
+    status_t registerUidObserver(const sp<IUidObserver>& /*observer*/, const int32_t /*event*/,
+                                 const int32_t /*cutpoint*/,
+                                 const String16& /*callingPackage*/) override {
+        return OK;
+    }
+
+    status_t unregisterUidObserver(const sp<IUidObserver>& /*observer*/) override { return OK; }
+
+    bool isUidActive(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return true;
+    }
+
+    int32_t getUidProcessState(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return ActivityManager::PROCESS_STATE_UNKNOWN;
+    }
+
+    status_t checkPermission(const String16& /*permission*/, const pid_t /*pid*/,
+                             const uid_t /*uid*/, int32_t* /*outResult*/) override {
+        return NO_ERROR;
+    }
+
+    status_t registerUidObserverForUids(const sp<IUidObserver>& /*observer*/ ,
+                                        const int32_t /*event*/ ,
+                                        const int32_t /*cutpoint*/ ,
+                                        const String16& /*callingPackage*/ ,
+                                        const int32_t uids[] ,
+                                        size_t /*nUids*/ ,
+                                        /*out*/ sp<IBinder>& /*observerToken*/ ) {
+        (void)uids;
+        return OK;
+    }
+
+    status_t addUidToObserver(const sp<IBinder>& /*observerToken*/ ,
+                              const String16& /*callingPackage*/ ,
+                              int32_t /*uid*/ ) override {
+        return NO_ERROR;
+    }
+
+    status_t removeUidFromObserver(const sp<IBinder>& /*observerToken*/ ,
+                                   const String16& /*callingPackage*/ ,
+                                   int32_t /*uid*/ ) override {
+        return NO_ERROR;
+    }
+
+    status_t logFgsApiBegin(int32_t /*apiType*/ , int32_t /*appUid*/ ,
+                            int32_t /*appPid*/ ) override {
+        return NO_ERROR;
+    }
+    status_t logFgsApiEnd(int32_t /*apiType*/ , int32_t /*appUid*/ ,
+                          int32_t /*appPid*/ ) override {
+        return NO_ERROR;
+    }
+    status_t logFgsApiStateChanged(int32_t /*apiType*/ , int32_t /*state*/ ,
+                                   int32_t /*appUid*/ ,
+                                   int32_t /*appPid*/ ) override {
+        return NO_ERROR;
+    }
+};
+
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+    /* Create a FakeServiceManager instance and add required services */
+    sp<FakeServiceManager> fakeServiceManager = new FakeServiceManager();
+    setDefaultServiceManager(fakeServiceManager);
+    ABinderProcess_setThreadPoolMaxThreadCount(0);
+    sp<FuzzerActivityManager> am = new FuzzerActivityManager();
+    fakeServiceManager->addService(String16("activity"), IInterface::asBinder(am));
+
+    sp<FuzzerSensorPrivacyManager> sensorPrivacyManager = new FuzzerSensorPrivacyManager();
+    fakeServiceManager->addService(String16("sensor_privacy"),
+                                   IInterface::asBinder(sensorPrivacyManager));
+    sp<FuzzerPermissionController> permissionController = new FuzzerPermissionController();
+    fakeServiceManager->addService(String16("permission"),
+                                   IInterface::asBinder(permissionController));
+
+    sp<FuzzerSchedulingPolicyService> schedulingService = new FuzzerSchedulingPolicyService();
+    fakeServiceManager->addService(String16("scheduling_policy"),
+                                   IInterface::asBinder(schedulingService));
+
+    const auto audioFlingerObj = sp<AudioFlinger>::make();
+    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlingerObj);
+
+    fakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                   IInterface::asBinder(afAdapter), false /* allowIsolated */,
+                                   IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+
+    const auto audioPolicyService = sp<AudioPolicyService>::make();
+    fakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+                                   false /* allowIsolated */,
+                                   IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+
+    sp<IBinder> binder =
+            fakeServiceManager->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+    gAudioFlingerService = interface_cast<media::IAudioFlingerService>(binder);
+    return 0;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    if (!gAudioFlingerService) {
+        return 0;
+    }
+
+    fuzzService(media::IAudioFlingerService::asBinder(gAudioFlingerService),
+                FuzzedDataProvider(data, size));
+
+    return 0;
+}
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 8f8c9dd..a1f7941 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -178,6 +178,7 @@
 
     // helper function to obtain AudioFlinger service handle
     static const sp<IAudioFlinger> get_audio_flinger();
+    static const sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
 
     static float linearToLog(int volume);
     static int logToLinear(float volume);
@@ -461,6 +462,10 @@
 
     static status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
 
+    static status_t updatePolicyMixes(
+        const std::vector<
+                std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>& mixesWithUpdates);
+
     static status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices);
 
     static status_t removeUidDeviceAffinities(uid_t uid);
@@ -876,6 +881,7 @@
     static audio_io_handle_t getOutput(audio_stream_type_t stream);
     static const sp<AudioFlingerClient> getAudioFlingerClient();
     static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+    static const sp<IAudioFlinger> getAudioFlingerImpl(bool canStartThreadPool);
 
     // Invokes all registered error callbacks with the given error code.
     static void reportError(status_t err);
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 8f712db..523383f 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1238,6 +1238,11 @@
             bool     isDirect_l() const
                 { return (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0; }
 
+            bool     isAfTrackOffloadedOrDirect_l() const
+                { return isOffloadedOrDirect_l() ||
+                        (mAfTrackFlags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD|
+                                AUDIO_OUTPUT_FLAG_DIRECT)) != 0; }
+
             // pure pcm data is mixable (which excludes HW_AV_SYNC, with embedded timing)
             bool     isPurePcmData_l() const
                 { return audio_is_linear_pcm(mFormat)
@@ -1295,6 +1300,7 @@
     uint32_t                mAfSampleRate;          // AudioFlinger sample rate
     uint32_t                mAfChannelCount;        // AudioFlinger channel count
     audio_format_t          mAfFormat;              // AudioFlinger format
+    audio_output_flags_t    mAfTrackFlags;          // AudioFlinger track flags
 
     // constant after constructor or set()
     audio_format_t          mFormat;                // as requested by client, not forced to 16-bit
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 3c96862..35a3208 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -119,6 +119,7 @@
         uint32_t afLatencyMs;
         audio_channel_mask_t afChannelMask;
         audio_format_t afFormat;
+        audio_output_flags_t afTrackFlags;
         audio_io_handle_t outputId;
         audio_port_handle_t portId;
         sp<media::IAudioTrack> audioTrack;
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 19d1abc..e6916cc 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -53,7 +53,7 @@
         ASSERT_NE(nullptr, ap);
         ASSERT_EQ(OK, ap->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
                 << "Unable to open Resource";
-        EXPECT_EQ(OK, ap->create()) << "track creation failed";
+        ASSERT_EQ(OK, ap->create()) << "track creation failed";
         sp<OnAudioDeviceUpdateNotifier> cb = sp<OnAudioDeviceUpdateNotifier>::make();
         EXPECT_EQ(OK, ap->getAudioTrackHandle()->addAudioDeviceCallback(cb));
         EXPECT_EQ(OK, ap->start()) << "audio track start failed";
@@ -87,7 +87,7 @@
     sp<AudioCapture> capture = sp<AudioCapture>::make(
             AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO);
     ASSERT_NE(nullptr, capture);
-    EXPECT_EQ(OK, capture->create()) << "record creation failed";
+    ASSERT_EQ(OK, capture->create()) << "record creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbCapture = sp<OnAudioDeviceUpdateNotifier>::make();
     EXPECT_EQ(OK, capture->getAudioRecordHandle()->addAudioDeviceCallback(cbCapture));
 
@@ -98,7 +98,7 @@
     ASSERT_NE(nullptr, playback);
     ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
             << "Unable to open Resource";
-    EXPECT_EQ(OK, playback->create()) << "track creation failed";
+    ASSERT_EQ(OK, playback->create()) << "track creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbPlayback = sp<OnAudioDeviceUpdateNotifier>::make();
     EXPECT_EQ(OK, playback->getAudioTrackHandle()->addAudioDeviceCallback(cbPlayback));
 
@@ -180,7 +180,7 @@
     ASSERT_NE(nullptr, playback);
     ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
             << "Unable to open Resource";
-    EXPECT_EQ(OK, playback->create()) << "track creation failed";
+    ASSERT_EQ(OK, playback->create()) << "track creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbPlayback = sp<OnAudioDeviceUpdateNotifier>::make();
     EXPECT_EQ(OK, playback->getAudioTrackHandle()->addAudioDeviceCallback(cbPlayback));
 
@@ -188,7 +188,7 @@
     sp<AudioCapture> captureA = sp<AudioCapture>::make(
             AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO);
     ASSERT_NE(nullptr, captureA);
-    EXPECT_EQ(OK, captureA->create()) << "record creation failed";
+    ASSERT_EQ(OK, captureA->create()) << "record creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbCaptureA = sp<OnAudioDeviceUpdateNotifier>::make();
     EXPECT_EQ(OK, captureA->getAudioRecordHandle()->addAudioDeviceCallback(cbCaptureA));
 
@@ -199,7 +199,7 @@
             AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
             AUDIO_INPUT_FLAG_NONE, AUDIO_SESSION_ALLOCATE, AudioRecord::TRANSFER_CALLBACK, &attr);
     ASSERT_NE(nullptr, captureB);
-    EXPECT_EQ(OK, captureB->create()) << "record creation failed";
+    ASSERT_EQ(OK, captureB->create()) << "record creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbCaptureB = sp<OnAudioDeviceUpdateNotifier>::make();
     EXPECT_EQ(OK, captureB->getAudioRecordHandle()->addAudioDeviceCallback(cbCaptureB));
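
The promotions from EXPECT_EQ to ASSERT_EQ above matter because each handle is used immediately after create(): a failed ASSERT_* returns from the test body, while a failed EXPECT_* only records the failure and keeps executing. A minimal standalone illustration of the pattern (not AOSP code):

```
#include <gtest/gtest.h>
#include <memory>

static std::unique_ptr<int> create() { return std::make_unique<int>(7); }

TEST(AssertVsExpect, guardBeforeUse) {
    auto handle = create();
    ASSERT_NE(nullptr, handle) << "creation failed";  // aborts this test body if null
    EXPECT_EQ(7, *handle);  // only reached when the handle is valid
}
```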
 
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 2ba1fc3..085a7e4 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -41,8 +41,8 @@
 namespace android {
 
 void initFrameInfo(HeifFrameInfo *info, const VideoFrame *videoFrame) {
-    info->mWidth = videoFrame->mWidth;
-    info->mHeight = videoFrame->mHeight;
+    info->mWidth = videoFrame->mDisplayWidth;
+    info->mHeight = videoFrame->mDisplayHeight;
     info->mRotationAngle = videoFrame->mRotationAngle;
     info->mBytesPerPixel = videoFrame->mBytesPerPixel;
     info->mDurationUs = videoFrame->mDurationUs;
@@ -476,35 +476,37 @@
 }
 
 bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
-    if (heifColor == (HeifColorFormat)mOutputColor) {
-        return true;
-    }
-
+    android_pixel_format_t outputColor;
     switch(heifColor) {
         case kHeifColorFormat_RGB565:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGB_565;
+            outputColor = HAL_PIXEL_FORMAT_RGB_565;
             break;
         }
         case kHeifColorFormat_RGBA_8888:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGBA_8888;
+            outputColor = HAL_PIXEL_FORMAT_RGBA_8888;
             break;
         }
         case kHeifColorFormat_BGRA_8888:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_BGRA_8888;
+            outputColor = HAL_PIXEL_FORMAT_BGRA_8888;
             break;
         }
         case kHeifColorFormat_RGBA_1010102:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
+            outputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
             break;
         }
         default:
             ALOGE("Unsupported output color format %d", heifColor);
             return false;
     }
+    if (outputColor == mOutputColor) {
+        return true;
+    }
+
+    mOutputColor = outputColor;
 
     if (mFrameDecoded) {
         return reinit(nullptr);
@@ -740,8 +742,11 @@
     //       Either document why it is safe in this case or address the
     //       issue (e.g. by copying).
     VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->unsecurePointer());
-    uint8_t* src = videoFrame->getFlattenedData() + videoFrame->mRowBytes * mCurScanline++;
-    memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mWidth);
+    uint8_t* src = videoFrame->getFlattenedData() +
+                   (videoFrame->mRowBytes * (mCurScanline + videoFrame->mDisplayTop)) +
+                   (videoFrame->mBytesPerPixel * videoFrame->mDisplayLeft);
+    mCurScanline++;
+    memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mDisplayWidth);
     return true;
 }
 
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index c43ef66..f498453 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -38,16 +38,10 @@
     Mutex::Autolock _l(sServiceLock);
     if (sMediaPlayerService == 0) {
         sp<IServiceManager> sm = defaultServiceManager();
-        sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16("media.player"));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("Media player service not published, waiting...");
-            usleep(500000); // 0.5 s
-        } while (true);
-
+        sp<IBinder> binder = sm->waitForService(String16("media.player"));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (sDeathNotifier == NULL) {
             sDeathNotifier = new DeathNotifier();
         }
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 427a3bd..9731ea4 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -41,14 +41,10 @@
     if (sService == 0) {
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16("media.player"));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("MediaPlayerService not published, waiting...");
-            usleep(500000); // 0.5 s
-        } while (true);
+        binder = sm->waitForService(String16("media.player"));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (sDeathNotifier == NULL) {
             sDeathNotifier = new DeathNotifier();
         }
diff --git a/media/libmediametrics/MediaMetrics.cpp b/media/libmediametrics/MediaMetrics.cpp
index 2240223..26fe306 100644
--- a/media/libmediametrics/MediaMetrics.cpp
+++ b/media/libmediametrics/MediaMetrics.cpp
@@ -87,7 +87,7 @@
 }
 
 void mediametrics_setString(mediametrics_handle_t handle, attr_t attr,
-                                 const std::string &string) {
+                            const std::string &string) {
     mediametrics_setCString(handle, attr, string.c_str());
 }
 
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index f80a467..26aa375 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -184,6 +184,7 @@
 #define AMEDIAMETRICS_PROP_PLAYERIID      "playerIId"      // int32 (-1 invalid/unset IID)
 #define AMEDIAMETRICS_PROP_ROUTEDDEVICEID "routedDeviceId" // int32
 #define AMEDIAMETRICS_PROP_SAMPLERATE     "sampleRate"     // int32
+#define AMEDIAMETRICS_PROP_SAMPLERATECLIENT "sampleRateClient" // int32
 #define AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE "sampleRateHardware" // int32
 #define AMEDIAMETRICS_PROP_SELECTEDDEVICEID "selectedDeviceId" // int32
 #define AMEDIAMETRICS_PROP_SELECTEDMICDIRECTION "selectedMicDirection" // int32
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 84d772d..605e659 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -672,6 +672,18 @@
                     if (trackMeta->findInt32(kKeyWidth, &imageWidth)
                         && trackMeta->findInt32(kKeyHeight, &imageHeight)) {
                         imagePrimary = imageCount;
+                        int32_t displayLeft;
+                        int32_t displayTop;
+                        int32_t displayRight;
+                        int32_t displayBottom;
+                        if (trackMeta->findRect(kKeyCropRect, &displayLeft, &displayTop,
+                                                &displayRight, &displayBottom)
+                            && displayLeft >= 0 && displayTop >= 0 && displayRight < imageWidth
+                            && displayBottom < imageHeight && displayLeft <= displayRight
+                            && displayTop <= displayBottom) {
+                            imageWidth = displayRight - displayLeft + 1;
+                            imageHeight = displayBottom - displayTop + 1;
+                        }
                     } else {
                         ALOGE("primary image track ignored for missing dimensions");
                     }
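
The `+ 1` in the width and height computation above reflects that kKeyCropRect stores inclusive pixel coordinates. A small compile-time sanity check of that convention, using illustrative values only:

```
#include <cstdint>

// A full-frame "crop" of a 1920x1080 image is (left=0, top=0, right=1919, bottom=1079).
constexpr int32_t kLeft = 0, kTop = 0, kRight = 1919, kBottom = 1079;
static_assert(kRight - kLeft + 1 == 1920, "right edge is inclusive");
static_assert(kBottom - kTop + 1 == 1080, "bottom edge is inclusive");
```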
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index c3d6c89..b511372 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -174,6 +174,7 @@
         "libnetd_client",
         "libpowermanager",
         "libstagefright_httplive",
+        "libaudiohal@7.0",
     ],
 }
 
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 89e9806..434ae00 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -49,7 +49,7 @@
     defaults: ["libnbaio_mono_defaults"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libnbaio",
     defaults: ["libnbaio_mono_defaults"],
     srcs: [
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index a26fcbe..0af9d12 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -270,10 +270,10 @@
         "SurfaceUtils.cpp",
         "ThrottledSource.cpp",
         "Utils.cpp",
-        "VideoRenderQualityTracker.cpp",
         "VideoFrameSchedulerBase.cpp",
         "VideoFrameScheduler.cpp",
-    ],
+        "VideoRenderQualityTracker.cpp",
+      ],
 
     shared_libs: [
         "libstagefright_framecapture_utils",
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index b5bd975..1a0bb7f 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -16,7 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "FrameDecoder"
-
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include "include/FrameDecoder.h"
 #include "include/FrameCaptureLayer.h"
 #include "include/HevcUtils.h"
@@ -41,6 +41,7 @@
 #include <media/stagefright/Utils.h>
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 namespace android {
 
@@ -85,6 +86,22 @@
         displayWidth = width;
         displayHeight = height;
     }
+    int32_t displayLeft = 0;
+    int32_t displayTop = 0;
+    int32_t displayRight;
+    int32_t displayBottom;
+    if (trackMeta->findRect(kKeyCropRect, &displayLeft, &displayTop, &displayRight,
+                            &displayBottom)) {
+        if (displayLeft >= 0 && displayTop >= 0 && displayRight < width && displayBottom < height &&
+            displayLeft <= displayRight && displayTop <= displayBottom) {
+            displayWidth = displayRight - displayLeft + 1;
+            displayHeight = displayBottom - displayTop + 1;
+        } else {
+            // Crop rectangle is invalid, use the whole frame.
+            displayLeft = 0;
+            displayTop = 0;
+        }
+    }
 
     if (allocRotated) {
         if (rotationAngle == 90 || rotationAngle == 270) {
@@ -107,8 +124,8 @@
         }
     }
 
-    VideoFrame frame(width, height, displayWidth, displayHeight,
-            tileWidth, tileHeight, rotationAngle, dstBpp, bitDepth, !metaOnly, iccSize);
+    VideoFrame frame(width, height, displayWidth, displayHeight, displayLeft, displayTop, tileWidth,
+                     tileHeight, rotationAngle, dstBpp, bitDepth, !metaOnly, iccSize);
 
     size_t size = frame.getFlattenedSize();
     sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
@@ -340,6 +357,7 @@
 }
 
 sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
+    ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ExtractFrame");
     status_t err = onExtractRect(rect);
     if (err == OK) {
         err = extractInternal();
@@ -713,6 +731,7 @@
     }
     converter.setSrcColorSpace(standard, range, transfer);
     if (converter.isValid()) {
+        ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ColorConverter");
         converter.convert(
                 (const uint8_t *)videoFrameBuffer->data(),
                 width, height, stride,
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 7e645b0..07f2ce9 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -31,7 +31,6 @@
 #include "include/SoftwareRenderer.h"
 
 #include <android/api-level.h>
-#include <android/binder_manager.h>
 #include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -74,7 +73,6 @@
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaCodecList.h>
-#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/OMXClient.h>
@@ -245,7 +243,7 @@
         "android.media.mediacodec.judder-score-histogram-buckets";
 // Freeze event
 static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
-static const char *kFreezeEventKeyName = "freeze";
+static const char *kFreezeEventKeyName = "videofreeze";
 static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
 static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
 static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
@@ -257,7 +255,7 @@
         "android.media.mediacodec.freeze.details-distance-ms";
 // Judder event
 static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
-static const char *kJudderEventKeyName = "judder";
+static const char *kJudderEventKeyName = "videojudder";
 static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
 static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
 static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index a464504..946d533 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -72,8 +72,8 @@
     void setCrypto(const sp<ICrypto> &crypto) override;
     void setDescrambler(const sp<IDescrambler> &descrambler) override;
 
-    virtual status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
-    virtual status_t queueSecureInputBuffer(
+    status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    status_t queueSecureInputBuffer(
             const sp<MediaCodecBuffer> &buffer,
             bool secure,
             const uint8_t *key,
@@ -83,10 +83,10 @@
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
             AString *errorDetailMsg) override;
-    virtual status_t attachBuffer(
+    status_t attachBuffer(
             const std::shared_ptr<C2Buffer> &c2Buffer,
             const sp<MediaCodecBuffer> &buffer) override;
-    virtual status_t attachEncryptedBuffer(
+    status_t attachEncryptedBuffer(
             const sp<hardware::HidlMemory> &memory,
             bool secure,
             const uint8_t *key,
@@ -98,12 +98,12 @@
             size_t numSubSamples,
             const sp<MediaCodecBuffer> &buffer,
             AString* errorDetailMsg) override;
-    virtual status_t renderOutputBuffer(
+    status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
-    virtual void pollForRenderedBuffers() override;
-    virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
-    virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
-    virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+    void pollForRenderedBuffers() override;
+    status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+    void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
 
     // Methods below are interface for ACodec to use.
 
diff --git a/media/libstagefright/include/media/stagefright/MediaHistogram.h b/media/libstagefright/include/media/stagefright/MediaHistogram.h
index 50fa258..46ee288 100644
--- a/media/libstagefright/include/media/stagefright/MediaHistogram.h
+++ b/media/libstagefright/include/media/stagefright/MediaHistogram.h
@@ -29,11 +29,11 @@
 public:
     MediaHistogram();
     void clear();
-    bool setup(int bucketCount, T width, T floor = 0);
+    bool setup(size_t bucketCount, T width, T floor = 0);
     bool setup(const std::vector<T> &bucketLimits);
     void insert(T sample);
-    size_t size();
-    int64_t operator[](int);
+    size_t size() const;
+    int64_t operator[](int) const;
     T getMin() const { return mMin; }
     T getMax() const { return mMax; }
     T getCount() const { return mCount; }
@@ -45,7 +45,7 @@
 private:
     MediaHistogram(const MediaHistogram &); // disallow
 
-    bool allocate(int bucketCount, bool withBucketLimits);
+    void allocate(size_t bucketCount, bool withBucketLimits);
 
     T mFloor, mCeiling, mWidth;
     T mMin, mMax, mSum;
@@ -73,13 +73,12 @@
 }
 
 template<typename T>
-bool MediaHistogram<T>::setup(int bucketCount, T width, T floor) {
+bool MediaHistogram<T>::setup(size_t bucketCount, T width, T floor) {
     if (bucketCount <= 0 || width <= 0) {
         return false;
     }
-    if (!allocate(bucketCount, false)) {
-        return false;
-    }
+    allocate(bucketCount, false);
+
     mWidth = width;
     mFloor = floor;
     mCeiling = floor + bucketCount * width;
@@ -92,14 +91,14 @@
     if (bucketLimits.size() <= 1) {
         return false;
     }
-    int bucketCount = bucketLimits.size() - 1;
-    if (!allocate(bucketCount, true)) {
-        return false;
-    }
+    // The floor is the first bucket limit value, so offset by 1
+    size_t bucketCount = bucketLimits.size() - 1;
+    allocate(bucketCount, true);
 
     mWidth = -1;
     mFloor = bucketLimits[0];
-    for (int i = 0; i < bucketCount; ++i) {
+    for (size_t i = 0; i < bucketCount; ++i) {
+        // bucketLimits[0] is the floor, so upper bucket limits start at index 1
         mBucketLimits[i] = bucketLimits[i + 1];
     }
     mCeiling = bucketLimits[bucketCount];
@@ -108,7 +107,7 @@
 }
 
 template<typename T>
-bool MediaHistogram<T>::allocate(int bucketCount, bool withBucketLimits) {
+void MediaHistogram<T>::allocate(size_t bucketCount, bool withBucketLimits) {
     assert(bucketCount > 0);
     if (bucketCount != mBuckets.size()) {
         mBuckets = std::vector<T>(bucketCount, 0);
@@ -116,7 +115,6 @@
     if (withBucketLimits && mBucketLimits.size() != bucketCount) {
         mBucketLimits = std::vector<T>(bucketCount, 0);
     }
-    return true;
 }
 
 template<typename T>
@@ -128,8 +126,8 @@
 
     mCount++;
     mSum += sample;
-    if (mMin > sample) mMin = sample;
-    if (mMax < sample) mMax = sample;
+    mMin = std::min(mMin, sample);
+    mMax = std::max(mMax, sample);
 
     if (sample < mFloor) {
         mBelow++;
@@ -138,7 +136,7 @@
     } else if (mWidth == -1) {
         // A binary search might be more efficient for large number of buckets, but it is expected
         // that there will never be a large amount of buckets, so keep the code simple.
-        for (int slot = 0; slot < mBucketLimits.size(); ++slot) {
+        for (size_t slot = 0; slot < mBucketLimits.size(); ++slot) {
             if (sample < mBucketLimits[slot]) {
                 mBuckets[slot]++;
                 break;
@@ -153,12 +151,12 @@
 }
 
 template<typename T>
-size_t MediaHistogram<T>::size() {
+size_t MediaHistogram<T>::size() const {
     return mBuckets.size() + 1;
 }
 
 template<typename T>
-int64_t MediaHistogram<T>::operator[](int i) {
+int64_t MediaHistogram<T>::operator[](int i) const {
     assert(i >= 0);
     assert(i <= mBuckets.size());
     if (i == mBuckets.size()) {
@@ -179,7 +177,7 @@
     } else {
         ss << mFloor << "," << mWidth << "," << mBelow << "{";
     }
-    for (int i = 0; i < mBuckets.size(); i++) {
+    for (size_t i = 0; i < mBuckets.size(); i++) {
         if (i != 0) {
             ss << ",";
         }
@@ -194,12 +192,12 @@
     std::stringstream ss("");
     if (mWidth == -1) {
         ss << mFloor;
-        for (int i = 0; i < mBucketLimits.size(); ++i) {
+        for (size_t i = 0; i < mBucketLimits.size(); ++i) {
             ss << ',' << mBucketLimits[i];
         }
     } else {
         ss << mFloor;
-        for (int i = 1; i <= mBuckets.size(); ++i) {
+        for (size_t i = 1; i <= mBuckets.size(); ++i) {
             ss << ',' << (mFloor + i * mWidth);
         }
     }
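
A short usage sketch of MediaHistogram, based only on the interface shown above (the bucket placement comments describe the expected behavior, not behavior verified against the full header):

```
#include <media/stagefright/MediaHistogram.h>

#include <vector>

void mediaHistogramSketch() {
    android::MediaHistogram<int64_t> fixedWidth;
    fixedWidth.setup(/*bucketCount=*/10, /*width=*/20, /*floor=*/0);  // [0,20), [20,40), ...
    fixedWidth.insert(35);   // expected to land in the 20..40 bucket
    fixedWidth.insert(-5);   // below the floor, tallied separately

    android::MediaHistogram<int64_t> customLimits;
    // bucketLimits[0] is the floor; each later value is a bucket's upper limit.
    customLimits.setup(std::vector<int64_t>{0, 10, 100, 1000});
    customLimits.insert(42); // expected to land in the 10..100 bucket
}
```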
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
new file mode 100644
index 0000000..a2791ba
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -0,0 +1,89 @@
+/*
+* Copyright (C) 2023 The Android Open Source Project
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at:
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+cc_defaults {
+    name: "libstagefright_rtsp_fuzzer_defaults",
+    shared_libs: [
+        "liblog",
+        "libmedia",
+        "libutils",
+        "libstagefright_foundation",
+    ],
+    static_libs: [
+        "libdatasource",
+        "libstagefright_rtsp",
+    ],
+    header_libs: [
+        "libstagefright_rtsp_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "sdploader_fuzzer",
+    srcs: [
+        "sdploader_fuzzer.cpp",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "rtp_writer_fuzzer",
+    srcs: [
+        "rtp_writer_fuzzer.cpp",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ],
+    shared_libs: [
+        "libandroid_net",
+        "libbase",
+        "libstagefright",
+        "libcutils",
+    ],
+}
+
+cc_fuzz {
+    name: "packet_source_fuzzer",
+    srcs: [
+        "packet_source_fuzzer.cpp",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "rtsp_connection_fuzzer",
+    srcs: [
+        "rtsp_connection_fuzzer.cpp",
+    ],
+    shared_libs: [
+        "libcrypto",
+        "libcutils",
+        "libnetd_client",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ],
+}
diff --git a/media/libstagefright/rtsp/fuzzer/README.md b/media/libstagefright/rtsp/fuzzer/README.md
new file mode 100644
index 0000000..bc7be29
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/README.md
@@ -0,0 +1,117 @@
+# Fuzzers for libstagefright_rtsp
+
+## Table of contents
++ [sdploader_fuzzer](#SDPLoader)
++ [rtp_writer_fuzzer](#ARTPWriter)
++ [packet_source_fuzzer](#packetSource)
++ [rtsp_connection_fuzzer](#ARTSPConnection)
+
+# <a name="SDPLoader"></a> Fuzzer for SDPLoader
+
+SDPLoader supports the following parameters:
+1. Flag (parameter name: "flags")
+2. URL (parameter name: "url")
+3. Header (parameter name: "headers")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`flags`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`url`| `String` |Value obtained from FuzzedDataProvider|
+|`headers`| `String` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) sdploader_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/sdploader_fuzzer/sdploader_fuzzer
+```
+
+# <a name="ARTPWriter"></a> Fuzzer for ARTPWriter
+
+ARTPWriter supports the following parameters:
+1. File descriptor (parameter name: "fd")
+2. Local Ip (parameter name: "localIp")
+3. Local Port (parameter name: "localPort")
+4. Remote Ip (parameter name: "remoteIp")
+5. Remote Port (parameter name: "remotePort")
+6. Sequence No (parameter name: "seqNo")
+7. OpponentID (parameter name: "opponentID")
+8. Bit Rate (parameter name: "bitrate")
+9. kKeyMIMETypeArray (parameter name: "mimeType")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`localIp`| `String` |Value obtained from FuzzedDataProvider|
+|`localPort`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`remoteIp`| `String` |Value obtained from FuzzedDataProvider|
+|`remotePort`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`seqNo`| `0`  to  `10000000` |Value obtained from FuzzedDataProvider|
+|`opponentID`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`bitrate`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`mimeType`| 0. `MEDIA_MIMETYPE_VIDEO_AVC`<br> 1. `MEDIA_MIMETYPE_VIDEO_HEVC`<br> 2. `MEDIA_MIMETYPE_VIDEO_H263`<br> 3. `MEDIA_MIMETYPE_AUDIO_AMR_NB`<br> 4. `MEDIA_MIMETYPE_AUDIO_AMR_WB`|Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) rtp_writer_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/rtp_writer_fuzzer/rtp_writer_fuzzer
+```
+
+# <a name="packetSource"></a> Fuzzer for  PacketSource
+
+ PacketSource supports the following parameters:
+1. Codec (parameter name: "kCodecs")
+2. Format (parameter name: "kFmtp")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kCodecs`| 0. `opus`<br/>1. `ISAC`<br/>2. `VP8`<br/>3. `google-data`<br/>4. `G722`<br/>5. `PCMU`<br/>6. `PCMA`<br/>7. `CN`<br/>8. `telephone-event`<br/>9. `VP9`<br/>10. `red`<br/>11. `ulpfec`<br/>12. `rtx`<br/>13. `H264`<br/>14. `iLBC`<br/>15. `H261`<br/>16. `MPV`<br/>17. `H263`<br/>18. `AMR`<br/>19. `AC3`<br/>20. `G723`<br/>21. `G729A`<br/>22. `MP4V-ES`<br/>23. `H265`<br/>24. `H263-2000`<br/>25. `H263-1998`<br/>26. `AMR-WB`<br/>27. `MP4A-LATM`<br/>28. `MP2T`<br/>29. `mpeg4-generic` |Value obtained from FuzzedDataProvider|
+|`kFmtp`| <br/>0. `br=`<br/>1. `bw=`<br/>2. `ch-aw-recv=`<br/>3. `mode-change-capability=`<br/>4. `max-red =`<br/>5. `octet-align=`<br/>6. `mode-change-capability=`<br/>7. `profile-level-id=`<br/>8. `packetization-mode=`<br/>9. `profile=`<br/>10. `level=` <br/>11. `apt=`<br/>12. `annexb=`<br/>13. `protocol=`<br/>14. `config=`<br/>15. `streamtype=`<br/>16. `mode=`<br/>17. `sizelength=`<br/>18. `indexlength=`<br/>19. `indexdeltalength=`<br/>20. `minptime=`<br/>21. `useinbandfec=`<br/>22. `maxplaybackrate=`<br/>23. `stereo=`<br/>24. `level-asymmetry-allowed=`<br/>25. `max-fs=`<br/>26. `max-fr=`|Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) packet_source_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/packet_source_fuzzer/packet_source_fuzzer
+```
+
+# <a name="ARTSPConnection"></a> Fuzzer for ARTSPConnection
+
+## Design Considerations
+This fuzzer aims at covering ARTSPConnection.cpp. A server is implemented inside the fuzzer; it accepts incoming connections and handles each one in a separate thread. The threads are maintained in a ThreadPool, which limits the maximum number of threads alive at a time. When the fuzzer process ends, all the threads in the ThreadPool are joined to the main thread.
+The inputs to the server are generated using FuzzedDataProvider and stored in the variable 'mFuzzData'. As this variable is shared among multiple threads, a mutex is used to ensure synchronization.
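+
+A minimal, self-contained sketch of this ThreadPool/mutex pattern (the names `fuzzData`, `fuzzDataMutex` and `threadPool` are illustrative stand-ins for the fuzzer's members, not the fuzzer code itself):
+```
+#include <mutex>
+#include <string>
+#include <thread>
+#include <vector>
+
+int main() {
+    std::mutex fuzzDataMutex;             // guards the shared input buffer
+    std::string fuzzData;                 // shared among the connection threads
+    std::vector<std::thread> threadPool;  // one thread per accepted connection
+
+    for (int i = 0; i < 3; ++i) {
+        threadPool.push_back(std::thread([&] {
+            std::unique_lock lock(fuzzDataMutex);  // synchronize access to fuzzData
+            fuzzData.append("input");
+        }));
+    }
+
+    // Before the process exits, every worker thread is joined back to the main thread.
+    for (auto& thread : threadPool) {
+        if (thread.joinable()) {
+            thread.join();
+        }
+    }
+    return 0;
+}
+```
+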
+### Fuzzer Inputs:
+The inputs generated in the fuzzer using FuzzedDataProvider are randomized as much as possible. Due to constraints in the module source code, the inputs have to be limited and arranged in a specific format.
+
+ARTSPConnection supports the following parameters:
+1. Authentication Type (parameter name: "kAuthType")
+2. FuzzData (parameter name: "mFuzzData")
+3. RequestData (parameter name: "mFuzzRequestData")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kAuthType`| 0.`Basic`<br/>1.`Digest`|Value obtained from FuzzedDataProvider|
+|`mFuzzData`| `String` |Value obtained from FuzzedDataProvider|
+|`mFuzzRequestData`| `String` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) rtsp_connection_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/rtsp_connection_fuzzer/rtsp_connection_fuzzer
+```
diff --git a/media/libstagefright/rtsp/fuzzer/packet_source_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/packet_source_fuzzer.cpp
new file mode 100644
index 0000000..a3d7535
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/packet_source_fuzzer.cpp
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/base64.h>
+#include <media/stagefright/rtsp/APacketSource.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
+
+using namespace android;
+
+static constexpr int32_t kMinValue = 0;
+static constexpr int32_t kMaxIPAddress = 255;
+static constexpr int32_t kMaxFmt = 255;
+static constexpr int32_t kMinAPICase = 0;
+static constexpr int32_t kMaxPacketSourceAPI = 5;
+static constexpr size_t kMinIndex = 1;
+static constexpr size_t kMaxCodecConfigs = 4;
+
+std::string kCodecs[] = {"opus",        "ISAC",         "VP8",
+                         "google-data", "G722",         "PCMU",
+                         "PCMA",        "CN",           "telephone-event",
+                         "VP9",         "red",          "ulpfec",
+                         "rtx",         "H264",         "iLBC",
+                         "H261",        "MPV",          "H263",
+                         "AMR",         "AC3",          "G723",
+                         "G729A",       "H264",         "MP4V-ES",
+                         "H265",        "H263-2000",    "H263-1998",
+                         "AMR",         "AMR-WB",       "MP4A-LATM",
+                         "MP2T",        "mpeg4-generic"};
+
+std::string kFmtp[] = {"br=",
+                       "bw=",
+                       "ch-aw-recv=",
+                       "mode-change-capability=",
+                       "max-red =",
+                       "octet-align=",
+                       "mode-change-capability=",
+                       "max-red=",
+                       "profile-level-id=",
+                       "packetization-mode=",
+                       "profile=",
+                       "level=",
+                       "apt=",
+                       "annexb=",
+                       "protocol=",
+                       "streamtype=",
+                       "mode=",
+                       "sizelength=",
+                       "indexlength=",
+                       "indexdeltalength=",
+                       "minptime=",
+                       "useinbandfec=",
+                       "maxplaybackrate=",
+                       "stereo=",
+                       "level-asymmetry-allowed=",
+                       "max-fs=",
+                       "max-fr="};
+
+std::string kCodecConfigString[kMaxCodecConfigs][2] = {{"H264", "profile-level-id="},
+                                                       {"MP4A-LATM", "config="},
+                                                       {"MP4V-ES", "config="},
+                                                       {"mpeg4-generic", "mode="}};
+
+class ASessionPacketFuzzer {
+  public:
+    ASessionPacketFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+};
+
+bool checkFormatSupport(const std::string& codec, const std::string& format) {
+    for (size_t i = 0; i < kMaxCodecConfigs; ++i) {
+        if (codec == kCodecConfigString[i][0]) {
+            if (format == kCodecConfigString[i][1]) {
+                return true;
+            } else {
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
+void ASessionPacketFuzzer::process() {
+    AString inputString;
+    const sp<ASessionDescription> sessionPacket = sp<ASessionDescription>::make();
+    std::string codec = mFdp.PickValueInArray(kCodecs);
+    std::string ipAddress =
+            std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxIPAddress)) + "." +
+            std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxIPAddress)) + "." +
+            std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxIPAddress)) + "." + "0";
+    std::string format = mFdp.PickValueInArray(kFmtp);
+    std::string fmptStr = format + std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxFmt)) +
+                          ";" + mFdp.PickValueInArray(kFmtp) +
+                          std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxFmt));
+    sessionPacket->SDPStringFactory(
+            inputString, ipAddress.c_str() /* ip */, mFdp.ConsumeBool() /* isAudio */,
+            mFdp.ConsumeIntegral<unsigned int>() /* port */,
+            mFdp.ConsumeIntegral<unsigned int>() /* payloadType */,
+            mFdp.ConsumeIntegral<unsigned int>() /* as */, codec.c_str(), /* codec */
+            fmptStr.c_str() /* fmtp */, mFdp.ConsumeIntegral<int32_t>() /* width */,
+            mFdp.ConsumeIntegral<int32_t>() /* height */,
+            mFdp.ConsumeIntegral<int32_t>() /* cvoExtMap */);
+    sessionPacket->setTo(inputString.c_str(), inputString.size());
+    size_t trackSize = sessionPacket->countTracks();
+    AString desc = nullptr;
+    while (mFdp.remaining_bytes()) {
+        int32_t packetSourceAPI =
+                mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxPacketSourceAPI);
+        switch (packetSourceAPI) {
+            case 0: {
+                unsigned long payload = 0;
+                AString params = nullptr;
+                sessionPacket->getFormatType(mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1),
+                                             &payload, &desc, &params);
+                break;
+            }
+            case 1: {
+                int32_t width, height;
+                unsigned long payload = mFdp.ConsumeIntegral<unsigned long>();
+                sessionPacket->getDimensions(mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1),
+                                             payload, &width, &height);
+                break;
+            }
+            case 2: {
+                int32_t cvoExtMap = mFdp.ConsumeIntegral<int32_t>();
+                sessionPacket->getCvoExtMap(mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1),
+                                            &cvoExtMap);
+                break;
+            }
+            case 3: {
+                int64_t durationUs = mFdp.ConsumeIntegral<int64_t>();
+                sessionPacket->getDurationUs(&durationUs);
+                break;
+            }
+            case 4: {
+                int32_t timeScale, numChannels;
+                if (desc != nullptr) {
+                    sessionPacket->ParseFormatDesc(desc.c_str(), &timeScale, &numChannels);
+                }
+                break;
+            }
+            case 5: {
+                if (checkFormatSupport(codec, format)) {
+                    sp<APacketSource> packetSource = sp<APacketSource>::make(
+                            sessionPacket, mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1));
+                }
+                break;
+            }
+        }
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    ASessionPacketFuzzer packetSourceFuzzer(data, size);
+    packetSourceFuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp
new file mode 100644
index 0000000..8d9f923
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/rtsp/ARTPWriter.h>
+
+constexpr int32_t kMinSize = 0;
+constexpr int32_t kMaxSize = 65536;
+constexpr int32_t kMaxTime = 1000;
+constexpr int32_t kMaxBytes = 128;
+constexpr int32_t kAMRNBFrameSizes[] = {13, 14, 16, 18, 20, 21, 27, 32};
+constexpr int32_t kAMRWBFrameSizes[] = {18, 24, 33, 37, 41, 47, 51, 59, 61};
+constexpr int32_t kAMRIndexOffset = 8;
+
+using namespace android;
+
+const char* kKeyMimeTypeArray[] = {MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_HEVC,
+                                   MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_AUDIO_AMR_NB,
+                                   MEDIA_MIMETYPE_AUDIO_AMR_WB};
+
+struct TestMediaSource : public MediaSource {
+  public:
+    TestMediaSource(FuzzedDataProvider& mFdp) : mTestMetaData(new MetaData) {
+        int32_t vectorSize = 0;
+        mAllowRead = mFdp.ConsumeBool();
+        mKeySps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyVps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyPps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyTime = mFdp.ConsumeIntegralInRange<int64_t>(kMinSize, kMaxTime);
+
+        mMimeType = mFdp.PickValueInArray(kKeyMimeTypeArray);
+        mTestMetaData->setCString(kKeyMIMEType, mMimeType);
+        if (mMimeType == MEDIA_MIMETYPE_AUDIO_AMR_NB) {
+            int32_t index =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, std::size(kAMRNBFrameSizes) - 1);
+            vectorSize = kAMRNBFrameSizes[index];
+            mData.push_back(kAMRIndexOffset * index);
+        } else if (mMimeType == MEDIA_MIMETYPE_AUDIO_AMR_WB) {
+            int32_t index =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, std::size(kAMRWBFrameSizes) - 1);
+            vectorSize = kAMRWBFrameSizes[index];
+            mData.push_back(kAMRIndexOffset * index);
+        } else if (mMimeType == MEDIA_MIMETYPE_VIDEO_H263) {
+            // Required format for H263 media data
+            mData.push_back(0);
+            mData.push_back(0);
+            vectorSize = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+        } else {
+            vectorSize = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+        }
+        for (size_t idx = mData.size(); idx < vectorSize; ++idx) {
+            mData.push_back(mFdp.ConsumeIntegral<uint8_t>());
+        }
+    }
+    virtual status_t start(MetaData* /*params*/) { return OK; }
+    virtual status_t stop() { return OK; }
+    virtual sp<MetaData> getFormat() { return mTestMetaData; }
+    virtual status_t read(MediaBufferBase** buffer, const ReadOptions* /*options*/) {
+        if (!mAllowRead) {
+            return -1;
+        }
+        *buffer = new MediaBuffer(mData.data() /*data*/, mData.size() /*size*/);
+        if (mKeySps) {
+            (*buffer)->meta_data().setInt32(kKeySps, mKeySps);
+        }
+        if (mKeyVps) {
+            (*buffer)->meta_data().setInt32(kKeyVps, mKeyVps);
+        }
+        if (mKeyPps) {
+            (*buffer)->meta_data().setInt32(kKeyPps, mKeyPps);
+        }
+        (*buffer)->meta_data().setInt64(kKeyTime, mKeyTime);
+        return OK;
+    }
+
+  private:
+    int32_t mKeySps;
+    int32_t mKeyVps;
+    int32_t mKeyPps;
+    int64_t mKeyTime;
+    bool mAllowRead;
+    const char* mMimeType;
+    sp<MetaData> mTestMetaData;
+    std::vector<uint8_t> mData;
+};
+
+class ARTPWriterFuzzer {
+  public:
+    ARTPWriterFuzzer(const uint8_t* data, size_t size)
+        : mDataSourceFd(memfd_create("InputFile", MFD_ALLOW_SEALING)), mFdp(data, size) {}
+    ~ARTPWriterFuzzer() { close(mDataSourceFd); }
+    void process();
+
+  private:
+    void createARTPWriter();
+    const int32_t mDataSourceFd;
+    FuzzedDataProvider mFdp;
+    sp<ARTPWriter> mArtpWriter;
+};
+
+void ARTPWriterFuzzer::createARTPWriter() {
+    String8 localIp = String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+    String8 remoteIp = String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+    mArtpWriter = sp<ARTPWriter>::make(
+            mDataSourceFd, localIp, mFdp.ConsumeIntegral<uint16_t>() /* localPort */, remoteIp,
+            mFdp.ConsumeIntegral<uint16_t>() /* remotePort */,
+            mFdp.ConsumeIntegralInRange<uint32_t>(kMinSize, kMaxSize) /* seqNo */);
+}
+
+void ARTPWriterFuzzer::process() {
+    if (mFdp.ConsumeBool()) {
+        mArtpWriter = sp<ARTPWriter>::make(mDataSourceFd);
+        if (mArtpWriter->getSequenceNum() > kMaxSize) {
+            createARTPWriter();
+        }
+    } else {
+        createARTPWriter();
+    }
+
+    mArtpWriter->addSource(sp<TestMediaSource>::make(mFdp) /* source */);
+
+    while (mFdp.remaining_bytes()) {
+        auto invokeRTPWriterFuzzer = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    sp<MetaData> metaData = sp<MetaData>::make();
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeySelfID, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyPayloadType, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpExtMap, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpCvoDegrees, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpDscp, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt64(kKeySocketNetwork, mFdp.ConsumeIntegral<int64_t>());
+                    }
+                    mArtpWriter->start(metaData.get() /*param*/);
+                },
+                [&]() {
+                    mArtpWriter->setTMMBNInfo(mFdp.ConsumeIntegral<uint32_t>() /* opponentID */,
+                                              mFdp.ConsumeIntegral<uint32_t>() /* bitrate */);
+                },
+                [&]() { mArtpWriter->stop(); },
+                [&]() {
+                    mArtpWriter->updateCVODegrees(mFdp.ConsumeIntegral<int32_t>() /* cvoDegrees */);
+                },
+                [&]() {
+                    mArtpWriter->updatePayloadType(
+                            mFdp.ConsumeIntegral<int32_t>() /* payloadType */);
+                },
+
+        });
+        invokeRTPWriterFuzzer();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    ARTPWriterFuzzer artpWriterFuzzer(data, size);
+    artpWriterFuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/rtsp_connection_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/rtsp_connection_fuzzer.cpp
new file mode 100644
index 0000000..51c423e
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/rtsp_connection_fuzzer.cpp
@@ -0,0 +1,397 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <arpa/inet.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/rtsp/ARTSPConnection.h>
+#include <thread>
+
+using namespace android;
+
+const std::string kAuthType[] = {"Basic", "Digest"};
+const std::string kTab = "\t";
+const std::string kCSeq = "CSeq: ";
+const std::string kSpace = " ";
+const std::string kNewLine = "\n";
+const std::string kBinaryHeader = "$";
+const std::string kNonce = " nonce=\"\"";
+const std::string kRealm = " realm=\"\"";
+const std::string kHeaderBoundary = "\r\n\r\n";
+const std::string kContentLength = "content-length: ";
+const std::string kDefaultRequestValue = "INVALID_FORMAT";
+const std::string kUrlPrefix = "rtsp://root:pass@127.0.0.1:";
+const std::string kRequestMarker = "REQUEST_SENT";
+const std::string kQuitResponse = "\n\n\n\n";
+const std::string kRTSPVersion = "RTSP/1.0";
+const std::string kValidResponse = kRTSPVersion + " 200 \n";
+const std::string kAuthString = kRTSPVersion + " 401 \nwww-authenticate: ";
+constexpr char kNullValue = '\0';
+constexpr char kDefaultValue = '0';
+constexpr int32_t kWhat = 'resp';
+constexpr int32_t kMinPort = 100;
+constexpr int32_t kMaxPort = 999;
+constexpr int32_t kMinASCIIValue = 32;
+constexpr int32_t kMaxASCIIValue = 126;
+constexpr int32_t kMinContentLength = 0;
+constexpr int32_t kMaxContentLength = 1000;
+constexpr int32_t kBinaryVectorSize = 3;
+constexpr int32_t kDefaultCseqValue = 1;
+constexpr int32_t kBufferSize = 1024;
+constexpr int32_t kMaxLoopRuns = 5;
+constexpr int32_t kPort = 554;
+constexpr int32_t kMaxBytes = 128;
+constexpr int32_t kMaxThreads = 1024;
+
+struct FuzzAHandler : public AHandler {
+  public:
+    FuzzAHandler(std::function<void()> signalEosFunction)
+        : mSignalEosFunction(std::move(signalEosFunction)) {}
+    ~FuzzAHandler() = default;
+
+  protected:
+    void onMessageReceived(const sp<AMessage>& msg) override {
+        switch (msg->what()) {
+            case kWhat: {
+                mSignalEosFunction();
+                break;
+            }
+        }
+    }
+
+  private:
+    std::function<void()> mSignalEosFunction;
+};
+
+class RTSPConnectionFuzzer {
+  public:
+    RTSPConnectionFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    ~RTSPConnectionFuzzer() {
+        // wait for all the threads to join the main thread
+        for (auto& thread : mThreadPool) {
+            if (thread.joinable()) {
+                thread.join();
+            }
+        }
+        close(mServerFd);
+    }
+    void process();
+
+  private:
+    void signalEos();
+    void startServer();
+    void createFuzzData();
+    void acceptConnection();
+    void handleConnection(int32_t);
+    void handleClientResponse(int32_t);
+    void sendValidResponse(int32_t, int32_t);
+    int32_t checkSocket(int32_t);
+    size_t generateBinaryDataSize(std::string);
+    bool checkValidRequestData(const AString&);
+    bool mEosReached = false;
+    bool mServerFailure = false;
+    bool mNotifyResponseListener = false;
+    int32_t mServerFd;
+    std::string mFuzzData = "";
+    std::string mFuzzRequestData = "";
+    std::string mRequestData = kDefaultRequestValue;
+    std::mutex mFuzzDataMutex;
+    std::mutex mMsgPostCompleteMutex;
+    std::condition_variable mConditionalVariable;
+    std::vector<std::thread> mThreadPool;
+    FuzzedDataProvider mFdp;
+};
+
+size_t RTSPConnectionFuzzer::generateBinaryDataSize(std::string values) {
+    // Computes the binary data size the same way ARTSPConnection.cpp does
+    uint8_t x = values[0];
+    uint8_t y = values[1];
+    return x << 8 | y;
+}
+
+bool RTSPConnectionFuzzer::checkValidRequestData(const AString& request) {
+    if (request.find(kHeaderBoundary.c_str()) <= 0) {
+        return false;
+    }
+    ssize_t space = request.find(kSpace.c_str());
+    if (space <= 0) {
+        return false;
+    }
+    if (request.find(kSpace.c_str(), space + 1) <= 0) {
+        return false;
+    }
+    return true;
+}
+
+void RTSPConnectionFuzzer::createFuzzData() {
+    std::unique_lock fuzzLock(mFuzzDataMutex);
+    mFuzzData = "";
+    mFuzzRequestData = "";
+    int32_t contentLength = 0;
+    if (mFdp.ConsumeBool()) {
+        if (mFdp.ConsumeBool()) {
+            // if we want to handle server request
+            mFuzzData.append(kSpace + kSpace + kRTSPVersion);
+        } else {
+            // if we want to notify response listener
+            mFuzzData.append(
+                    kRTSPVersion + kSpace +
+                    std::to_string(mFdp.ConsumeIntegralInRange<uint16_t>(kMinPort, kMaxPort)) +
+                    kSpace);
+        }
+        mFuzzData.append(kNewLine);
+        if (mFdp.ConsumeBool()) {
+            contentLength =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinContentLength, kMaxContentLength);
+            mFuzzData.append(kContentLength + std::to_string(contentLength) + kNewLine);
+            if (mFdp.ConsumeBool()) {
+                mFdp.ConsumeBool() ? mFuzzData.append(kSpace + kNewLine)
+                                   : mFuzzData.append(kTab + kNewLine);
+            }
+        }
+        // new line to break out of infinite for loop
+        mFuzzData.append(kNewLine);
+        if (contentLength) {
+            std::string contentData = mFdp.ConsumeBytesAsString(contentLength);
+            contentData.resize(contentLength, kDefaultValue);
+            mFuzzData.append(contentData);
+        }
+    } else {
+        // for binary data
+        std::string randomValues(kBinaryVectorSize, kNullValue);
+        for (size_t idx = 0; idx < kBinaryVectorSize; ++idx) {
+            randomValues[idx] =
+                    (char)mFdp.ConsumeIntegralInRange<uint8_t>(kMinASCIIValue, kMaxASCIIValue);
+        }
+        size_t binaryDataSize = generateBinaryDataSize(randomValues);
+        std::string data = mFdp.ConsumeBytesAsString(binaryDataSize);
+        data.resize(binaryDataSize, kDefaultValue);
+        mFuzzData.append(kBinaryHeader + randomValues + data);
+    }
+    if (mFdp.ConsumeBool()) {
+        mRequestData = mFdp.ConsumeRandomLengthString(kMaxBytes) + kSpace + kSpace +
+                       kHeaderBoundary + mFdp.ConsumeRandomLengthString(kMaxBytes);
+        // Check if Request data is valid
+        if (checkValidRequestData(mRequestData.c_str())) {
+            if (mFdp.ConsumeBool()) {
+                if (mFdp.ConsumeBool()) {
+                    // if we want to handle server request
+                    mFuzzRequestData.append(kSpace + kSpace + kRTSPVersion + kNewLine);
+                } else {
+                    // if we want to add authentication headers
+                    mNotifyResponseListener = true;
+                    mFuzzRequestData.append(kAuthString);
+                    if (mFdp.ConsumeBool()) {
+                        // for Authentication type: Basic
+                        mFuzzRequestData.append(kAuthType[0]);
+                    } else {
+                        // for Authentication type: Digest
+                        mFuzzRequestData.append(kAuthType[1]);
+                        mFuzzRequestData.append(kNonce);
+                        mFuzzRequestData.append(kRealm);
+                    }
+                    mFuzzRequestData.append(kNewLine);
+                }
+            } else {
+                mNotifyResponseListener = false;
+                mFuzzRequestData.append(kValidResponse);
+            }
+        } else {
+            mRequestData = kDefaultRequestValue;
+        }
+    } else {
+        mRequestData = kDefaultRequestValue;
+        mFuzzData.append(kNewLine);
+    }
+}
+
+void RTSPConnectionFuzzer::signalEos() {
+    mEosReached = true;
+    mConditionalVariable.notify_all();
+    return;
+}
+
+int32_t RTSPConnectionFuzzer::checkSocket(int32_t newSocket) {
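+    // Poll newSocket for readability, waiting for at most one second; select() returns 1 if the
+    // socket is readable, 0 on timeout and -1 on error.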
+    struct timeval tv;
+    tv.tv_sec = 1;
+    tv.tv_usec = 0;
+
+    fd_set rs;
+    FD_ZERO(&rs);
+    FD_SET(newSocket, &rs);
+
+    return select(newSocket + 1, &rs, nullptr, nullptr, &tv);
+}
+
+void RTSPConnectionFuzzer::sendValidResponse(int32_t newSocket, int32_t cseq = -1) {
+    std::string validResponse = kValidResponse;
+    if (cseq != -1) {
+        validResponse.append(kCSeq + std::to_string(cseq));
+        validResponse.append(kNewLine + kNewLine);
+    } else {
+        validResponse.append(kNewLine);
+    }
+    send(newSocket, validResponse.c_str(), validResponse.size(), 0);
+}
+
+void RTSPConnectionFuzzer::handleClientResponse(int32_t newSocket) {
+    char buffer[kBufferSize] = {0};
+    if (checkSocket(newSocket) == 1) {
+        read(newSocket, buffer, kBufferSize);
+    }
+}
+
+void RTSPConnectionFuzzer::handleConnection(int32_t newSocket) {
+    std::unique_lock fuzzLock(mFuzzDataMutex);
+    send(newSocket, mFuzzData.c_str(), mFuzzData.size(), 0);
+    if (mFuzzData[0] == kSpace[0]) {
+        handleClientResponse(newSocket);
+    }
+
+    if (mFuzzRequestData != "") {
+        char buffer[kBufferSize] = {0};
+        if (checkSocket(newSocket) == 1 && recv(newSocket, buffer, kBufferSize, MSG_DONTWAIT) > 0) {
+            // Extract the 'CSeq' value present at the end of header
+            std::string clientResponse(buffer);
+            std::string header = clientResponse.substr(0, clientResponse.find(kHeaderBoundary));
+            char cseq = header[header.rfind(kCSeq) + kCSeq.length()];
+            int32_t cseqValue = cseq ? cseq - '0' : kDefaultCseqValue;
+            std::string response = mFuzzRequestData;
+            response.append(kCSeq + std::to_string(cseqValue));
+            response.append(kNewLine + kNewLine);
+            send(newSocket, response.data(), response.length(), 0);
+
+            if (!mNotifyResponseListener) {
+                char buffer[kBufferSize] = {0};
+                if (checkSocket(newSocket) == 1) {
+                    if (recv(newSocket, buffer, kBufferSize, MSG_DONTWAIT) > 0) {
+                        // Extract the 'CSeq' value present at the end of header
+                        std::string clientResponse(buffer);
+                        std::string header =
+                                clientResponse.substr(0, clientResponse.find(kHeaderBoundary));
+                        char cseq = header[header.rfind(kCSeq) + kCSeq.length()];
+                        int32_t cseqValue = cseq ? cseq - '0' : kDefaultCseqValue;
+                        sendValidResponse(newSocket, cseqValue);
+                    } else {
+                        sendValidResponse(newSocket);
+                    }
+                }
+            }
+        } else {
+            // If no data to read, then send a valid response
+            // to release the mutex lock in fuzzer
+            sendValidResponse(newSocket);
+        }
+    }
+    send(newSocket, kQuitResponse.c_str(), kQuitResponse.size(), 0);
+}
+
+void RTSPConnectionFuzzer::startServer() {
+    signal(SIGPIPE, SIG_IGN);
+    mServerFd = socket(AF_INET, SOCK_STREAM | SOCK_CLOEXEC, 0);
+    struct sockaddr_in serverAddress;
+    serverAddress.sin_family = AF_INET;
+    serverAddress.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
+    serverAddress.sin_port = htons(kPort);
+
+    // Get rid of "Address in use" error
+    int32_t opt = 1;
+    if (setsockopt(mServerFd, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt))) {
+        mServerFailure = true;
+    }
+
+    // Bind the socket and set for listening.
+    if (bind(mServerFd, (struct sockaddr*)(&serverAddress), sizeof(serverAddress)) < 0) {
+        mServerFailure = true;
+    }
+
+    if (listen(mServerFd, 5) < 0) {
+        mServerFailure = true;
+    }
+}
+
+void RTSPConnectionFuzzer::acceptConnection() {
+    int32_t clientFd = accept4(mServerFd, nullptr, nullptr, SOCK_CLOEXEC);
+    handleConnection(clientFd);
+    close(clientFd);
+}
+
+void RTSPConnectionFuzzer::process() {
+    startServer();
+    if (mServerFailure) {
+        return;
+    }
+    sp<ALooper> looper = sp<ALooper>::make();
+    sp<FuzzAHandler> handler =
+            sp<FuzzAHandler>::make(std::bind(&RTSPConnectionFuzzer::signalEos, this));
+    sp<ARTSPConnection> rtspConnection =
+            sp<ARTSPConnection>::make(mFdp.ConsumeBool(), mFdp.ConsumeIntegral<uint64_t>());
+    looper->start();
+    looper->registerHandler(rtspConnection);
+    looper->registerHandler(handler);
+    sp<AMessage> replymsg = sp<AMessage>::make(kWhat, handler);
+    std::string url = kUrlPrefix + std::to_string(kPort) + "/";
+
+    while (mFdp.remaining_bytes() && mThreadPool.size() < kMaxThreads) {
+        createFuzzData();
+        mThreadPool.push_back(std::thread(&RTSPConnectionFuzzer::acceptConnection, this));
+        if (mFdp.ConsumeBool()) {
+            rtspConnection->observeBinaryData(replymsg);
+        }
+
+        {
+            rtspConnection->connect(url.c_str(), replymsg);
+            std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+            mConditionalVariable.wait(waitForMsgPostComplete, [this] {
+                if (mEosReached == true) {
+                    mEosReached = false;
+                    return true;
+                }
+                return mEosReached;
+            });
+        }
+
+        if (mRequestData != kDefaultRequestValue) {
+            rtspConnection->sendRequest(mRequestData.c_str(), replymsg);
+            std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+            mConditionalVariable.wait(waitForMsgPostComplete, [this] {
+                if (mEosReached == true) {
+                    mEosReached = false;
+                    return true;
+                }
+                return mEosReached;
+            });
+        }
+
+        if (mFdp.ConsumeBool()) {
+            rtspConnection->disconnect(replymsg);
+            std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+            mConditionalVariable.wait(waitForMsgPostComplete, [this] {
+                if (mEosReached == true) {
+                    mEosReached = false;
+                    return true;
+                }
+                return mEosReached;
+            });
+        }
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    RTSPConnectionFuzzer rtspFuzz(data, size);
+    rtspFuzz.process();
+    return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp
new file mode 100644
index 0000000..748e5b6
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <datasource/HTTPBase.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/rtsp/SDPLoader.h>
+
+using namespace android;
+
+constexpr int32_t kMinCapacity = 0;
+constexpr int32_t kMaxCapacity = 1000;
+constexpr int32_t kMaxStringLength = 20;
+constexpr int32_t kMaxBytes = 128;
+enum { kWhatLoad = 'load' };
+
+struct FuzzAHandler : public AHandler {
+  public:
+    FuzzAHandler(std::function<void()> signalEosFunction) : mSignalEosFunction(signalEosFunction) {}
+
+  protected:
+    void onMessageReceived(const sp<AMessage>& msg) override {
+        switch (msg->what()) {
+            case kWhatLoad: {
+                mSignalEosFunction();
+                break;
+            }
+        }
+        return;
+    }
+
+  private:
+    std::function<void()> mSignalEosFunction;
+};
+
+struct FuzzMediaHTTPConnection : public MediaHTTPConnection {
+  public:
+    FuzzMediaHTTPConnection(FuzzedDataProvider* fdp) : mFdp(fdp) {
+        mSize = mFdp->ConsumeIntegralInRange(kMinCapacity, kMaxCapacity);
+        mData = mFdp->ConsumeBytes<uint8_t>(mSize);
+        mSize = mData.size();
+    }
+    virtual bool connect(const char* /* uri */,
+                         const KeyedVector<String8, String8>* /* headers */) {
+        return mFdp->ConsumeBool();
+    }
+    virtual void disconnect() { return; }
+    virtual ssize_t readAt(off64_t offset, void* data, size_t size) {
+        if ((size + offset <= mData.size()) && (offset >= 0)) {
+            memcpy(data, mData.data() + offset, size);
+            return size;
+        }
+        return 0;
+    }
+    virtual off64_t getSize() { return mSize; }
+    virtual status_t getMIMEType(String8* /*mimeType*/) {return mFdp->ConsumeIntegral<status_t>();}
+    virtual status_t getUri(String8* /*uri*/) {return mFdp->ConsumeIntegral<status_t>();}
+
+  private:
+    FuzzedDataProvider* mFdp = nullptr;
+    std::vector<uint8_t> mData;
+    size_t mSize = 0;
+};
+
+struct FuzzMediaHTTPService : public MediaHTTPService {
+  public:
+    FuzzMediaHTTPService(FuzzedDataProvider* fdp) : mFdp(fdp) {}
+    virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+        mediaHTTPConnection = sp<FuzzMediaHTTPConnection>::make(mFdp);
+        return mediaHTTPConnection;
+    }
+
+  private:
+    sp<FuzzMediaHTTPConnection> mediaHTTPConnection = nullptr;
+    FuzzedDataProvider* mFdp = nullptr;
+};
+
+class SDPLoaderFuzzer {
+  public:
+    SDPLoaderFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {}
+    void process();
+
+  private:
+    void signalEos();
+
+    bool mEosReached = false;
+    std::mutex mMsgPostCompleteMutex;
+    std::condition_variable mConditionalVariable;
+    FuzzedDataProvider mFdp;
+};
+
+void SDPLoaderFuzzer::signalEos() {
+    mEosReached = true;
+    mConditionalVariable.notify_one();
+    return;
+}
+
+void SDPLoaderFuzzer::process() {
+    sp<FuzzAHandler> handler = sp<FuzzAHandler>::make(std::bind(&SDPLoaderFuzzer::signalEos, this));
+    sp<ALooper> looper = sp<ALooper>::make();
+    looper->start();
+    looper->registerHandler(handler);
+    const sp<AMessage> notify = sp<AMessage>::make(kWhatLoad, handler);
+    sp<SDPLoader> sdpLoader =
+            sp<SDPLoader>::make(notify, mFdp.ConsumeIntegral<uint32_t>() /* flags */,
+                                sp<FuzzMediaHTTPService>::make(&mFdp) /* httpService */);
+
+    KeyedVector<String8, String8> headers;
+    for (size_t idx = 0; idx < mFdp.ConsumeIntegralInRange<size_t>(kMinCapacity, kMaxCapacity);
+         ++idx) {
+        headers.add(String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str()) /* key */,
+                    String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str()) /* value */);
+    }
+
+    sdpLoader->load(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* url */, &headers);
+
+    std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+    mConditionalVariable.wait(waitForMsgPostComplete, [this] { return mEosReached; });
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    SDPLoaderFuzzer sdpLoaderFuzzer(data, size);
+    sdpLoaderFuzzer.process();
+    return 0;
+}
diff --git a/media/module/extractors/mp4/Android.bp b/media/module/extractors/mp4/Android.bp
index 540d75d..8072002 100644
--- a/media/module/extractors/mp4/Android.bp
+++ b/media/module/extractors/mp4/Android.bp
@@ -30,6 +30,7 @@
 
     srcs: [
         "AC4Parser.cpp",
+        "HeifCleanAperture.cpp",
         "ItemTable.cpp",
         "MPEG4Extractor.cpp",
         "SampleIterator.cpp",
diff --git a/media/module/extractors/mp4/HeifCleanAperture.cpp b/media/module/extractors/mp4/HeifCleanAperture.cpp
new file mode 100644
index 0000000..f0a0867
--- /dev/null
+++ b/media/module/extractors/mp4/HeifCleanAperture.cpp
@@ -0,0 +1,157 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <HeifCleanAperture.h>
+
+namespace android {
+namespace heif {
+namespace {
+
+// |a| and |b| hold int32_t values. The int64_t type is used so that we can negate INT32_MIN without
+// overflowing int32_t.
+int64_t calculateGreatestCommonDivisor(int64_t a, int64_t b) {
+    if (a < 0) {
+        a *= -1;
+    }
+    if (b < 0) {
+        b *= -1;
+    }
+    while (b != 0) {
+        int64_t r = a % b;
+        a = b;
+        b = r;
+    }
+    return a;
+}
+
+bool overflowsInt32(int64_t x) {
+    return (x < INT32_MIN) || (x > INT32_MAX);
+}
+
+Fraction calculateCenter(int32_t value) {
+    Fraction f(value, 2);
+    f.simplify();
+    return f;
+}
+
+}  // namespace
+
+Fraction::Fraction(int32_t n, int32_t d) {
+    this->n = n;
+    this->d = d;
+}
+
+void Fraction::simplify() {
+    int64_t gcd = calculateGreatestCommonDivisor(n, d);
+    if (gcd > 1) {
+        n = static_cast<int32_t>(n / gcd);
+        d = static_cast<int32_t>(d / gcd);
+    }
+}
+
+bool Fraction::commonDenominator(Fraction* f) {
+    simplify();
+    f->simplify();
+    if (d == f->d) return true;
+    const int64_t this_d = d;
+    const int64_t fd = f->d;
+    const int64_t thisnNew = n * fd;
+    const int64_t thisdNew = d * fd;
+    const int64_t fnNew = f->n * this_d;
+    const int64_t fdNew = f->d * this_d;
+    if (overflowsInt32(thisnNew) || overflowsInt32(thisdNew) || overflowsInt32(fnNew) ||
+        overflowsInt32(fdNew)) {
+        return false;
+    }
+    n = static_cast<int32_t>(thisnNew);
+    d = static_cast<int32_t>(thisdNew);
+    f->n = static_cast<int32_t>(fnNew);
+    f->d = static_cast<int32_t>(fdNew);
+    return true;
+}
+
+bool Fraction::add(Fraction f) {
+    if (!commonDenominator(&f)) {
+        return false;
+    }
+
+    const int64_t result = static_cast<int64_t>(n) + f.n;
+    if (overflowsInt32(result)) {
+        return false;
+    }
+    n = static_cast<int32_t>(result);
+    simplify();
+    return true;
+}
+
+bool Fraction::subtract(Fraction f) {
+    if (!commonDenominator(&f)) {
+        return false;
+    }
+
+    const int64_t result = static_cast<int64_t>(n) - f.n;
+    if (overflowsInt32(result)) {
+        return false;
+    }
+    n = static_cast<int32_t>(result);
+    simplify();
+    return true;
+}
+
+bool convertCleanApertureToRect(uint32_t imageW, uint32_t imageH, const CleanAperture& clap,
+                                int32_t* left, int32_t* top, int32_t* right, int32_t* bottom) {
+    // ISO/IEC 14496-12:2020, Section 12.1.4.1:
+    //   For horizOff and vertOff, D shall be strictly positive and N may be
+    //   positive or negative. For cleanApertureWidth and cleanApertureHeight,
+    //   N shall be positive and D shall be strictly positive.
+    if (clap.width.d <= 0 || clap.height.d <= 0 || clap.horizOff.d <= 0 || clap.vertOff.d <= 0 ||
+        clap.width.n < 0 || clap.height.n < 0 || !clap.width.isInteger() ||
+        !clap.height.isInteger() || imageW > INT32_MAX || imageH > INT32_MAX) {
+        return false;
+    }
+
+    const int32_t clapW = clap.width.getInt32();
+    const int32_t clapH = clap.height.getInt32();
+    if (clapW == 0 || clapH == 0) {
+        return false;
+    }
+
+    Fraction centerX = calculateCenter(imageW);
+    Fraction centerY = calculateCenter(imageH);
+    Fraction halfW(clapW, 2);
+    Fraction halfH(clapH, 2);
+
+    if (!centerX.add(clap.horizOff) || !centerX.subtract(halfW) || !centerX.isInteger() ||
+        centerX.n < 0 || !centerY.add(clap.vertOff) || !centerY.subtract(halfH) ||
+        !centerY.isInteger() || centerY.n < 0) {
+        return false;
+    }
+
+    *left = centerX.getInt32();
+    *top = centerY.getInt32();
+    *right = *left + clapW;
+    *bottom = *top + clapH;
+
+    // Make sure that the crop rect is within the image bounds.
+    if (*left > (UINT32_MAX - clapW) || *right > imageW || *top > (UINT32_MAX - clapH) ||
+        *bottom > imageH) {
+        return false;
+    }
+    return true;
+}
+
+}  // namespace heif
+}  // namespace android
diff --git a/media/module/extractors/mp4/ItemTable.cpp b/media/module/extractors/mp4/ItemTable.cpp
index 7fe5ba7..cf3df62 100644
--- a/media/module/extractors/mp4/ItemTable.cpp
+++ b/media/module/extractors/mp4/ItemTable.cpp
@@ -19,6 +19,7 @@
 
 #include <unordered_set>
 
+#include <HeifCleanAperture.h>
 #include <ItemTable.h>
 #include <media/MediaExtractorPluginApi.h>
 #include <media/MediaExtractorPluginHelper.h>
@@ -47,7 +48,7 @@
     ImageItem(uint32_t _type, uint32_t _id, bool _hidden) :
             type(_type), itemId(_id), hidden(_hidden),
             rows(0), columns(0), width(0), height(0), rotation(0),
-            offset(0), size(0), nextTileIndex(0) {}
+            offset(0), size(0), seenClap(false), nextTileIndex(0) {}
 
     bool isGrid() const {
         return type == FOURCC("grid");
@@ -77,6 +78,8 @@
     sp<ABuffer> hvcc;
     sp<ABuffer> icc;
     sp<ABuffer> av1c;
+    bool seenClap;
+    CleanAperture clap;
 
     Vector<uint32_t> thumbnails;
     Vector<uint32_t> dimgRefs;
@@ -833,6 +836,47 @@
     return OK;
 }
 
+struct ClapBox : public Box, public ItemProperty {
+    ClapBox(DataSourceHelper *source) :
+        Box(source, FOURCC("clap")), mSeen(false) {}
+
+    status_t parse(off64_t offset, size_t size) override;
+
+    void attachTo(ImageItem &image) const override {
+        image.seenClap = mSeen;
+        if (!mSeen) return;
+        image.clap = mClap;
+    }
+
+private:
+    bool mSeen;
+    CleanAperture mClap;
+};
+
+status_t ClapBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    if (size < 32) {
+        return ERROR_MALFORMED;
+    }
+    mSeen = true;
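+    // The 'clap' payload carries four fractions (width, height, horizOff, vertOff), each stored
+    // as a numerator/denominator pair of 32-bit values.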
+    uint32_t values[8];
+    for (int i = 0; i < 8; ++i, offset += 4) {
+        if (!source()->getUInt32(offset, &values[i])) {
+            return ERROR_IO;
+        }
+    }
+    mClap.width.n = values[0];
+    mClap.width.d = values[1];
+    mClap.height.n = values[2];
+    mClap.height.d = values[3];
+    mClap.horizOff.n = values[4];
+    mClap.horizOff.d = values[5];
+    mClap.vertOff.n = values[6];
+    mClap.vertOff.d = values[7];
+    return OK;
+}
+
 struct ColrBox : public Box, public ItemProperty {
     ColrBox(DataSourceHelper *source) :
         Box(source, FOURCC("colr")) {}
@@ -992,6 +1036,11 @@
             itemProperty = new IrotBox(source());
             break;
         }
+        case FOURCC("clap"):
+        {
+            itemProperty = new ClapBox(source());
+            break;
+        }
         case FOURCC("colr"):
         {
             itemProperty = new ColrBox(source());
@@ -1599,6 +1648,12 @@
     AMediaFormat_setInt32(meta,
             AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, image->width * image->height * 3 / 2);
 
+    int32_t left, top, right, bottom;
+    if (image->seenClap && convertCleanApertureToRect(image->width, image->height, image->clap,
+                                                      &left, &top, &right, &bottom)) {
+        AMediaFormat_setRect(meta, AMEDIAFORMAT_KEY_DISPLAY_CROP, left, top, right - 1, bottom - 1);
+    }
+
     if (!image->thumbnails.empty()) {
         ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(image->thumbnails[0]);
         if (thumbItemIndex >= 0) {
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index ecd937d..9ab9f0f 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -2007,7 +2007,7 @@
             uint8_t mhac_header[mhac_header_size];
             off64_t data_offset = *offset;
 
-            if (chunk_size < sizeof(mhac_header)) {
+            if (mLastTrack == NULL || chunk_size < sizeof(mhac_header)) {
                 return ERROR_MALFORMED;
             }
 
diff --git a/media/module/extractors/mp4/include/HeifCleanAperture.h b/media/module/extractors/mp4/include/HeifCleanAperture.h
new file mode 100644
index 0000000..930197d
--- /dev/null
+++ b/media/module/extractors/mp4/include/HeifCleanAperture.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HEIF_CLEAN_APERTURE_H_
+#define HEIF_CLEAN_APERTURE_H_
+
+#include <stdint.h>
+
+namespace android {
+namespace heif {
+
+struct Fraction {
+    Fraction() = default;
+    Fraction(int32_t n, int32_t d);
+
+    void simplify();
+    bool commonDenominator(Fraction* f);
+    bool add(Fraction f);
+    bool subtract(Fraction f);
+    bool isInteger() const { return n % d == 0; }
+    int32_t getInt32() const { return n / d; }
+    int32_t n;
+    int32_t d;
+};
+
+struct CleanAperture {
+    Fraction width;
+    Fraction height;
+    Fraction horizOff;
+    Fraction vertOff;
+};
+
+// Converts the CleanAperture value into a rectangle with bounds left, top, right and bottom.
+// Returns true on success, false otherwise.
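+//
+// Example (values from HeifCleanApertureUnitTest.cpp): for a 120x160 image with
+// clap = {Fraction(96, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}, this returns true
+// and sets left = 12, top = 14, right = 108, bottom = 146.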
+bool convertCleanApertureToRect(uint32_t imageW, uint32_t imageH, const CleanAperture& clap,
+                                int32_t* left, int32_t* top, int32_t* right, int32_t* bottom);
+
+}  // namespace heif
+}  // namespace android
+
+#endif  // HEIF_CLEAN_APERTURE_H_
diff --git a/media/module/extractors/mp4/tests/Android.bp b/media/module/extractors/mp4/tests/Android.bp
new file mode 100644
index 0000000..252cec2
--- /dev/null
+++ b/media/module/extractors/mp4/tests/Android.bp
@@ -0,0 +1,24 @@
+package {
+    default_applicable_licenses: ["frameworks_av_media_extractors_mp4_license"],
+}
+
+cc_test_host {
+    name: "HeifCleanApertureUnitTest",
+    gtest: true,
+
+    srcs: ["HeifCleanApertureUnitTest.cpp"],
+
+    header_libs: [
+        "libmp4extractor_headers",
+    ],
+
+    static_libs: [
+        "libmp4extractor",
+    ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
diff --git a/media/module/extractors/mp4/tests/HeifCleanApertureUnitTest.cpp b/media/module/extractors/mp4/tests/HeifCleanApertureUnitTest.cpp
new file mode 100644
index 0000000..6a84ae3
--- /dev/null
+++ b/media/module/extractors/mp4/tests/HeifCleanApertureUnitTest.cpp
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+#include <HeifCleanAperture.h>
+#include <gtest/gtest.h>
+
+namespace {
+
+using android::heif::CleanAperture;
+using android::heif::convertCleanApertureToRect;
+using android::heif::Fraction;
+
+struct InvalidClapPropertyParam {
+    uint32_t width;
+    uint32_t height;
+    CleanAperture clap;
+};
+
+const InvalidClapPropertyParam kInvalidClapPropertyTestParams[] = {
+        // Zero or negative denominators.
+        {120, 160, {Fraction(96, 0), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}},
+        {120, 160, {Fraction(96, -1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}},
+        {120, 160, {Fraction(96, 1), Fraction(132, 0), Fraction(0, 1), Fraction(0, 1)}},
+        {120, 160, {Fraction(96, 1), Fraction(132, -1), Fraction(0, 1), Fraction(0, 1)}},
+        {120, 160, {Fraction(96, 1), Fraction(132, 1), Fraction(0, 0), Fraction(0, 1)}},
+        {120, 160, {Fraction(96, 1), Fraction(132, 1), Fraction(0, -1), Fraction(0, 1)}},
+        {120, 160, {Fraction(96, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 0)}},
+        {120, 160, {Fraction(96, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, -1)}},
+        // Zero or negative clean aperture width or height.
+        {120, 160, {Fraction(-96, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}},
+        {120, 160, {Fraction(0, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}},
+        {120, 160, {Fraction(96, 1), Fraction(-132, 1), Fraction(0, 1), Fraction(0, 1)}},
+        {120, 160, {Fraction(96, 1), Fraction(0, 1), Fraction(0, 1), Fraction(0, 1)}},
+        // Clean aperture width or height is not an integer.
+        {120, 160, {Fraction(96, 5), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)}},
+        {120, 160, {Fraction(96, 1), Fraction(132, 5), Fraction(0, 1), Fraction(0, 1)}},
+        {722, 1024, {Fraction(385, 1), Fraction(330, 1), Fraction(103, 1), Fraction(-308, 1)}},
+        {1024, 722, {Fraction(330, 1), Fraction(385, 1), Fraction(-308, 1), Fraction(103, 1)}},
+};
+
+using InvalidClapPropertyTest = ::testing::TestWithParam<InvalidClapPropertyParam>;
+
+INSTANTIATE_TEST_SUITE_P(Parameterized, InvalidClapPropertyTest,
+                         ::testing::ValuesIn(kInvalidClapPropertyTestParams));
+
+// Negative tests for the convertCleanApertureToRect() function.
+TEST_P(InvalidClapPropertyTest, ValidateClapProperty) {
+    const InvalidClapPropertyParam& param = GetParam();
+    int32_t left, top, right, bottom;
+    EXPECT_FALSE(convertCleanApertureToRect(param.width, param.height, param.clap, &left, &top,
+                                            &right, &bottom));
+}
+
+struct ValidClapPropertyParam {
+    uint32_t width;
+    uint32_t height;
+    CleanAperture clap;
+
+    int32_t left;
+    int32_t top;
+    int32_t right;
+    int32_t bottom;
+};
+
+const ValidClapPropertyParam kValidClapPropertyTestParams[] = {
+        {120,
+         160,
+         {Fraction(96, 1), Fraction(132, 1), Fraction(0, 1), Fraction(0, 1)},
+         12,
+         14,
+         108,
+         146},
+        {120,
+         160,
+         {Fraction(60, 1), Fraction(80, 1), Fraction(-30, 1), Fraction(-40, 1)},
+         0,
+         0,
+         60,
+         80},
+};
+
+using ValidClapPropertyTest = ::testing::TestWithParam<ValidClapPropertyParam>;
+
+INSTANTIATE_TEST_SUITE_P(Parameterized, ValidClapPropertyTest,
+                         ::testing::ValuesIn(kValidClapPropertyTestParams));
+
+// Positive tests for the convertCleanApertureToRect() function.
+TEST_P(ValidClapPropertyTest, ValidateClapProperty) {
+    const ValidClapPropertyParam& param = GetParam();
+    int32_t left, top, right, bottom;
+    EXPECT_TRUE(convertCleanApertureToRect(param.width, param.height, param.clap, &left, &top,
+                                           &right, &bottom));
+    EXPECT_EQ(left, param.left);
+    EXPECT_EQ(top, param.top);
+    EXPECT_EQ(right, param.right);
+    EXPECT_EQ(bottom, param.bottom);
+}
+
+}  // namespace
diff --git a/media/module/foundation/MetaDataBase.cpp b/media/module/foundation/MetaDataBase.cpp
index da383fa..60478c9 100644
--- a/media/module/foundation/MetaDataBase.cpp
+++ b/media/module/foundation/MetaDataBase.cpp
@@ -23,6 +23,8 @@
 #include <stdlib.h>
 #include <string.h>
 
+#include <mutex>
+
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/hexdump.h>
@@ -78,6 +80,7 @@
 
 
 struct MetaDataBase::MetaDataInternal {
+    std::mutex mLock;
     KeyedVector<uint32_t, MetaDataBase::typed_data> mItems;
 };
 
@@ -102,10 +105,12 @@
 }
 
 void MetaDataBase::clear() {
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     mInternalData->mItems.clear();
 }
 
 bool MetaDataBase::remove(uint32_t key) {
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     ssize_t i = mInternalData->mItems.indexOfKey(key);
 
     if (i < 0) {
@@ -252,6 +257,7 @@
         uint32_t key, uint32_t type, const void *data, size_t size) {
     bool overwrote_existing = true;
 
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     ssize_t i = mInternalData->mItems.indexOfKey(key);
     if (i < 0) {
         typed_data item;
@@ -269,6 +275,7 @@
 
 bool MetaDataBase::findData(uint32_t key, uint32_t *type,
                         const void **data, size_t *size) const {
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     ssize_t i = mInternalData->mItems.indexOfKey(key);
 
     if (i < 0) {
@@ -283,6 +290,7 @@
 }
 
 bool MetaDataBase::hasData(uint32_t key) const {
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     ssize_t i = mInternalData->mItems.indexOfKey(key);
 
     if (i < 0) {
@@ -429,6 +437,7 @@
 
 String8 MetaDataBase::toString() const {
     String8 s;
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     for (int i = mInternalData->mItems.size(); --i >= 0;) {
         int32_t key = mInternalData->mItems.keyAt(i);
         char cc[5];
@@ -443,6 +452,7 @@
 }
 
 void MetaDataBase::dumpToLog() const {
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     for (int i = mInternalData->mItems.size(); --i >= 0;) {
         int32_t key = mInternalData->mItems.keyAt(i);
         char cc[5];
@@ -455,6 +465,7 @@
 #if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 status_t MetaDataBase::writeToParcel(Parcel &parcel) {
     status_t ret;
+    std::lock_guard<std::mutex> guard(mInternalData->mLock);
     size_t numItems = mInternalData->mItems.size();
     ret = parcel.writeUint32(uint32_t(numItems));
     if (ret) {
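The MetaDataBase hunks above all apply the same fix: every method that reads or writes mItems now takes the mutex stored alongside it. A condensed, self-contained sketch of that locking pattern (hypothetical Store type, not the actual MetaDataBase API):
```
#include <cstdint>
#include <map>
#include <mutex>

// Hypothetical key/value store illustrating the pattern used above: a single
// mutex owned next to the container, locked by every reader and writer.
class Store {
  public:
    void set(uint32_t key, int32_t value) {
        std::lock_guard<std::mutex> guard(mLock);
        mItems[key] = value;
    }
    bool find(uint32_t key, int32_t* value) const {
        std::lock_guard<std::mutex> guard(mLock);
        auto it = mItems.find(key);
        if (it == mItems.end()) return false;
        *value = it->second;
        return true;
    }

  private:
    // mutable so const readers (the equivalent of findData()/hasData()) can lock.
    mutable std::mutex mLock;
    std::map<uint32_t, int32_t> mItems;
};
```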
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
index 5dbcd08..6068d68 100644
--- a/media/mtp/MtpDataPacket.cpp
+++ b/media/mtp/MtpDataPacket.cpp
@@ -73,14 +73,14 @@
 }
 
 bool MtpDataPacket::getUInt8(uint8_t& value) {
-    if (mPacketSize - mOffset < sizeof(value))
+    if ((mPacketSize - mOffset < sizeof(value)) || (mOffset >= mBufferSize))
         return false;
     value = mBuffer[mOffset++];
     return true;
 }
 
 bool MtpDataPacket::getUInt16(uint16_t& value) {
-    if (mPacketSize - mOffset < sizeof(value))
+    if ((mPacketSize - mOffset < sizeof(value)) || ((mOffset+1) >= mBufferSize))
         return false;
     int offset = mOffset;
     value = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
@@ -89,7 +89,7 @@
 }
 
 bool MtpDataPacket::getUInt32(uint32_t& value) {
-    if (mPacketSize - mOffset < sizeof(value))
+    if ((mPacketSize - mOffset < sizeof(value)) || ((mOffset+3) >= mBufferSize))
         return false;
     int offset = mOffset;
     value = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
@@ -99,7 +99,7 @@
 }
 
 bool MtpDataPacket::getUInt64(uint64_t& value) {
-    if (mPacketSize - mOffset < sizeof(value))
+    if ((mPacketSize - mOffset < sizeof(value)) || ((mOffset+7) >= mBufferSize))
         return false;
     int offset = mOffset;
     value = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
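Each getUIntNN() change above adds the same second guard: besides checking the size claimed by the packet header, the read must also end inside the buffer that was actually filled. A minimal sketch of that pattern with hypothetical member names:
```
#include <cstddef>
#include <cstdint>

// Hypothetical little-endian reader: mPacketSize is the size advertised by the
// packet header, mBufferSize is how many bytes are really present in mBuffer.
struct PacketReader {
    const uint8_t* mBuffer = nullptr;
    size_t mBufferSize = 0;
    size_t mPacketSize = 0;
    size_t mOffset = 0;

    bool getUInt16(uint16_t& value) {
        // Reject if either the advertised packet or the real buffer is too short.
        if ((mPacketSize - mOffset < sizeof(value)) || (mOffset + 1 >= mBufferSize)) {
            return false;
        }
        value = (uint16_t)mBuffer[mOffset] | ((uint16_t)mBuffer[mOffset + 1] << 8);
        mOffset += sizeof(value);
        return true;
    }
};
```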
diff --git a/media/ndk/fuzzer/Android.bp b/media/ndk/fuzzer/Android.bp
index a3d6a96..ba92b19 100644
--- a/media/ndk/fuzzer/Android.bp
+++ b/media/ndk/fuzzer/Android.bp
@@ -56,6 +56,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of the libmediandk library",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -63,6 +71,11 @@
     name: "ndk_crypto_fuzzer",
     srcs: ["ndk_crypto_fuzzer.cpp"],
     defaults: ["libmediandk_fuzzer_defaults"],
+    fuzz_config: {
+        libfuzzer_options: [
+            "max_len=10000",
+        ],
+    },
 }
 
 cc_fuzz {
@@ -116,3 +129,16 @@
     header_libs: ["libnativewindow_headers",],
     defaults: ["libmediandk_fuzzer_defaults",],
 }
+
+cc_fuzz {
+    name: "ndk_async_codec_fuzzer",
+    srcs: [
+           "ndk_async_codec_fuzzer.cpp",
+           "NdkMediaCodecFuzzerBase.cpp",
+          ],
+    header_libs: [
+           "libnativewindow_headers",
+           "libutils_headers",
+          ],
+    defaults: ["libmediandk_fuzzer_defaults",],
+}
diff --git a/media/ndk/fuzzer/README.md b/media/ndk/fuzzer/README.md
index 0fd08b0..7f6bdd7 100644
--- a/media/ndk/fuzzer/README.md
+++ b/media/ndk/fuzzer/README.md
@@ -8,6 +8,7 @@
 + [ndk_drm_fuzzer](#NdkDrm)
 + [ndk_mediamuxer_fuzzer](#NdkMediaMuxer)
 + [ndk_sync_codec_fuzzer](#NdkSyncCodec)
++ [ndk_async_codec_fuzzer](#NdkAsyncCodec)
 
 # <a name="NdkCrypto"></a> Fuzzer for NdkCrypto
 
@@ -156,3 +157,16 @@
   $ adb sync data
   $ adb shell /data/fuzz/arm64/ndk_sync_codec_fuzzer/ndk_sync_codec_fuzzer
 ```
+
+# <a name="NdkAsyncCodec"></a>Fuzzer for NdkAsyncCodec
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_async_codec_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_async_codec_fuzzer/ndk_async_codec_fuzzer
+```
diff --git a/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp b/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
new file mode 100644
index 0000000..28a38fe
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
@@ -0,0 +1,441 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <NdkMediaCodecFuzzerBase.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <mutex>
+#include <queue>
+#include <thread>
+
+using namespace android;
+using namespace std;
+
+constexpr int32_t kMaxCryptoInfoAPIs = 3;
+constexpr int32_t kMaxNdkCodecAPIs = 5;
+
+template <typename T>
+class CallBackQueue {
+  public:
+    void push(T elem) {
+        bool needsNotify = false;
+        {
+            unique_lock<mutex> lock(mMutex);
+            needsNotify = mQueue.empty();
+            mQueue.push(std::move(elem));
+        }
+        if (needsNotify) {
+            mQueueNotEmptyCondition.notify_one();
+        }
+    }
+
+    T pop() {
+        unique_lock<mutex> lock(mMutex);
+        if (mQueue.empty()) {
+            mQueueNotEmptyCondition.wait(lock, [this]() { return !mQueue.empty(); });
+        }
+        auto result = mQueue.front();
+        mQueue.pop();
+        return result;
+    }
+
+  private:
+    mutex mMutex;
+    std::queue<T> mQueue;
+    std::condition_variable mQueueNotEmptyCondition;
+};
+
+class CallBackHandle {
+  public:
+    CallBackHandle() : mSawError(false), mIsDone(false) {}
+
+    virtual ~CallBackHandle() {}
+
+    void ioThread();
+
+    // Implementation in child class (Decoder/Encoder)
+    virtual void invokeInputBufferAPI(AMediaCodec* codec, int32_t index) {
+        (void)codec;
+        (void)index;
+    }
+    virtual void onFormatChanged(AMediaCodec* codec, AMediaFormat* format) {
+        (void)codec;
+        (void)format;
+    }
+    virtual void receiveError(void) {}
+    virtual void invokeOutputBufferAPI(AMediaCodec* codec, int32_t index,
+                                       AMediaCodecBufferInfo* bufferInfo) {
+        (void)codec;
+        (void)index;
+        (void)bufferInfo;
+    }
+
+    // Keep a queue of all function callbacks.
+    typedef function<void()> IOTask;
+    CallBackQueue<IOTask> mIOQueue;
+    bool mSawError;
+    bool mIsDone;
+};
+
+void CallBackHandle::ioThread() {
+    while (!mIsDone && !mSawError) {
+        auto task = mIOQueue.pop();
+        task();
+    }
+}
+
+static void onAsyncInputAvailable(AMediaCodec* codec, void* userdata, int32_t index) {
+    CallBackHandle* self = (CallBackHandle*)userdata;
+    self->mIOQueue.push([self, codec, index]() { self->invokeInputBufferAPI(codec, index); });
+}
+
+static void onAsyncOutputAvailable(AMediaCodec* codec, void* userdata, int32_t index,
+                                   AMediaCodecBufferInfo* bufferInfo) {
+    CallBackHandle* self = (CallBackHandle*)userdata;
+    AMediaCodecBufferInfo bufferInfoCopy = *bufferInfo;
+    self->mIOQueue.push([self, codec, index, bufferInfoCopy]() {
+        AMediaCodecBufferInfo bc = bufferInfoCopy;
+        self->invokeOutputBufferAPI(codec, index, &bc);
+    });
+}
+
+static void onAsyncFormatChanged(AMediaCodec* codec, void* userdata, AMediaFormat* format) {
+    (void)codec;
+    (void)userdata;
+    (void)format;
+};
+
+static void onAsyncError(AMediaCodec* codec, void* userdata, media_status_t err, int32_t actionCode,
+                         const char* detail) {
+    CallBackHandle* self = (CallBackHandle*)userdata;
+    self->mSawError = true;
+    self->receiveError();
+    (void)codec;
+    (void)err;
+    (void)actionCode;
+    (void)detail;
+};
+
+class NdkAsyncCodecFuzzer : public NdkMediaCodecFuzzerBase, public CallBackHandle {
+  public:
+    NdkAsyncCodecFuzzer(const uint8_t* data, size_t size)
+        : NdkMediaCodecFuzzerBase(), mFdp(data, size) {
+        setFdp(&mFdp);
+        mStopCodec = false;
+        mSawInputEOS = false;
+        mSignalledError = false;
+        mIsEncoder = false;
+        mNumOfFrames = 0;
+        mNumInputFrames = 0;
+    };
+    ~NdkAsyncCodecFuzzer() {
+        mIOThreadPool->stop();
+        delete (mIOThreadPool);
+    };
+
+    void process();
+
+    static void codecOnFrameRendered(AMediaCodec* codec, void* userdata, int64_t mediaTimeUs,
+                                     int64_t systemNano) {
+        (void)codec;
+        (void)userdata;
+        (void)mediaTimeUs;
+        (void)systemNano;
+    };
+    class ThreadPool {
+      public:
+        void start();
+        void queueJob(const std::function<void()>& job);
+        void stop();
+
+      private:
+        void ThreadLoop();
+        bool mShouldTerminate = false;
+        std::vector<std::thread> mThreads;
+        std::mutex mQueueMutex;
+        std::condition_variable mQueueMutexCondition;
+        std::queue<std::function<void()>> mJobs;
+    };
+
+  private:
+    FuzzedDataProvider mFdp;
+    AMediaCodec* mCodec = nullptr;
+    void invokeCodecCryptoInfoAPI();
+    void invokekAsyncCodecAPIs(bool isEncoder);
+    void invokeAsyncCodeConfigAPI();
+    void invokeInputBufferAPI(AMediaCodec* codec, int32_t bufferIndex);
+    void invokeOutputBufferAPI(AMediaCodec* codec, int32_t bufferIndex,
+                               AMediaCodecBufferInfo* bufferInfo);
+    void invokeFormatAPI(AMediaCodec* codec);
+    void receiveError();
+    bool mStopCodec;
+    bool mSawInputEOS;
+    bool mSignalledError;
+    int32_t mNumOfFrames;
+    int32_t mNumInputFrames;
+    mutable Mutex mMutex;
+    bool mIsEncoder;
+    ThreadPool* mIOThreadPool = new ThreadPool();
+};
+
+void NdkAsyncCodecFuzzer::ThreadPool::start() {
+    const uint32_t numThreads = std::thread::hardware_concurrency();
+    mThreads.resize(numThreads);
+    for (uint32_t i = 0; i < numThreads; ++i) {
+        mThreads.at(i) = std::thread(&ThreadPool::ThreadLoop, this);
+    }
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::ThreadLoop() {
+    while (true) {
+        std::function<void()> job;
+        {
+            std::unique_lock<std::mutex> lock(mQueueMutex);
+            mQueueMutexCondition.wait(lock, [this] { return !mJobs.empty() || mShouldTerminate; });
+            if (mShouldTerminate) {
+                return;
+            }
+            job = mJobs.front();
+            mJobs.pop();
+        }
+        job();
+    }
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::queueJob(const std::function<void()>& job) {
+    {
+        std::unique_lock<std::mutex> lock(mQueueMutex);
+        mJobs.push(job);
+    }
+    mQueueMutexCondition.notify_one();
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::stop() {
+    {
+        std::unique_lock<std::mutex> lock(mQueueMutex);
+        mShouldTerminate = true;
+    }
+    mQueueMutexCondition.notify_all();
+    for (std::thread& active_thread : mThreads) {
+        active_thread.join();
+    }
+    mThreads.clear();
+}
+
+void NdkAsyncCodecFuzzer::receiveError(void) {
+    mSignalledError = true;
+}
+
+void NdkAsyncCodecFuzzer::invokeInputBufferAPI(AMediaCodec* codec, int32_t bufferIndex) {
+    size_t bufferSize = 0;
+    Mutex::Autolock autoLock(mMutex);
+    if (mSignalledError) {
+        CallBackHandle::mSawError = true;
+        return;
+    }
+    if (mStopCodec || bufferIndex < 0 || mSawInputEOS) {
+        return;
+    }
+
+    uint8_t* buffer = AMediaCodec_getInputBuffer(codec, bufferIndex, &bufferSize);
+    if (buffer) {
+        std::vector<uint8_t> bytesRead = mFdp.ConsumeBytes<uint8_t>(
+                std::min(mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes), bufferSize));
+        memcpy(buffer, bytesRead.data(), bytesRead.size());
+        bufferSize = bytesRead.size();
+    } else {
+        mSignalledError = true;
+        return;
+    }
+
+    uint32_t flag = 0;
+    if (!bufferSize || mNumInputFrames == mNumOfFrames) {
+        flag |= AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;
+        mSawInputEOS = true;
+    }
+    AMediaCodec_queueInputBuffer(codec, bufferIndex, 0 /* offset */, bufferSize, 0 /* time */,
+                                 flag);
+    mNumInputFrames++;
+}
+
+void NdkAsyncCodecFuzzer::invokeOutputBufferAPI(AMediaCodec* codec, int32_t bufferIndex,
+                                                AMediaCodecBufferInfo* bufferInfo) {
+    size_t bufferSize = 0;
+    Mutex::Autolock autoLock(mMutex);
+
+    if (mSignalledError) {
+        CallBackHandle::mSawError = true;
+        return;
+    }
+
+    if (mStopCodec || bufferIndex < 0 || mIsDone) {
+        return;
+    }
+
+    if (!mIsEncoder) {
+        (void)AMediaCodec_getOutputBuffer(codec, bufferIndex, &bufferSize);
+    }
+    AMediaCodec_releaseOutputBuffer(codec, bufferIndex, mFdp.ConsumeBool());
+    mIsDone = (0 != (bufferInfo->flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM));
+}
+
+void NdkAsyncCodecFuzzer::invokeFormatAPI(AMediaCodec* codec) {
+    AMediaFormat* codecFormat = nullptr;
+    if (mFdp.ConsumeBool()) {
+        codecFormat = AMediaCodec_getInputFormat(codec);
+    } else {
+        codecFormat = AMediaCodec_getOutputFormat(codec);
+    }
+    if (codecFormat) {
+        AMediaFormat_delete(codecFormat);
+    }
+}
+
+void NdkAsyncCodecFuzzer::invokekAsyncCodecAPIs(bool isEncoder) {
+    ANativeWindow* nativeWindow = nullptr;
+
+    if (mFdp.ConsumeBool()) {
+        AMediaCodec_createInputSurface(mCodec, &nativeWindow);
+    }
+
+    if (AMEDIA_OK == AMediaCodec_configure(mCodec, getCodecFormat(), nativeWindow,
+                                           nullptr /* crypto */,
+                                           (isEncoder ? AMEDIACODEC_CONFIGURE_FLAG_ENCODE : 0))) {
+        mNumOfFrames = mFdp.ConsumeIntegralInRange<size_t>(kMinIterations, kMaxIterations);
+        // Configure codecs to run in async mode.
+        AMediaCodecOnAsyncNotifyCallback callBack = {onAsyncInputAvailable, onAsyncOutputAvailable,
+                                                     onAsyncFormatChanged, onAsyncError};
+        AMediaCodec_setAsyncNotifyCallback(mCodec, callBack, this);
+        mIOThreadPool->queueJob([this] { CallBackHandle::ioThread(); });
+
+        AMediaCodec_start(mCodec);
+        sleep(5);
+        int32_t count = 0;
+        while (++count <= mNumOfFrames) {
+            int32_t ndkcodecAPI =
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxNdkCodecAPIs);
+            switch (ndkcodecAPI) {
+                case 0: {  // get input or output format
+                    invokeFormatAPI(mCodec);
+                    break;
+                }
+                case 1: {
+                    AMediaCodec_signalEndOfInputStream(mCodec);
+                    mSawInputEOS = true;
+                    break;
+                }
+                case 2: {  // set parameters
+                    // Create a new parameter and set
+                    AMediaFormat* params = AMediaFormat_new();
+                    AMediaFormat_setInt32(
+                            params, "video-bitrate",
+                            mFdp.ConsumeIntegralInRange<size_t>(kMinIntKeyValue, kMaxIntKeyValue));
+                    AMediaCodec_setParameters(mCodec, params);
+                    AMediaFormat_delete(params);
+                    break;
+                }
+                case 3: {  // flush codec
+                    AMediaCodec_flush(mCodec);
+                    if (mFdp.ConsumeBool()) {
+                        AMediaCodec_start(mCodec);
+                    }
+                    break;
+                }
+                case 4: {
+                    char* name = nullptr;
+                    AMediaCodec_getName(mCodec, &name);
+                    AMediaCodec_releaseName(mCodec, name);
+                    break;
+                }
+                case 5:
+                default: {
+                    std::vector<uint8_t> userData = mFdp.ConsumeBytes<uint8_t>(
+                            mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                    AMediaCodecOnFrameRendered callback = codecOnFrameRendered;
+                    AMediaCodec_setOnFrameRenderedCallback(mCodec, callback, userData.data());
+                    break;
+                }
+            }
+        }
+        {
+            Mutex::Autolock autoLock(mMutex);
+            mStopCodec = true;
+            AMediaCodec_stop(mCodec);
+        }
+    }
+
+    if (nativeWindow) {
+        ANativeWindow_release(nativeWindow);
+    }
+}
+
+void NdkAsyncCodecFuzzer::invokeAsyncCodeConfigAPI() {
+    mIOThreadPool->start();
+
+    while (mFdp.remaining_bytes() > 0) {
+        mIsEncoder = mFdp.ConsumeBool();
+        mCodec = createCodec(mIsEncoder, mFdp.ConsumeBool() /* isCodecForClient */);
+        if (mCodec) {
+            invokekAsyncCodecAPIs(mIsEncoder);
+            AMediaCodec_delete(mCodec);
+        }
+    }
+    mIOThreadPool->stop();
+}
+
+void NdkAsyncCodecFuzzer::invokeCodecCryptoInfoAPI() {
+    while (mFdp.remaining_bytes() > 0) {
+        AMediaCodecCryptoInfo* cryptoInfo = getAMediaCodecCryptoInfo();
+        int32_t ndkCryptoInfoAPI =
+                mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxCryptoInfoAPIs);
+        switch (ndkCryptoInfoAPI) {
+            case 0: {
+                size_t sizes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getEncryptedBytes(cryptoInfo, sizes);
+                break;
+            }
+            case 1: {
+                size_t sizes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getClearBytes(cryptoInfo, sizes);
+                break;
+            }
+            case 2: {
+                uint8_t bytes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getIV(cryptoInfo, bytes);
+                break;
+            }
+            case 3:
+            default: {
+                uint8_t bytes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getKey(cryptoInfo, bytes);
+                break;
+            }
+        }
+        AMediaCodecCryptoInfo_delete(cryptoInfo);
+    }
+}
+
+void NdkAsyncCodecFuzzer::process() {
+    if (mFdp.ConsumeBool()) {
+        invokeCodecCryptoInfoAPI();
+    } else {
+        invokeAsyncCodeConfigAPI();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    NdkAsyncCodecFuzzer ndkAsyncCodecFuzzer(data, size);
+    ndkAsyncCodecFuzzer.process();
+    return 0;
+}
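The async fuzzer above wires the NDK codec callbacks to a worker thread through CallBackQueue/ioThread(). A trimmed-down, self-contained sketch of that producer/consumer shape (hypothetical TaskQueue type; the real callbacks also carry the codec, buffer index and buffer info):
```
#include <condition_variable>
#include <cstdio>
#include <functional>
#include <mutex>
#include <queue>
#include <thread>

// Minimal blocking queue plus worker loop, mirroring CallBackQueue/ioThread().
class TaskQueue {
  public:
    void push(std::function<void()> task) {
        {
            std::lock_guard<std::mutex> lock(mMutex);
            mTasks.push(std::move(task));
        }
        mNotEmpty.notify_one();
    }
    std::function<void()> pop() {
        std::unique_lock<std::mutex> lock(mMutex);
        mNotEmpty.wait(lock, [this] { return !mTasks.empty(); });
        auto task = std::move(mTasks.front());
        mTasks.pop();
        return task;
    }

  private:
    std::mutex mMutex;
    std::condition_variable mNotEmpty;
    std::queue<std::function<void()>> mTasks;
};

int main() {
    TaskQueue queue;
    std::thread worker([&] {
        for (int i = 0; i < 2; ++i) queue.pop()();  // drain exactly two tasks, then exit
    });
    // A codec callback pushes work instead of touching buffers on the callback thread.
    queue.push([] { printf("input buffer available\n"); });
    queue.push([] { printf("output buffer available\n"); });
    worker.join();
    return 0;
}
```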
diff --git a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
index 2b22f0f..a759ae7 100644
--- a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
@@ -20,10 +20,12 @@
 constexpr size_t kMaxString = 256;
 constexpr size_t kMinBytes = 0;
 constexpr size_t kMaxBytes = 1000;
+constexpr size_t kMaxRuns = 100;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
     AMediaUUID uuid = {};
+    size_t apiCount = 0;
     int32_t maxLen = fdp.ConsumeIntegralInRange<size_t>(kMinBytes, (size_t)sizeof(AMediaUUID));
     for (size_t idx = 0; idx < maxLen; ++idx) {
         uuid[idx] = fdp.ConsumeIntegral<uint8_t>();
@@ -31,7 +33,14 @@
     std::vector<uint8_t> initData =
             fdp.ConsumeBytes<uint8_t>(fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
     AMediaCrypto* crypto = AMediaCrypto_new(uuid, initData.data(), initData.size());
-    while (fdp.remaining_bytes()) {
+    /*
+     * The AMediaCrypto_isCryptoSchemeSupported API doesn't consume any input bytes, so
+     * each iteration that selects it spends only the single byte used by PickValueInArray().
+     * As a result, on larger inputs, AMediaCrypto_isCryptoSchemeSupported can run a very
+     * large number of times, potentially causing a timeout crash. Therefore, to prevent
+     * this issue, the while loop is limited to kMaxRuns iterations.
+     */
+    while (fdp.remaining_bytes() && ++apiCount <= kMaxRuns) {
         auto invokeNdkCryptoFuzzer = fdp.PickValueInArray<const std::function<void()>>({
                 [&]() {
                     AMediaCrypto_requiresSecureDecoderComponent(
diff --git a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
index c19ea13..23e2eaf 100644
--- a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
@@ -18,6 +18,7 @@
 #include <fcntl.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/NdkMediaFormat.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <sys/mman.h>
 #include <unistd.h>
 #include <utils/Log.h>
@@ -176,11 +177,13 @@
 constexpr size_t kMaxBytes = 1000;
 constexpr size_t kMinChoice = 0;
 constexpr size_t kMaxChoice = 9;
+const size_t kMaxIteration = android::AMessage::maxAllowedEntries();
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
     AMediaFormat* mediaFormat = AMediaFormat_new();
-    while (fdp.remaining_bytes()) {
+    std::vector<std::string> nameCollection;
+    while (fdp.remaining_bytes() && nameCollection.size() < kMaxIteration) {
         const char* name = nullptr;
         std::string nameString;
         if (fdp.ConsumeBool()) {
@@ -190,6 +193,11 @@
                             : fdp.ConsumeRandomLengthString(
                                       fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
             name = nameString.c_str();
+            std::vector<std::string>::iterator it =
+                    find(nameCollection.begin(), nameCollection.end(), name);
+            if (it == nameCollection.end()) {
+                nameCollection.push_back(name);
+            }
         }
         switch (fdp.ConsumeIntegralInRange<int32_t>(kMinChoice, kMaxChoice)) {
             case 0: {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
index 9e0d5e4..be8b2b4 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
@@ -322,7 +322,7 @@
             ByteBuffer inputCodecBuffer = mediaCodec.getInputBuffer(inputBufferId);
             BufferInfo bufInfo;
             if (mNumInFramesProvided >= mNumInFramesRequired) {
-                Log.i(TAG, "Input frame limit reached");
+                Log.i(TAG, "Input frame limit reached; frames provided: " + mNumInFramesProvided);
                 mIndex = mInputBufferInfo.size() - 1;
                 bufInfo = mInputBufferInfo.get(mIndex);
                 if ((bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
index 84554d3..d182f88 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
@@ -21,6 +21,8 @@
 import androidx.annotation.NonNull;
 import java.nio.ByteBuffer;
 import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.CompletableFuture;
 
 public class FrameReleaseQueue {
     private static final String TAG = "FrameReleaseQueue";
@@ -28,7 +30,7 @@
     private MediaCodec mCodec;
     private LinkedBlockingQueue<FrameInfo> mFrameInfoQueue;
     private ReleaseThread mReleaseThread;
-    private boolean doFrameRelease = false;
+    private AtomicBoolean doFrameRelease = new AtomicBoolean(false);
     private boolean mRender = false;
     private int mWaitTime = 40; // milliseconds per frame
     private int mWaitTimeCorrection = 0;
@@ -49,46 +51,47 @@
 
     private class ReleaseThread extends Thread {
         public void run() {
-            int nextReleaseTime = 0;
+            long nextReleaseTime = 0;
             int loopCount = 0;
-            while (doFrameRelease || mFrameInfoQueue.size() > 0) {
+            while (doFrameRelease.get() || mFrameInfoQueue.size() > 0) {
                 FrameInfo curFrameInfo = mFrameInfoQueue.peek();
                 if (curFrameInfo == null) {
                     nextReleaseTime += mWaitTime;
                 } else {
-                    if (curFrameInfo.displayTime == 0) {
+                    if (firstReleaseTime == -1 || curFrameInfo.displayTime <= 0) {
                         // first frame of loop
                         firstReleaseTime = getCurSysTime();
                         nextReleaseTime = firstReleaseTime + mWaitTime;
-                        popAndRelease(curFrameInfo, true);
-                    } else if (!doFrameRelease && mFrameInfoQueue.size() == 1) {
+                        popAndRelease(true);
+                    } else if (!doFrameRelease.get() && mFrameInfoQueue.size() == 1) {
                         // EOS
                         Log.i(TAG, "EOS");
-                        popAndRelease(curFrameInfo, false);
+                        popAndRelease(false);
                     } else {
                         nextReleaseTime += mWaitTime;
                         int curSysTime = getCurSysTime();
                         int curMediaTime = curSysTime - firstReleaseTime;
                         while (curFrameInfo != null && curFrameInfo.displayTime > 0 &&
                                 curFrameInfo.displayTime <= curMediaTime) {
-                            if (!((curMediaTime - curFrameInfo.displayTime) < THRESHOLD_TIME)) {
+                            if (!((curMediaTime - curFrameInfo.displayTime) <= THRESHOLD_TIME)) {
                                 Log.d(TAG, "Dropping expired frame " + curFrameInfo.number +
                                     " display time " + curFrameInfo.displayTime +
                                     " current time " + curMediaTime);
-                                popAndRelease(curFrameInfo, false);
+                                popAndRelease(false);
                             } else {
-                                popAndRelease(curFrameInfo, true);
+                                popAndRelease(true);
                             }
                             curFrameInfo = mFrameInfoQueue.peek();
                         }
                         if (curFrameInfo != null && curFrameInfo.displayTime > curMediaTime) {
                             if ((curFrameInfo.displayTime - curMediaTime) < THRESHOLD_TIME) {
-                                popAndRelease(curFrameInfo, true);
+                                // release the frame now; its display time is within the threshold
+                                popAndRelease(true);
                             }
                         }
                     }
                 }
-                int sleepTime = nextReleaseTime - getCurSysTime();
+                long sleepTime = nextReleaseTime - getCurSysTime();
                 if (sleepTime > 0) {
                     try {
                         mReleaseThread.sleep(sleepTime);
@@ -109,7 +112,7 @@
     public FrameReleaseQueue(boolean render, int frameRate) {
         this.mFrameInfoQueue = new LinkedBlockingQueue();
         this.mReleaseThread = new ReleaseThread();
-        this.doFrameRelease = true;
+        this.doFrameRelease.set(true);
         this.mRender = render;
         this.mWaitTime = 1000 / frameRate; // wait time in milliseconds per frame
         int waitTimeRemainder = 1000 % frameRate;
@@ -146,24 +149,26 @@
         return (int)(System.nanoTime()/1000000);
     }
 
-    private void popAndRelease(FrameInfo curFrameInfo, boolean renderThisFrame) {
+    private void popAndRelease(boolean renderThisFrame) {
+        final boolean actualRender = (renderThisFrame && mRender);
         try {
-            curFrameInfo = mFrameInfoQueue.take();
+            final FrameInfo curFrameInfo = mFrameInfoQueue.take();
+
+            CompletableFuture.runAsync(() -> {
+                try {
+                    mCodec.releaseOutputBuffer(curFrameInfo.bufferId, actualRender);
+                } catch (IllegalStateException e) {
+                    e.printStackTrace();
+                }
+            });
+
         } catch (InterruptedException e) {
             Log.e(TAG, "Threw InterruptedException on take");
         }
-        boolean actualRender = (renderThisFrame && mRender);
-        try {
-            mCodec.releaseOutputBuffer(curFrameInfo.bufferId, actualRender);
-        } catch (IllegalStateException e) {
-            Log.e(TAG,
-                    "Threw IllegalStateException on releaseOutputBuffer for frame "
-                            + curFrameInfo.number);
-        }
     }
 
     public void stopFrameRelease() {
-        doFrameRelease = false;
+        doFrameRelease.set(false);
         try {
             mReleaseThread.join();
             Log.i(TAG, "Joined frame release thread");
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 7abb0b6..07dac5e 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -179,3 +179,8 @@
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
+
+cc_library_headers {
+    name: "mediautils_headers",
+    export_include_dirs: ["include", "."],
+}
diff --git a/media/utils/include/mediautils/SharedMemoryAllocator.h b/media/utils/include/mediautils/SharedMemoryAllocator.h
index 79621e2..4243b9c 100644
--- a/media/utils/include/mediautils/SharedMemoryAllocator.h
+++ b/media/utils/include/mediautils/SharedMemoryAllocator.h
@@ -138,7 +138,7 @@
 template <typename Allocator>
 class ScopedAllocator {
   public:
-    static constexpr size_t alignment() { return Allocator::alignment(); }
+    static size_t alignment() { return Allocator::alignment(); }
 
     explicit ScopedAllocator(const std::shared_ptr<Allocator>& allocator) : mAllocator(allocator) {}
 
@@ -218,7 +218,7 @@
 template <typename Allocator, typename Policy>
 class PolicyAllocator {
   public:
-    static constexpr size_t alignment() { return Allocator::alignment(); }
+    static size_t alignment() { return Allocator::alignment(); }
 
     PolicyAllocator(Allocator allocator, Policy policy)
         : mAllocator(allocator), mPolicy(std::move(policy)) {}
@@ -277,7 +277,7 @@
         std::string name;
         size_t allocation_number;
     };
-    static constexpr size_t alignment() { return Allocator::alignment(); }
+    static size_t alignment() { return Allocator::alignment(); }
 
     SnoopingAllocator(Allocator allocator, std::string_view name)
         : mName(name), mAllocator(std::move(allocator)) {}
@@ -362,16 +362,19 @@
 template <class PrimaryAllocator, class SecondaryAllocator>
 class FallbackAllocator {
   public:
-    static_assert(PrimaryAllocator::alignment() == SecondaryAllocator::alignment());
     static_assert(shared_allocator_impl::has_owns<PrimaryAllocator>);
 
-    static constexpr size_t alignment() { return PrimaryAllocator::alignment(); }
+    static size_t alignment() { return PrimaryAllocator::alignment(); }
 
     FallbackAllocator(const PrimaryAllocator& primary, const SecondaryAllocator& secondary)
-        : mPrimary(primary), mSecondary(secondary) {}
+        : mPrimary(primary), mSecondary(secondary) {
+        verify_alignment();
+    }
 
     // Default construct primary and secondary allocator
-    FallbackAllocator() = default;
+    FallbackAllocator() {
+        verify_alignment();
+    }
 
     template <typename T>
     AllocationType allocate(T&& request) {
@@ -414,6 +417,10 @@
     }
 
   private:
+    void verify_alignment() {
+        LOG_ALWAYS_FATAL_IF(PrimaryAllocator::alignment() != SecondaryAllocator::alignment(),
+                            "PrimaryAllocator::alignment() != SecondaryAllocator::alignment()");
+    }
     [[no_unique_address]] PrimaryAllocator mPrimary;
     [[no_unique_address]] SecondaryAllocator mSecondary;
 };
@@ -423,7 +430,7 @@
 template <typename Allocator>
 class IndirectAllocator {
   public:
-    static constexpr size_t alignment() { return Allocator::alignment(); }
+    static size_t alignment() { return Allocator::alignment(); }
 
     explicit IndirectAllocator(const std::shared_ptr<Allocator>& allocator)
         : mAllocator(allocator) {}
@@ -449,7 +456,7 @@
 // a shared memory mapped anonymous file) as allocations.
 class MemoryHeapBaseAllocator {
   public:
-    static constexpr size_t alignment() { return 4096; /* PAGE_SIZE */ }
+    static size_t alignment() { return kPageSize; }
     static constexpr unsigned FLAGS = 0;  // default flags
 
     template <typename T>
@@ -475,5 +482,7 @@
         // allocated by us, the underlying IMemoryHeap was a MemoryHeapBase
         static_cast<MemoryHeapBase&>(*heap).dispose();
     }
+  private:
+    static inline const size_t kPageSize = getpagesize();
 };
 }  // namespace android::mediautils
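The alignment() methods above drop constexpr because the allocator now asks the OS for the page size at runtime instead of hard-coding 4096 (arm64 devices may use 4k, 16k or 64k pages, per the test change below). A standalone sketch of the same idea, assuming POSIX getpagesize():
```
#include <unistd.h>

#include <cstddef>
#include <cstdio>

// Hypothetical allocator fragment: alignment() cannot be constexpr any more
// because getpagesize() is a runtime call.
class PageAlignedAllocator {
  public:
    static size_t alignment() { return kPageSize; }

  private:
    static inline const size_t kPageSize = getpagesize();
};

int main() {
    printf("alignment: %zu bytes\n", PageAlignedAllocator::alignment());
    return 0;
}
```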
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 0689083..3fdc6eb 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -200,7 +200,10 @@
     name: "timerthread_tests",
 
     defaults: ["libmediautils_tests_defaults"],
-
+    // TODO(b/270180838)
+    test_options: {
+        unit_test: false,
+    },
     srcs: [
         "TimerThread-test.cpp",
     ],
diff --git a/media/utils/tests/shared_memory_allocator_tests.cpp b/media/utils/tests/shared_memory_allocator_tests.cpp
index 11bc72a..c164cbd 100644
--- a/media/utils/tests/shared_memory_allocator_tests.cpp
+++ b/media/utils/tests/shared_memory_allocator_tests.cpp
@@ -19,21 +19,25 @@
 #include <gtest/gtest.h>
 #include <mediautils/SharedMemoryAllocator.h>
 #include <sys/stat.h>
+#include <unistd.h>
 #include <utils/Log.h>
 
 using namespace android;
 using namespace android::mediautils;
 
 namespace {
+const size_t kPageSize = getpagesize();
+constexpr size_t kMaxPageSize = 65536;  // arm64 supports 4k, 16k and 64k pages
+
 void validate_block(const AllocationType& block) {
     ASSERT_TRUE(block != nullptr);
-    memset(block->unsecurePointer(), 10, 4096);
+    memset(block->unsecurePointer(), 10, kPageSize);
     EXPECT_EQ(*(static_cast<char*>(block->unsecurePointer()) + 100), static_cast<char>(10));
 }
 
 template <size_t N = 0, bool FatalOwn = true>
 struct ValidateForwarding {
-    static constexpr size_t alignment() { return 1337; }
+    static size_t alignment() { return 1337; }
 
     bool owns(const AllocationType& allocation) const {
         if (allocation == owned) return true;
@@ -48,9 +52,9 @@
 
     static inline size_t deallocate_all_count = 0;
     static inline const AllocationType owned =
-            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{kMaxPageSize});
     static inline const AllocationType not_owned =
-            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{kMaxPageSize});
     static inline const std::string dump_string = std::to_string(N) + "Test Dump Forwarding";
 };
 
@@ -64,17 +68,14 @@
         shared_allocator_impl::has_deallocate_all<SnoopingAllocator<MemoryHeapBaseAllocator>> ==
         true);
 static_assert(
-        shared_allocator_impl::has_owns<
-                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
-        true);
+        shared_allocator_impl::has_owns<PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
+                                                        SizePolicy<kMaxPageSize>>> == true);
 static_assert(
-        shared_allocator_impl::has_dump<
-                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
-        true);
-static_assert(
-        shared_allocator_impl::has_deallocate_all<
-                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
-        true);
+        shared_allocator_impl::has_dump<PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
+                                                        SizePolicy<kMaxPageSize>>> == true);
+static_assert(shared_allocator_impl::has_deallocate_all<PolicyAllocator<
+                      SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<kMaxPageSize>>> ==
+              true);
 static_assert(shared_allocator_impl::has_owns<
                       FallbackAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
                                         SnoopingAllocator<MemoryHeapBaseAllocator>>> == true);
@@ -93,7 +94,7 @@
     const auto memory = allocator.allocate(BasicAllocRequest{500});
     ASSERT_TRUE(memory != nullptr);
     const auto fd = dup(memory->getMemory()->getHeapID());
-    EXPECT_EQ(memory->size(), static_cast<unsigned>(4096));
+    EXPECT_EQ(memory->size(), static_cast<unsigned>(kPageSize));
     EXPECT_EQ(memory->size(), memory->getMemory()->getSize());
     validate_block(memory);
     allocator.deallocate(memory);
@@ -108,7 +109,7 @@
 }
 
 TEST(shared_memory_allocator_tests, mheapbase_allocator_independence) {
-    static_assert(MemoryHeapBaseAllocator::alignment() == 4096);
+    ASSERT_EQ(MemoryHeapBaseAllocator::alignment(), kPageSize);
     MemoryHeapBaseAllocator allocator;
     const auto first_memory = allocator.allocate(BasicAllocRequest{500});
     const auto second_memory = allocator.allocate(BasicAllocRequest{500});
@@ -120,8 +121,8 @@
 }
 
 TEST(shared_memory_allocator_tests, snooping_allocator) {
-    static_assert(SnoopingAllocator<ValidateForwarding<0>>::alignment() ==
-                  ValidateForwarding<0>::alignment());
+    ASSERT_EQ(SnoopingAllocator<ValidateForwarding<0>>::alignment(),
+              ValidateForwarding<0>::alignment());
 
     SnoopingAllocator<MemoryHeapBaseAllocator> allocator{"allocator"};
     const auto first_memory = allocator.allocate(NamedAllocRequest{{500}, "allocate_1"});
@@ -165,29 +166,29 @@
 // TODO generic policy test
 TEST(shared_memory_allocator_tests, size_policy_allocator_enforcement) {
     PolicyAllocator allocator{MemoryHeapBaseAllocator{},
-                              SizePolicy<4096 * 7, 4096 * 2, 4096 * 4>{}};
+                              SizePolicy<kMaxPageSize * 7, kMaxPageSize * 2, kMaxPageSize * 4>{}};
     // Violate max size
-    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 5}) == nullptr);
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{kMaxPageSize * 5}) == nullptr);
     // Violate min alloc size
-    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096}) == nullptr);
-    const auto first_memory = allocator.allocate(BasicAllocRequest{4096 * 4});
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{kMaxPageSize}) == nullptr);
+    const auto first_memory = allocator.allocate(BasicAllocRequest{kMaxPageSize * 4});
     validate_block(first_memory);
     // Violate pool size
-    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 4}) == nullptr);
-    const auto second_memory = allocator.allocate(BasicAllocRequest{4096 * 3});
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{kMaxPageSize * 4}) == nullptr);
+    const auto second_memory = allocator.allocate(BasicAllocRequest{kMaxPageSize * 3});
     validate_block(second_memory);
     allocator.deallocate(second_memory);
     // Check pool size update after deallocation
-    const auto new_second_memory = allocator.allocate(BasicAllocRequest{4096 * 2});
+    const auto new_second_memory = allocator.allocate(BasicAllocRequest{kMaxPageSize * 2});
     validate_block(new_second_memory);
 }
 
 TEST(shared_memory_allocator_tests, indirect_allocator) {
-    static_assert(IndirectAllocator<ValidateForwarding<0>>::alignment() ==
-                  ValidateForwarding<0>::alignment());
+    ASSERT_EQ(IndirectAllocator<ValidateForwarding<0>>::alignment(),
+              ValidateForwarding<0>::alignment());
     const auto allocator_handle = std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>();
     IndirectAllocator allocator{allocator_handle};
-    const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+    const auto memory = allocator.allocate(NamedAllocRequest{{kPageSize}, "allocation"});
     EXPECT_TRUE(allocator_handle->owns(memory));
     EXPECT_TRUE(allocator_handle->getAllocations().size() == 1);
     allocator.deallocate(memory);
@@ -199,35 +200,37 @@
     // Test appropriate forwarding of allocator, deallocate
     const auto primary_allocator =
             std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("allocator");
-    PolicyAllocator allocator{IndirectAllocator(primary_allocator), SizePolicy<4096>{}};
-    const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+    PolicyAllocator allocator{IndirectAllocator(primary_allocator), SizePolicy<kMaxPageSize>{}};
+    const auto memory = allocator.allocate(NamedAllocRequest{{kPageSize}, "allocation"});
     EXPECT_TRUE(primary_allocator->owns(memory));
     const auto& allocations = primary_allocator->getAllocations();
     EXPECT_TRUE(allocations.size() == 1);
     allocator.deallocate(memory);
     EXPECT_TRUE(allocations.size() == 0);
-    const auto memory2 = allocator.allocate(NamedAllocRequest{{4096}, "allocation_2"});
+    const auto memory2 = allocator.allocate(NamedAllocRequest{{kPageSize}, "allocation_2"});
     EXPECT_TRUE(allocations.size() == 1);
     EXPECT_TRUE(primary_allocator->owns(memory2));
     allocator.deallocate(memory2);
     EXPECT_FALSE(primary_allocator->owns(memory2));
     EXPECT_TRUE(allocations.size() == 0);
     // Test appropriate forwarding of own, dump, alignment, deallocate_all
-    PolicyAllocator allocator2{ValidateForwarding<0>{}, SizePolicy<4096>{}};
+    PolicyAllocator allocator2{ValidateForwarding<0>{}, SizePolicy<kMaxPageSize>{}};
     EXPECT_TRUE(allocator2.owns(ValidateForwarding<0>::owned));
     EXPECT_FALSE(allocator2.owns(ValidateForwarding<0>::not_owned));
     EXPECT_TRUE(allocator2.dump().find(ValidateForwarding<0>::dump_string) != std::string::npos);
-    static_assert(decltype(allocator2)::alignment() == ValidateForwarding<0>::alignment());
+    ASSERT_EQ(decltype(allocator2)::alignment(), ValidateForwarding<0>::alignment());
     size_t prev = ValidateForwarding<0>::deallocate_all_count;
     allocator2.deallocate_all();
     EXPECT_EQ(ValidateForwarding<0>::deallocate_all_count, prev + 1);
 }
 
 TEST(shared_memory_allocator_tests, snooping_allocator_nullptr) {
-    SnoopingAllocator allocator{PolicyAllocator{MemoryHeapBaseAllocator{}, SizePolicy<4096 * 2>{}}};
-    const auto memory = allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+    SnoopingAllocator allocator{
+            PolicyAllocator{MemoryHeapBaseAllocator{}, SizePolicy<kMaxPageSize * 2>{}}};
+    const auto memory = allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_1"});
     validate_block(memory);
-    ASSERT_TRUE(allocator.allocate(NamedAllocRequest{{5000}, "allocation_2"}) == nullptr);
+    ASSERT_TRUE(allocator.allocate(NamedAllocRequest{{kMaxPageSize * 3}, "allocation_2"}) ==
+                nullptr);
     const auto& allocations = allocator.getAllocations();
     EXPECT_EQ(allocations.size(), 1ul);
     for (const auto& [key, val] : allocations) {
@@ -240,23 +243,26 @@
 TEST(shared_memory_allocator_tests, fallback_allocator) {
     // Construct Fallback Allocator
     const auto primary_allocator = std::make_shared<
-            SnoopingAllocator<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>>>(
-            PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>{}, "primary_allocator");
+            SnoopingAllocator<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<kMaxPageSize>>>>(
+            PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<kMaxPageSize>>{},
+            "primary_allocator");
     const auto secondary_allocator =
             std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("secondary_allocator");
 
     FallbackAllocator fallback_allocator{SnoopingAllocator{IndirectAllocator{primary_allocator}},
                                          SnoopingAllocator{IndirectAllocator{secondary_allocator}}};
-    static_assert(decltype(fallback_allocator)::alignment() == 4096);
+    ASSERT_EQ(decltype(fallback_allocator)::alignment(), kPageSize);
     // Basic Allocation Test
-    const auto memory = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+    const auto memory =
+            fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_1"});
     validate_block(memory);
     // Correct allocator selected
     EXPECT_TRUE(fallback_allocator.owns(memory));
     EXPECT_TRUE(primary_allocator->owns(memory));
     EXPECT_FALSE(secondary_allocator->owns(memory));
     // Test fallback allocation
-    const auto memory2 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_2"});
+    const auto memory2 =
+            fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_2"});
     validate_block(memory2);
     // Correct allocator selected
     EXPECT_TRUE(fallback_allocator.owns(memory2));
@@ -276,7 +282,8 @@
     EXPECT_TRUE(primary_allocator->getAllocations().size() == 0ul);
     EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
     // Appropriate fallback after deallocation
-    const auto memory3 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_3"});
+    const auto memory3 =
+            fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_3"});
     EXPECT_TRUE(fallback_allocator.owns(memory3));
     EXPECT_TRUE(primary_allocator->owns(memory3));
     EXPECT_FALSE(secondary_allocator->owns(memory3));
@@ -285,7 +292,8 @@
     EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
     fallback_allocator.deallocate(memory2);
     EXPECT_TRUE(secondary_allocator->getAllocations().size() == 0ul);
-    const auto memory4 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_4"});
+    const auto memory4 =
+            fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_4"});
     EXPECT_TRUE(fallback_allocator.owns(memory4));
     EXPECT_FALSE(primary_allocator->owns(memory4));
     EXPECT_TRUE(secondary_allocator->owns(memory4));
@@ -311,7 +319,7 @@
     EXPECT_FALSE(forward_test.owns(Alloc1::not_owned));
     EXPECT_FALSE(forward_test.owns(Alloc2::not_owned));
     // Test alignment forwarding
-    static_assert(FallbackAllocator<Alloc1, Alloc2>::alignment() == Alloc1::alignment());
+    ASSERT_EQ(decltype(forward_test)::alignment(), Alloc1::alignment());
     // Test deallocate_all forwarding
     size_t prev1 = Alloc1::deallocate_all_count;
     size_t prev2 = Alloc2::deallocate_all_count;
@@ -343,8 +351,8 @@
     }
     EXPECT_EQ(allocations.size(), 0ul);
     // Test forwarding
-    static_assert(ScopedAllocator<ValidateForwarding<0>>::alignment() ==
-                  ValidateForwarding<0>::alignment());
+    ASSERT_EQ(ScopedAllocator<ValidateForwarding<0>>::alignment(),
+              ValidateForwarding<0>::alignment());
     ScopedAllocator<ValidateForwarding<0>> forwarding{};
     EXPECT_EQ(forwarding.dump(), ValidateForwarding<0>::dump_string);
 }
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 0cd6243..7663250 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -138,10 +138,57 @@
     ],
 }
 
-cc_library_shared {
+cc_defaults {
+    name: "libaudioflinger_dependencies",
+
+    shared_libs: [
+        "audioflinger-aidl-cpp",
+        "audioclient-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "effect-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libactivitymanager_aidl",
+        "libaudioflinger_datapath",
+        "libaudioflinger_fastpath",
+        "libaudioflinger_timing",
+        "libaudioflinger_utils",
+        "libaudiofoundation",
+        "libaudiohal",
+        "libaudioprocessing",
+        "libaudioutils",
+        "libcutils",
+        "libutils",
+        "liblog",
+        "libbinder",
+        "libbinder_ndk",
+        "libaudioclient",
+        "libaudiomanager",
+        "libmediametrics",
+        "libmediautils",
+        "libnbaio",
+        "libnblog",
+        "libpermission",
+        "libpowermanager",
+        "libmemunreachable",
+        "libmedia_helper",
+        "libshmemcompat",
+        "libsounddose",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
+    ],
+
+    static_libs: [
+        "libmedialogservice",
+        "libaudiospdif",
+    ],
+}
+
+
+cc_library {
     name: "libaudioflinger",
 
     defaults: [
+        "libaudioflinger_dependencies",
         "latest_android_media_audio_common_types_cpp_shared",
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "audioflinger_flags_defaults",
@@ -164,44 +211,6 @@
         "frameworks/av/services/medialog",
     ],
 
-    shared_libs: [
-        "audioflinger-aidl-cpp",
-        "audioclient-types-aidl-cpp",
-        "av-types-aidl-cpp",
-        "effect-aidl-cpp",
-        "libaudioclient_aidl_conversion",
-        "libactivitymanager_aidl",
-        "libaudioflinger_datapath",
-        "libaudioflinger_fastpath",
-        "libaudioflinger_timing",
-        "libaudioflinger_utils",
-        "libaudiofoundation",
-        "libaudiohal",
-        "libaudioprocessing",
-        "libaudiospdif",
-        "libaudioutils",
-        "libcutils",
-        "libutils",
-        "liblog",
-        "libbinder",
-        "libbinder_ndk",
-        "libaudioclient",
-        "libaudiomanager",
-        "libmedialogservice",
-        "libmediametrics",
-        "libmediautils",
-        "libnbaio",
-        "libnblog",
-        "libpermission",
-        "libpowermanager",
-        "libmemunreachable",
-        "libmedia_helper",
-        "libshmemcompat",
-        "libsounddose",
-        "libvibrator",
-        "packagemanager_aidl-cpp",
-    ],
-
     static_libs: [
         "libcpustats",
         "libpermission",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 69a6faa..e0b907f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -23,7 +23,6 @@
 
 #include "Configuration.h"
 #include "AudioFlinger.h"
-#include "EffectConfiguration.h"
 
 //#define BUFLOG_NDEBUG 0
 #include <afutils/BufLog.h>
@@ -232,23 +231,6 @@
 }
 
 AudioFlinger::AudioFlinger()
-    : mMediaLogNotifier(new AudioFlinger::MediaLogNotifier()),
-      mPrimaryHardwareDev(NULL),
-      mAudioHwDevs(NULL),
-      mHardwareStatus(AUDIO_HW_IDLE),
-      mMasterVolume(1.0f),
-      mMasterMute(false),
-      // mNextUniqueId(AUDIO_UNIQUE_ID_USE_MAX),
-      mMode(AUDIO_MODE_INVALID),
-      mBtNrecIsOff(false),
-      mIsLowRamDevice(true),
-      mIsDeviceTypeKnown(false),
-      mTotalMemory(0),
-      mClientSharedHeapSize(kMinimumClientSharedHeapSizeBytes),
-      mGlobalEffectEnableTime(0),
-      mPatchCommandThread(sp<PatchCommandThread>::make()),
-      mSystemReady(false),
-      mBluetoothLatencyModesEnabled(true)
 {
     // Move the audio session unique ID generator start base as time passes to limit risk of
     // generating the same ID again after an audioserver restart.
@@ -285,9 +267,6 @@
     // in bad state, reset the state upon service start.
     BatteryNotifier::getInstance().noteResetAudio();
 
-    mDevicesFactoryHal = DevicesFactoryHalInterface::create();
-    mEffectsFactoryHal = audioflinger::EffectConfiguration::getEffectsFactoryHal();
-
     mMediaLogNotifier->run("MediaLogNotifier");
     std::vector<pid_t> halPids;
     mDevicesFactoryHal->getHalPids(&halPids);
@@ -301,7 +280,7 @@
 
 void AudioFlinger::onFirstRef()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     mMode = AUDIO_MODE_NORMAL;
 
@@ -327,19 +306,19 @@
 
 status_t AudioFlinger::setVibratorInfos(
         const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mAudioVibratorInfos = vibratorInfos;
     return NO_ERROR;
 }
 
 status_t AudioFlinger::updateSecondaryOutputs(
         const TrackSecondaryOutputsMap& trackSecondaryOutputs) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (const auto& [trackId, secondaryOutputs] : trackSecondaryOutputs) {
         size_t i = 0;
         for (; i < mPlaybackThreads.size(); ++i) {
             IAfPlaybackThread* thread = mPlaybackThreads.valueAt(i).get();
-            Mutex::Autolock _tl(thread->mutex());
+            audio_utils::lock_guard _tl(thread->mutex());
             sp<IAfTrack> track = thread->getTrackById_l(trackId);
             if (track != nullptr) {
                 ALOGD("%s trackId: %u", __func__, trackId);
@@ -355,13 +334,13 @@
 
 status_t AudioFlinger::getMmapPolicyInfos(
             AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (const auto it = mPolicyInfos.find(policyType); it != mPolicyInfos.end()) {
         *policyInfos = it->second;
         return NO_ERROR;
     }
     if (mDevicesFactoryHal->getHalVersion() > kMaxAAudioPropertyDeviceHalVersion) {
-        AutoMutex lock(mHardwareLock);
+        audio_utils::lock_guard lock(hardwareMutex());
         for (size_t i = 0; i < mAudioHwDevs.size(); ++i) {
             AudioHwDevice *dev = mAudioHwDevs.valueAt(i);
             std::vector<AudioMMapPolicyInfo> infos;
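
For readers tracking the lock migration in the hunks above: the pattern replaces Mutex::Autolock/AutoMutex on raw members (mLock, mHardwareLock) with audio_utils::lock_guard taken on accessor methods (mutex(), hardwareMutex()). A minimal standalone sketch of that shape, assuming audio_utils::lock_guard behaves like std::lock_guard and using hypothetical names:

#include <mutex>

class Example {
public:
    int value() const {
        std::lock_guard _l(mutex());        // was: Mutex::Autolock _l(mLock);
        return mValue;
    }
private:
    // hypothetical accessor standing in for mutex()/hardwareMutex()
    std::mutex& mutex() const { return mMutex; }
    mutable std::mutex mMutex;
    int mValue = 0;                         // guarded by mMutex
};

int main() { return Example{}.value(); }
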
@@ -382,20 +361,20 @@
 }
 
 int32_t AudioFlinger::getAAudioMixerBurstCount() const {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return mAAudioBurstsPerBuffer;
 }
 
 int32_t AudioFlinger::getAAudioHardwareBurstMinUsec() const {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return mAAudioHwBurstMinMicros;
 }
 
 status_t AudioFlinger::setDeviceConnectedState(const struct audio_port_v7 *port,
                                                media::DeviceConnectedState state) {
     status_t final_result = NO_INIT;
-    Mutex::Autolock _l(mLock);
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard _l(mutex());
+    audio_utils::lock_guard lock(hardwareMutex());
     mHardwareStatus = AUDIO_HW_SET_CONNECTED_STATE;
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
         sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
@@ -415,8 +394,8 @@
 status_t AudioFlinger::setSimulateDeviceConnections(bool enabled) {
     bool at_least_one_succeeded = false;
     status_t last_error = INVALID_OPERATION;
-    Mutex::Autolock _l(mLock);
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard _l(mutex());
+    audio_utils::lock_guard lock(hardwareMutex());
     mHardwareStatus = AUDIO_HW_SET_SIMULATE_CONNECTIONS;
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
         sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
@@ -459,7 +438,7 @@
     }
 
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
-        // no mHardwareLock needed, as there are no other references to this
+        // no hardwareMutex() needed, as there are no other references to this
         delete mAudioHwDevs.valueAt(i);
     }
 
@@ -581,6 +560,9 @@
         return ret;
     }
 
+    // use unique_lock as we may selectively unlock.
+    audio_utils::unique_lock l(mutex());
+
     // at this stage, a MmapThread was created when openOutput() or openInput() was called by
     // audio policy manager and we can retrieve it
     const sp<IAfMmapThread> thread = mMmapThreads.valueFor(io);
@@ -593,12 +575,14 @@
         config->channel_mask = thread->channelMask();
         config->format = thread->format();
     } else {
+        l.unlock();
         if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
             AudioSystem::releaseOutput(portId);
         } else {
             AudioSystem::releaseInput(portId);
         }
         ret = NO_INIT;
+        // we don't reacquire the lock here as there is nothing left to do.
     }
 
     ALOGV("%s done status %d portId %d", __FUNCTION__, ret, portId);
@@ -608,7 +592,7 @@
 
 status_t AudioFlinger::addEffectToHal(
         const struct audio_port_config *device, const sp<EffectHalInterface>& effect) {
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard lock(hardwareMutex());
     AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(device->ext.device.hw_module);
     if (audioHwDevice == nullptr) {
         return NO_INIT;
@@ -618,7 +602,7 @@
 
 status_t AudioFlinger::removeEffectFromHal(
         const struct audio_port_config *device, const sp<EffectHalInterface>& effect) {
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard lock(hardwareMutex());
     AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(device->ext.device.hw_module);
     if (audioHwDevice == nullptr) {
         return NO_INIT;
@@ -638,11 +622,11 @@
 {
     // if module is 0, the request comes from an old policy manager and we should load
     // well known modules
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard lock(hardwareMutex());
     if (module == 0) {
         ALOGW("findSuitableHwDev_l() loading well know audio hw modules");
         for (size_t i = 0; i < arraysize(audio_interfaces); i++) {
-            loadHwModule_l(audio_interfaces[i]);
+            loadHwModule_ll(audio_interfaces[i]);
         }
         // then try to find a module supporting the requested device.
         for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
@@ -665,7 +649,7 @@
     return NULL;
 }
 
-void AudioFlinger::dumpClients(int fd, const Vector<String16>& args __unused)
+void AudioFlinger::dumpClients_ll(int fd, const Vector<String16>& args __unused)
 {
     String8 result;
 
@@ -699,7 +683,7 @@
 }
 
 
-void AudioFlinger::dumpInternals(int fd, const Vector<String16>& args __unused)
+void AudioFlinger::dumpInternals_l(int fd, const Vector<String16>& args __unused)
 {
     const size_t SIZE = 256;
     char buffer[SIZE];
@@ -738,15 +722,15 @@
         dumpPermissionDenial(fd, args);
     } else {
         // get state of hardware lock
-        const bool hardwareLocked = afutils::dumpTryLock(mHardwareLock);
+        const bool hardwareLocked = afutils::dumpTryLock(hardwareMutex());
         if (!hardwareLocked) {
             String8 result(kHardwareLockedString);
             write(fd, result.c_str(), result.size());
         } else {
-            mHardwareLock.unlock();
+            hardwareMutex().unlock();
         }
 
-        const bool locked = afutils::dumpTryLock(mLock);
+        const bool locked = afutils::dumpTryLock(mutex());
 
         // failed to lock - AudioFlinger is probably deadlocked
         if (!locked) {
@@ -754,7 +738,7 @@
             write(fd, result.c_str(), result.size());
         }
 
-        const bool clientLocked = afutils::dumpTryLock(mClientLock);
+        const bool clientLocked = afutils::dumpTryLock(clientMutex());
         if (!clientLocked) {
             String8 result(kClientLockedString);
             write(fd, result.c_str(), result.size());
@@ -767,12 +751,12 @@
             write(fd, result.c_str(), result.size());
         }
 
-        dumpClients(fd, args);
+        dumpClients_ll(fd, args);
         if (clientLocked) {
-            mClientLock.unlock();
+            clientMutex().unlock();
         }
 
-        dumpInternals(fd, args);
+        dumpInternals_l(fd, args);
 
         // dump playback threads
         for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
@@ -827,7 +811,7 @@
         BUFLOG_RESET;
 
         if (locked) {
-            mLock.unlock();
+            mutex().unlock();
         }
 
 #ifdef TEE_SINK
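
The dump() hunks above keep the existing try-lock-then-report strategy, just routed through the new mutex accessors. A minimal sketch of that pattern, with a hypothetical bounded try-lock standing in for afutils::dumpTryLock:

#include <chrono>
#include <cstdio>
#include <mutex>

std::timed_mutex gMutex;

// Hypothetical stand-in for afutils::dumpTryLock: a bounded try-lock so a dump
// never blocks forever on a wedged mutex.
bool dumpTryLock(std::timed_mutex& m) {
    return m.try_lock_for(std::chrono::milliseconds(50));
}

void dump() {
    const bool locked = dumpTryLock(gMutex);
    if (!locked) {
        std::puts("service may be deadlocked, dumping anyway");
    }
    std::puts("... state ...");      // best-effort dump either way
    if (locked) {
        gMutex.unlock();
    }
}

int main() { dump(); }
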
@@ -899,7 +883,7 @@
 
 sp<Client> AudioFlinger::registerPid(pid_t pid)
 {
-    Mutex::Autolock _cl(mClientLock);
+    audio_utils::lock_guard _cl(clientMutex());
     // If pid is already in the mClients wp<> map, then use that entry
     // (for which promote() is always != 0), otherwise create a new entry and Client.
     sp<Client> client = mClients.valueFor(pid).promote();
@@ -922,7 +906,7 @@
     // If allocation fails, consult the vector of previously unregistered writers
     // and garbage-collect one or more them until an allocation succeeds
     if (shared == 0) {
-        Mutex::Autolock _l(mUnregisteredWritersLock);
+        audio_utils::lock_guard _l(unregisteredWritersMutex());
         for (size_t count = mUnregisteredWriters.size(); count > 0; count--) {
             {
                 // Pick the oldest stale writer to garbage-collect
@@ -962,7 +946,7 @@
     }
     // Rather than removing the writer immediately, append it to a queue of old writers to
     // be garbage-collected later.  This allows us to continue to view old logs for a while.
-    Mutex::Autolock _l(mUnregisteredWritersLock);
+    audio_utils::lock_guard _l(unregisteredWritersMutex());
     mUnregisteredWriters.push(writer);
 }
 
@@ -1058,7 +1042,7 @@
     }
 
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         IAfPlaybackThread* thread = checkPlaybackThread_l(output.outputId);
         if (thread == NULL) {
             ALOGE("no playback thread found for output handle %d", output.outputId);
@@ -1096,7 +1080,7 @@
                                       input.sharedBuffer, sessionId, &output.flags,
                                       callingPid, adjAttributionSource, input.clientInfo.clientTid,
                                       &lStatus, portId, input.audioTrackCallback, isSpatialized,
-                                      isBitPerfect);
+                                      isBitPerfect, &output.afTrackFlags);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
@@ -1109,8 +1093,8 @@
         output.portId = portId;
 
         if (lStatus == NO_ERROR) {
-            // no risk of deadlock because AudioFlinger::mLock is held
-            Mutex::Autolock _dl(thread->mutex());
+            // no risk of deadlock because AudioFlinger::mutex() is held
+            audio_utils::lock_guard _dl(thread->mutex());
             // Connect secondary outputs. Failure on a secondary output must not impede the primary.
             // Any secondary output setup failure will lead to a desync between the AP and AF until
             // the track is destroyed.
@@ -1118,8 +1102,9 @@
             // move effect chain to this output thread if an effect on same session was waiting
             // for a track to be created
             if (effectThread != nullptr) {
-                Mutex::Autolock _sl(effectThread->mutex());
-                if (moveEffectChain_l(sessionId, effectThread, thread) == NO_ERROR) {
+                // No thread safety analysis: double lock on a thread capability.
+                audio_utils::lock_guard_no_thread_safety_analysis _sl(effectThread->mutex());
+                if (moveEffectChain_ll(sessionId, effectThread, thread) == NO_ERROR) {
                     effectThreadId = thread->id();
                     effectIds = thread->getEffectIds_l(sessionId);
                 }
@@ -1148,11 +1133,11 @@
 
     if (lStatus != NO_ERROR) {
         // remove local strong reference to Client before deleting the Track so that the
-        // Client destructor is called by the TrackBase destructor with mClientLock held
-        // Don't hold mClientLock when releasing the reference on the track as the
+        // Client destructor is called by the TrackBase destructor with clientMutex() held
+        // Don't hold clientMutex() when releasing the reference on the track as the
         // destructor will acquire it.
         {
-            Mutex::Autolock _cl(mClientLock);
+            audio_utils::lock_guard _cl(clientMutex());
             client.clear();
         }
         track.clear();
@@ -1177,7 +1162,7 @@
 
 uint32_t AudioFlinger::sampleRate(audio_io_handle_t ioHandle) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfThreadBase* const thread = checkThread_l(ioHandle);
     if (thread == NULL) {
         ALOGW("sampleRate() unknown thread %d", ioHandle);
@@ -1188,7 +1173,7 @@
 
 audio_format_t AudioFlinger::format(audio_io_handle_t output) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfPlaybackThread* const thread = checkPlaybackThread_l(output);
     if (thread == NULL) {
         ALOGW("format() unknown thread %d", output);
@@ -1199,7 +1184,7 @@
 
 size_t AudioFlinger::frameCount(audio_io_handle_t ioHandle) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfThreadBase* const thread = checkThread_l(ioHandle);
     if (thread == NULL) {
         ALOGW("frameCount() unknown thread %d", ioHandle);
@@ -1212,7 +1197,7 @@
 
 size_t AudioFlinger::frameCountHAL(audio_io_handle_t ioHandle) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfThreadBase* const thread = checkThread_l(ioHandle);
     if (thread == NULL) {
         ALOGW("frameCountHAL() unknown thread %d", ioHandle);
@@ -1223,7 +1208,7 @@
 
 uint32_t AudioFlinger::latency(audio_io_handle_t output) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfPlaybackThread* const thread = checkPlaybackThread_l(output);
     if (thread == NULL) {
         ALOGW("latency(): no playback thread found for output handle %d", output);
@@ -1244,12 +1229,12 @@
         return PERMISSION_DENIED;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mMasterVolume = value;
 
     // Set master volume in the HALs which support it.
     {
-        AutoMutex lock(mHardwareLock);
+        audio_utils::lock_guard lock(hardwareMutex());
         for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
             AudioHwDevice *dev = mAudioHwDevs.valueAt(i);
 
@@ -1291,7 +1276,7 @@
         return BAD_VALUE;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     // short cut.
     if (mMasterBalance == balance) return NO_ERROR;
@@ -1325,18 +1310,18 @@
     }
 
     { // scope for the lock
-        AutoMutex lock(mHardwareLock);
+        audio_utils::lock_guard lock(hardwareMutex());
         if (mPrimaryHardwareDev == nullptr) {
             return INVALID_OPERATION;
         }
-        sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
+        sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice();
         mHardwareStatus = AUDIO_HW_SET_MODE;
         ret = dev->setMode(mode);
         mHardwareStatus = AUDIO_HW_IDLE;
     }
 
     if (NO_ERROR == ret) {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         mMode = mode;
         for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
             mPlaybackThreads.valueAt(i)->setMode(mode);
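
The .load() calls introduced above suggest mPrimaryHardwareDev is now held in an atomic-style wrapper; the wrapper type itself is not shown in this diff. A minimal sketch of the access pattern with std::atomic<T*> (all names hypothetical):

#include <atomic>
#include <iostream>

struct Device { void setMode(int m) { std::cout << "mode " << m << "\n"; } };

std::atomic<Device*> gPrimaryDevice{nullptr};   // stand-in for mPrimaryHardwareDev

void setMode(int mode) {
    Device* const dev = gPrimaryDevice.load();  // explicit load, as in the hunks
    if (dev == nullptr) return;                 // nullptr check still required
    dev->setMode(mode);
}

int main() {
    Device d;
    gPrimaryDevice = &d;
    setMode(3);
}
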
@@ -1362,11 +1347,11 @@
         return PERMISSION_DENIED;
     }
 
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard lock(hardwareMutex());
     if (mPrimaryHardwareDev == nullptr) {
         return INVALID_OPERATION;
     }
-    sp<DeviceHalInterface> primaryDev = mPrimaryHardwareDev->hwDevice();
+    sp<DeviceHalInterface> primaryDev = mPrimaryHardwareDev.load()->hwDevice();
     if (primaryDev == nullptr) {
         ALOGW("%s: no primary HAL device", __func__);
         return INVALID_OPERATION;
@@ -1390,11 +1375,11 @@
     if (ret != NO_ERROR) {
         return false;
     }
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard lock(hardwareMutex());
     if (mPrimaryHardwareDev == nullptr) {
         return false;
     }
-    sp<DeviceHalInterface> primaryDev = mPrimaryHardwareDev->hwDevice();
+    sp<DeviceHalInterface> primaryDev = mPrimaryHardwareDev.load()->hwDevice();
     if (primaryDev == nullptr) {
         ALOGW("%s: no primary HAL device", __func__);
         return false;
@@ -1411,7 +1396,7 @@
 {
     ALOGV("AudioFlinger::setRecordSilenced(portId:%d, silenced:%d)", portId, silenced);
 
-    AutoMutex lock(mLock);
+    audio_utils::lock_guard lock(mutex());
     for (size_t i = 0; i < mRecordThreads.size(); i++) {
         mRecordThreads[i]->setRecordSilenced(portId, silenced);
     }
@@ -1432,12 +1417,12 @@
         return PERMISSION_DENIED;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mMasterMute = muted;
 
     // Set master mute in the HALs which support it.
     {
-        AutoMutex lock(mHardwareLock);
+        audio_utils::lock_guard lock(hardwareMutex());
         for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
             AudioHwDevice *dev = mAudioHwDevs.valueAt(i);
 
@@ -1463,20 +1448,20 @@
 
 float AudioFlinger::masterVolume() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return masterVolume_l();
 }
 
 status_t AudioFlinger::getMasterBalance(float *balance) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     *balance = getMasterBalance_l();
     return NO_ERROR; // if called through binder, may return a transactional error
 }
 
 bool AudioFlinger::masterMute() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return masterMute_l();
 }
 
@@ -1495,7 +1480,8 @@
     return mMasterMute;
 }
 
-status_t AudioFlinger::checkStreamType(audio_stream_type_t stream) const
+/* static */
+status_t AudioFlinger::checkStreamType(audio_stream_type_t stream)
 {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
         ALOGW("checkStreamType() invalid stream %d", stream);
@@ -1528,7 +1514,7 @@
     LOG_ALWAYS_FATAL_IF(stream == AUDIO_STREAM_PATCH && value != 1.0f,
                         "AUDIO_STREAM_PATCH must have full scale volume");
 
-    AutoMutex lock(mLock);
+    audio_utils::lock_guard lock(mutex());
     sp<VolumeInterface> volumeInterface = getVolumeInterface_l(output);
     if (volumeInterface == NULL) {
         return BAD_VALUE;
@@ -1543,7 +1529,7 @@
     if (output == AUDIO_IO_HANDLE_NONE) {
         return BAD_VALUE;
     }
-    AutoMutex lock(mLock);
+    audio_utils::lock_guard lock(mutex());
     IAfPlaybackThread* const thread = checkPlaybackThread_l(output);
     if (thread == nullptr) {
         return BAD_VALUE;
@@ -1556,7 +1542,7 @@
     if (output == AUDIO_IO_HANDLE_NONE) {
         return BAD_VALUE;
     }
-    AutoMutex lock(mLock);
+    audio_utils::lock_guard lock(mutex());
     IAfPlaybackThread* const thread = checkPlaybackThread_l(output);
     if (thread == nullptr) {
         return BAD_VALUE;
@@ -1565,7 +1551,7 @@
 }
 
 status_t AudioFlinger::setBluetoothVariableLatencyEnabled(bool enabled) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     status_t status = INVALID_OPERATION;
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
         // Success if at least one PlaybackThread supports Bluetooth latency modes
@@ -1591,7 +1577,7 @@
     if (support == nullptr) {
         return BAD_VALUE;
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(hardwareMutex());
     *support = false;
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
         if (mAudioHwDevs.valueAt(i)->supportsBluetoothVariableLatency()) {
@@ -1630,7 +1616,7 @@
         return BAD_VALUE;
     }
 
-    AutoMutex lock(mLock);
+    audio_utils::lock_guard lock(mutex());
     mStreamTypes[stream].mute = muted;
     std::vector<sp<VolumeInterface>> volumeInterfaces = getAllVolumeInterfaces_l();
     for (size_t i = 0; i < volumeInterfaces.size(); i++) {
@@ -1650,7 +1636,7 @@
         return 0.0f;
     }
 
-    AutoMutex lock(mLock);
+    audio_utils::lock_guard lock(mutex());
     sp<VolumeInterface> volumeInterface = getVolumeInterface_l(output);
     if (volumeInterface == NULL) {
         return 0.0f;
@@ -1666,7 +1652,7 @@
         return true;
     }
 
-    AutoMutex lock(mLock);
+    audio_utils::lock_guard lock(mutex());
     return streamMute_l(stream);
 }
 
@@ -1685,7 +1671,7 @@
     }
 }
 
-// forwardAudioHwSyncToDownstreamPatches_l() must be called with AudioFlinger::mLock held
+// forwardParametersToDownstreamPatches_l() must be called with AudioFlinger::mutex() held
 void AudioFlinger::forwardParametersToDownstreamPatches_l(
         audio_io_handle_t upStream, const String8& keyValuePairs,
         const std::function<bool(const sp<IAfPlaybackThread>&)>& useThread)
@@ -1790,11 +1776,11 @@
 
     // AUDIO_IO_HANDLE_NONE means the parameters are global to the audio hardware interface
     if (ioHandle == AUDIO_IO_HANDLE_NONE) {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         // result will remain NO_INIT if no audio device is present
         status_t final_result = NO_INIT;
         {
-            AutoMutex lock(mHardwareLock);
+            audio_utils::lock_guard lock(hardwareMutex());
             mHardwareStatus = AUDIO_HW_SET_PARAMETER;
             for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
                 sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
@@ -1833,7 +1819,7 @@
     // and the thread is exited once the lock is released
     sp<IAfThreadBase> thread;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         thread = checkPlaybackThread_l(ioHandle);
         if (thread == 0) {
             thread = checkRecordThread_l(ioHandle);
@@ -1852,6 +1838,7 @@
     }
     if (thread != 0) {
         status_t result = thread->setParameters(filteredKeyValuePairs);
+        audio_utils::lock_guard _l(mutex());
         forwardParametersToDownstreamPatches_l(thread->id(), filteredKeyValuePairs);
         return result;
     }
@@ -1863,12 +1850,12 @@
     ALOGVV("getParameters() io %d, keys %s, calling pid %d",
             ioHandle, keys.c_str(), IPCThreadState::self()->getCallingPid());
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     if (ioHandle == AUDIO_IO_HANDLE_NONE) {
         String8 out_s8;
 
-        AutoMutex lock(mHardwareLock);
+        audio_utils::lock_guard lock(hardwareMutex());
         for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
             String8 s;
             mHardwareStatus = AUDIO_HW_GET_PARAMETER;
@@ -1906,13 +1893,13 @@
         return 0;
     }
 
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard lock(hardwareMutex());
     if (mPrimaryHardwareDev == nullptr) {
         return 0;
     }
     mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
 
-    sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
+    sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice();
 
     std::vector<audio_channel_mask_t> channelMasks = {channelMask};
     if (channelMask != AUDIO_CHANNEL_IN_MONO) {
@@ -1978,7 +1965,7 @@
 
 uint32_t AudioFlinger::getInputFramesLost(audio_io_handle_t ioHandle) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     IAfRecordThread* const recordThread = checkRecordThread_l(ioHandle);
     if (recordThread != NULL) {
@@ -1999,11 +1986,11 @@
         return PERMISSION_DENIED;
     }
 
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard lock(hardwareMutex());
     if (mPrimaryHardwareDev == nullptr) {
         return INVALID_OPERATION;
     }
-    sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
+    sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice();
     mHardwareStatus = AUDIO_HW_SET_VOICE_VOLUME;
     ret = dev->setVoiceVolume(value);
     mHardwareStatus = AUDIO_HW_IDLE;
@@ -2018,7 +2005,7 @@
 status_t AudioFlinger::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames,
         audio_io_handle_t output) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     IAfPlaybackThread* const playbackThread = checkPlaybackThread_l(output);
     if (playbackThread != NULL) {
@@ -2030,14 +2017,14 @@
 
 void AudioFlinger::registerClient(const sp<media::IAudioFlingerClient>& client)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (client == 0) {
         return;
     }
     pid_t pid = IPCThreadState::self()->getCallingPid();
     const uid_t uid = IPCThreadState::self()->getCallingUid();
     {
-        Mutex::Autolock _cl(mClientLock);
+        audio_utils::lock_guard _cl(clientMutex());
         if (mNotificationClients.indexOfKey(pid) < 0) {
             sp<NotificationClient> notificationClient = new NotificationClient(this,
                                                                                 client,
@@ -2053,9 +2040,10 @@
         }
     }
 
-    // mClientLock should not be held here because ThreadBase::sendIoConfigEvent() will lock the
-    // ThreadBase mutex and the locking order is ThreadBase::mLock then AudioFlinger::mClientLock.
-    // the config change is always sent from playback or record threads to avoid deadlock
+    // clientMutex() should not be held here because ThreadBase::sendIoConfigEvent()
+    // will lock ThreadBase::mutex(), and the locking order is
+    // ThreadBase::mutex() then AudioFlinger::clientMutex().
+    // The config change is always sent from playback or record threads to avoid deadlock
     // with AudioSystem::gLock
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
         mPlaybackThreads.valueAt(i)->sendIoConfigEvent(AUDIO_OUTPUT_REGISTERED, pid);
@@ -2070,9 +2058,9 @@
 {
     std::vector<sp<IAfEffectModule>> removedEffects;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         {
-            Mutex::Autolock _cl(mClientLock);
+            audio_utils::lock_guard _cl(clientMutex());
             mNotificationClients.removeItem(pid);
         }
 
@@ -2094,6 +2082,9 @@
         }
         if (removed) {
             removedEffects = purgeStaleEffects_l();
+            std::vector< sp<IAfEffectModule> > removedOrphanEffects = purgeOrphanEffectChains_l();
+            removedEffects.insert(removedEffects.end(), removedOrphanEffects.begin(),
+                    removedOrphanEffects.end());
         }
     }
     for (auto& effect : removedEffects) {
@@ -2101,7 +2092,8 @@
     }
 }
 
-void AudioFlinger::ioConfigChanged(audio_io_config_event_t event,
+// Hold either AudioFlinger::mutex() or ThreadBase::mutex()
+void AudioFlinger::ioConfigChanged_l(audio_io_config_event_t event,
                                    const sp<AudioIoDescriptor>& ioDesc,
                                    pid_t pid) {
     media::AudioIoConfigEvent eventAidl = VALUE_OR_FATAL(
@@ -2109,7 +2101,7 @@
     media::AudioIoDescriptor descAidl = VALUE_OR_FATAL(
             legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(ioDesc));
 
-    Mutex::Autolock _l(mClientLock);
+    audio_utils::lock_guard _l(clientMutex());
     size_t size = mNotificationClients.size();
     for (size_t i = 0; i < size; i++) {
         if ((pid == 0) || (mNotificationClients.keyAt(i) == pid)) {
@@ -2126,7 +2118,7 @@
                 convertContainer<std::vector<media::audio::common::AudioLatencyMode>>(
                         modes, legacy2aidl_audio_latency_mode_t_AudioLatencyMode));
 
-    Mutex::Autolock _l(mClientLock);
+    audio_utils::lock_guard _l(clientMutex());
     size_t size = mNotificationClients.size();
     for (size_t i = 0; i < size; i++) {
         mNotificationClients.valueAt(i)->audioFlingerClient()
@@ -2134,7 +2126,7 @@
     }
 }
 
-// removeClient_l() must be called with AudioFlinger::mClientLock held
+// removeClient_l() must be called with AudioFlinger::clientMutex() held
 void AudioFlinger::removeClient_l(pid_t pid)
 {
     ALOGV("removeClient_l() pid %d, calling pid %d", pid,
@@ -2142,7 +2134,7 @@
     mClients.removeItem(pid);
 }
 
-// getEffectThread_l() must be called with AudioFlinger::mLock held
+// getEffectThread_l() must be called with AudioFlinger::mutex() held
 sp<IAfThreadBase> AudioFlinger::getEffectThread_l(audio_session_t sessionId,
         int effectId)
 {
@@ -2201,9 +2193,9 @@
 
 
 void AudioFlinger::MediaLogNotifier::requestMerge() {
-    AutoMutex _l(mMutex);
+    audio_utils::lock_guard _l(mMutex);
     mPendingRequests = true;
-    mCond.signal();
+    mCondition.notify_one();
 }
 
 bool AudioFlinger::MediaLogNotifier::threadLoop() {
@@ -2213,10 +2205,10 @@
     }
     // Wait until there are pending requests
     {
-        AutoMutex _l(mMutex);
+        audio_utils::unique_lock _l(mMutex);
         mPendingRequests = false; // to ignore past requests
         while (!mPendingRequests) {
-            mCond.wait(mMutex);
+            mCondition.wait(_l);
             // TODO may also need an exitPending check
         }
         mPendingRequests = false;
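
The MediaLogNotifier hunks above move from Android's Condition (signal()/wait()) to a standard condition-variable interface. A minimal sketch of the same wait/notify handshake using std primitives, written with the predicate form of wait():

#include <condition_variable>
#include <mutex>
#include <thread>

std::mutex gMutex;
std::condition_variable gCondition;
bool gPendingRequests = false;

void requestMerge() {
    std::lock_guard _l(gMutex);
    gPendingRequests = true;
    gCondition.notify_one();          // was: mCond.signal()
}

void waitForRequest() {
    std::unique_lock _l(gMutex);      // wait() requires a unique_lock
    gCondition.wait(_l, [] { return gPendingRequests; });
    gPendingRequests = false;
}

int main() {
    std::thread waiter(waitForRequest);
    requestMerge();
    waiter.join();
}
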
@@ -2327,7 +2319,7 @@
     }
 
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         IAfRecordThread* const thread = checkRecordThread_l(output.inputId);
         if (thread == NULL) {
             ALOGW("createRecord() checkRecordThread_l failed, input handle %d", output.inputId);
@@ -2384,7 +2376,7 @@
         // session and move it to this thread.
         sp<IAfEffectChain> chain = getOrphanEffectChain_l(sessionId);
         if (chain != 0) {
-            Mutex::Autolock _l2(thread->mutex());
+            audio_utils::lock_guard _l2(thread->mutex());
             thread->addEffectChain_l(chain);
         }
         break;
@@ -2403,11 +2395,11 @@
 Exit:
     if (lStatus != NO_ERROR) {
         // remove local strong reference to Client before deleting the RecordTrack so that the
-        // Client destructor is called by the TrackBase destructor with mClientLock held
-        // Don't hold mClientLock when releasing the reference on the track as the
+        // Client destructor is called by the TrackBase destructor with clientMutex() held
+        // Don't hold clientMutex() when releasing the reference on the track as the
         // destructor will acquire it.
         {
-            Mutex::Autolock _cl(mClientLock);
+            audio_utils::lock_guard _cl(clientMutex());
             client.clear();
         }
         recordTrack.clear();
@@ -2428,8 +2420,8 @@
     if (config == nullptr) {
         return BAD_VALUE;
     }
-    Mutex::Autolock _l(mLock);
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard _l(mutex());
+    audio_utils::lock_guard lock(hardwareMutex());
     RETURN_STATUS_IF_ERROR(
             mDevicesFactoryHal->getSurroundSoundConfig(&config->surroundSoundConfig));
     RETURN_STATUS_IF_ERROR(mDevicesFactoryHal->getEngineConfig(&config->engineConfig));
@@ -2437,7 +2429,7 @@
     RETURN_STATUS_IF_ERROR(mDevicesFactoryHal->getDeviceNames(&hwModuleNames));
     std::set<AudioMode> allSupportedModes;
     for (const auto& name : hwModuleNames) {
-        AudioHwDevice* module = loadHwModule_l(name.c_str());
+        AudioHwDevice* module = loadHwModule_ll(name.c_str());
         if (module == nullptr) continue;
         media::AudioHwModule aidlModule;
         if (module->hwDevice()->getAudioPorts(&aidlModule.ports) == OK &&
@@ -2472,14 +2464,15 @@
     if (!settingsAllowed()) {
         return AUDIO_MODULE_HANDLE_NONE;
     }
-    Mutex::Autolock _l(mLock);
-    AutoMutex lock(mHardwareLock);
-    AudioHwDevice* module = loadHwModule_l(name);
+    audio_utils::lock_guard _l(mutex());
+    audio_utils::lock_guard lock(hardwareMutex());
+    AudioHwDevice* module = loadHwModule_ll(name);
     return module != nullptr ? module->handle() : AUDIO_MODULE_HANDLE_NONE;
 }
 
-// loadHwModule_l() must be called with AudioFlinger::mLock and AudioFlinger::mHardwareLock held
-AudioHwDevice* AudioFlinger::loadHwModule_l(const char *name)
+// loadHwModule_ll() must be called with AudioFlinger::mutex()
+// and AudioFlinger::hardwareMutex() held
+AudioHwDevice* AudioFlinger::loadHwModule_ll(const char *name)
 {
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
         if (strncmp(mAudioHwDevs.valueAt(i)->moduleName(), name, strlen(name)) == 0) {
@@ -2558,7 +2551,7 @@
     if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_PRIMARY) == 0) {
         mPrimaryHardwareDev = audioDevice;
         mHardwareStatus = AUDIO_HW_SET_MODE;
-        mPrimaryHardwareDev->hwDevice()->setMode(mMode);
+        mPrimaryHardwareDev.load()->hwDevice()->setMode(mMode);
         mHardwareStatus = AUDIO_HW_IDLE;
     }
 
@@ -2585,14 +2578,14 @@
 
 uint32_t AudioFlinger::getPrimaryOutputSamplingRate() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfPlaybackThread* const thread = fastPlaybackThread_l();
     return thread != NULL ? thread->sampleRate() : 0;
 }
 
 size_t AudioFlinger::getPrimaryOutputFrameCount() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfPlaybackThread* const thread = fastPlaybackThread_l();
     return thread != NULL ? thread->frameCountHAL() : 0;
 }
@@ -2605,7 +2598,7 @@
     if (!isAudioServerOrSystemServerUid(uid)) {
         return PERMISSION_DENIED;
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mIsDeviceTypeKnown) {
         return INVALID_OPERATION;
     }
@@ -2663,8 +2656,8 @@
         module = config->ext.mix.hw_module;
     }
 
-    Mutex::Autolock _l(mLock);
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard _l(mutex());
+    audio_utils::lock_guard lock(hardwareMutex());
     ssize_t index = mAudioHwDevs.indexOfKey(module);
     if (index < 0) {
         ALOGW("%s() bad hw module %d", __func__, module);
@@ -2677,7 +2670,7 @@
 
 audio_hw_sync_t AudioFlinger::getAudioHwSyncForSession(audio_session_t sessionId)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     ssize_t index = mHwAvSyncIds.indexOfKey(sessionId);
     if (index >= 0) {
@@ -2688,11 +2681,11 @@
 
     sp<DeviceHalInterface> dev;
     {
-        AutoMutex lock(mHardwareLock);
+        audio_utils::lock_guard lock(hardwareMutex());
         if (mPrimaryHardwareDev == nullptr) {
             return AUDIO_HW_SYNC_INVALID;
         }
-        dev = mPrimaryHardwareDev->hwDevice();
+        dev = mPrimaryHardwareDev.load()->hwDevice();
     }
     if (dev == nullptr) {
         return AUDIO_HW_SYNC_INVALID;
@@ -2737,7 +2730,7 @@
 
 status_t AudioFlinger::systemReady()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     ALOGI("%s", __FUNCTION__);
     if (mSystemReady) {
         ALOGW("%s called twice", __FUNCTION__);
@@ -2780,7 +2773,7 @@
 
 status_t AudioFlinger::getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) const
 {
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard lock(hardwareMutex());
     status_t status = INVALID_OPERATION;
 
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
@@ -2804,7 +2797,7 @@
     return status;
 }
 
-// setAudioHwSyncForSession_l() must be called with AudioFlinger::mLock held
+// setAudioHwSyncForSession_l() must be called with AudioFlinger::mutex() held
 void AudioFlinger::setAudioHwSyncForSession_l(
         IAfPlaybackThread* const thread, audio_session_t sessionId)
 {
@@ -2943,7 +2936,7 @@
         return BAD_VALUE;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     const sp<IAfThreadBase> thread = openOutput_l(module, &output, &halConfig,
             &mixerConfig, deviceType, address, flags);
@@ -2954,21 +2947,21 @@
             latencyMs = playbackThread->latency();
 
             // notify client processes of the new output creation
-            playbackThread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
+            playbackThread->ioConfigChanged_l(AUDIO_OUTPUT_OPENED);
 
             // the first primary output opened designates the primary hw device if no HW module
             // named "primary" was already loaded.
-            AutoMutex lock(mHardwareLock);
+            audio_utils::lock_guard lock(hardwareMutex());
             if ((mPrimaryHardwareDev == nullptr) && (flags & AUDIO_OUTPUT_FLAG_PRIMARY)) {
                 ALOGI("Using module %d as the primary audio interface", module);
                 mPrimaryHardwareDev = playbackThread->getOutput()->audioHwDev;
 
                 mHardwareStatus = AUDIO_HW_SET_MODE;
-                mPrimaryHardwareDev->hwDevice()->setMode(mMode);
+                mPrimaryHardwareDev.load()->hwDevice()->setMode(mMode);
                 mHardwareStatus = AUDIO_HW_IDLE;
             }
         } else {
-            thread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
+            thread->ioConfigChanged_l(AUDIO_OUTPUT_OPENED);
         }
         response->output = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
         response->config = VALUE_OR_RETURN_STATUS(
@@ -2985,7 +2978,7 @@
 audio_io_handle_t AudioFlinger::openDuplicateOutput(audio_io_handle_t output1,
         audio_io_handle_t output2)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfPlaybackThread* const thread1 = checkMixerThread_l(output1);
     IAfPlaybackThread* const thread2 = checkMixerThread_l(output2);
 
@@ -3001,7 +2994,7 @@
     thread->addOutputTrack(thread2);
     mPlaybackThreads.add(id, thread);
     // notify client processes of the new output creation
-    thread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
+    thread->ioConfigChanged_l(AUDIO_OUTPUT_OPENED);
     return id;
 }
 
@@ -3017,7 +3010,7 @@
     sp<IAfPlaybackThread> playbackThread;
     sp<IAfMmapPlaybackThread> mmapThread;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         playbackThread = checkPlaybackThread_l(output);
         if (playbackThread != NULL) {
             ALOGV("closeOutput() %d", output);
@@ -3042,11 +3035,11 @@
                         checkPlaybackThread_l(mPlaybackThreads.keyAt(0));
                 if (dstThread != NULL) {
                     // audioflinger lock is held so order of thread lock acquisition doesn't matter
-                    Mutex::Autolock _dl(dstThread->mutex());
-                    Mutex::Autolock _sl(playbackThread->mutex());
+                    // Use scoped_lock to avoid deadlock order issues with duplicating threads.
+                    audio_utils::scoped_lock sl(dstThread->mutex(), playbackThread->mutex());
                     Vector<sp<IAfEffectChain>> effectChains = playbackThread->getEffectChains_l();
                     for (size_t i = 0; i < effectChains.size(); i ++) {
-                        moveEffectChain_l(effectChains[i]->sessionId(), playbackThread.get(),
+                        moveEffectChain_ll(effectChains[i]->sessionId(), playbackThread.get(),
                                 dstThread);
                     }
                 }
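
The closeOutput hunk above now takes both thread mutexes with a single scoped_lock instead of two nested Autolocks. Assuming audio_utils::scoped_lock mirrors std::scoped_lock, a minimal sketch of why that shape is deadlock-safe regardless of argument order:

#include <mutex>
#include <thread>

std::mutex gDstMutex;
std::mutex gSrcMutex;

void moveChains() {
    // std::scoped_lock acquires both mutexes with a deadlock-avoidance
    // algorithm, so the textual order of the arguments does not matter.
    std::scoped_lock sl(gDstMutex, gSrcMutex);
    // ... move effect chains while both locks are held ...
}

void moveChainsReversed() {
    std::scoped_lock sl(gSrcMutex, gDstMutex);  // opposite order is still safe
}

int main() {
    std::thread a(moveChains), b(moveChainsReversed);
    a.join();
    b.join();
}
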
@@ -3061,7 +3054,7 @@
             mMmapThreads.removeItem(output);
             ALOGD("closing mmapThread %p", mmapThread.get());
         }
-        ioConfigChanged(AUDIO_OUTPUT_CLOSED, sp<AudioIoDescriptor>::make(output));
+        ioConfigChanged_l(AUDIO_OUTPUT_CLOSED, sp<AudioIoDescriptor>::make(output));
         mPatchPanel->notifyStreamClosed(output);
     }
     // The thread entity (active unit of execution) is no longer running here,
@@ -3083,6 +3076,7 @@
     return NO_ERROR;
 }
 
+/* static */
 void AudioFlinger::closeOutputFinish(const sp<IAfPlaybackThread>& thread)
 {
     AudioStreamOut *out = thread->clearOutput();
@@ -3100,7 +3094,7 @@
 
 status_t AudioFlinger::suspendOutput(audio_io_handle_t output)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfPlaybackThread* const thread = checkPlaybackThread_l(output);
 
     if (thread == NULL) {
@@ -3115,7 +3109,7 @@
 
 status_t AudioFlinger::restoreOutput(audio_io_handle_t output)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfPlaybackThread* const thread = checkPlaybackThread_l(output);
 
     if (thread == NULL) {
@@ -3132,7 +3126,7 @@
 status_t AudioFlinger::openInput(const media::OpenInputRequest& request,
                                  media::OpenInputResponse* response)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     AudioDeviceTypeAddr device = VALUE_OR_RETURN_STATUS(
             aidl2legacy_AudioDeviceTypeAddress(request.device));
@@ -3163,7 +3157,7 @@
 
     if (thread != 0) {
         // notify client processes of the new input creation
-        thread->ioConfigChanged(AUDIO_INPUT_OPENED);
+        thread->ioConfigChanged_l(AUDIO_INPUT_OPENED);
         return NO_ERROR;
     }
     return NO_INIT;
@@ -3269,7 +3263,7 @@
     sp<IAfRecordThread> recordThread;
     sp<IAfMmapCaptureThread> mmapThread;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         recordThread = checkRecordThread_l(input);
         if (recordThread != 0) {
             ALOGV("closeInput() %d", input);
@@ -3282,7 +3276,7 @@
             // new capture on the same session
             sp<IAfEffectChain> chain;
             {
-                Mutex::Autolock _sl(recordThread->mutex());
+                audio_utils::lock_guard _sl(recordThread->mutex());
                 const Vector<sp<IAfEffectChain>> effectChains = recordThread->getEffectChains_l();
                 // Note: maximum one chain per record thread
                 if (effectChains.size() != 0) {
@@ -3300,7 +3294,7 @@
                         continue;
                     }
                     if (t->hasAudioSession(chain->sessionId()) != 0) {
-                        Mutex::Autolock _l2(t->mutex());
+                        audio_utils::lock_guard _l2(t->mutex());
                         ALOGV("closeInput() found thread %d for effect session %d",
                               t->id(), chain->sessionId());
                         t->addEffectChain_l(chain);
@@ -3322,9 +3316,9 @@
             dumpToThreadLog_l(mmapThread);
             mMmapThreads.removeItem(input);
         }
-        ioConfigChanged(AUDIO_INPUT_CLOSED, sp<AudioIoDescriptor>::make(input));
+        ioConfigChanged_l(AUDIO_INPUT_CLOSED, sp<AudioIoDescriptor>::make(input));
     }
-    // FIXME: calling thread->exit() without mLock held should not be needed anymore now that
+    // FIXME: calling thread->exit() without mutex() held should not be needed anymore now that
     // we have a different lock for notification client
     if (recordThread != 0) {
         closeInputFinish(recordThread);
@@ -3354,7 +3348,7 @@
 }
 
 status_t AudioFlinger::invalidateTracks(const std::vector<audio_port_handle_t> &portIds) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     ALOGV("%s", __func__);
 
     std::set<audio_port_handle_t> portIdSet(portIds.begin(), portIds.end());
@@ -3389,7 +3383,7 @@
 void AudioFlinger::acquireAudioSessionId(
         audio_session_t audioSession, pid_t pid, uid_t uid)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     pid_t caller = IPCThreadState::self()->getCallingPid();
     ALOGV("acquiring %d from %d, for %d", audioSession, caller, pid);
     const uid_t callerUid = IPCThreadState::self()->getCallingUid();
@@ -3401,7 +3395,7 @@
     }
 
     {
-        Mutex::Autolock _cl(mClientLock);
+        audio_utils::lock_guard _cl(clientMutex());
         // Ignore requests received from processes not known as notification client. The request
         // is likely proxied by mediaserver (e.g CameraService) and releaseAudioSessionId() can be
         // called from a different pid leaving a stale session reference.  Also we don't know how
@@ -3429,7 +3423,7 @@
 {
     std::vector<sp<IAfEffectModule>> removedEffects;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         pid_t caller = IPCThreadState::self()->getCallingPid();
         ALOGV("releasing %d from %d for %d", audioSession, caller, pid);
         const uid_t callerUid = IPCThreadState::self()->getCallingUid();
@@ -3484,7 +3478,7 @@
 
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
         sp<IAfPlaybackThread> t = mPlaybackThreads.valueAt(i);
-        Mutex::Autolock _l(t->mutex());
+        audio_utils::lock_guard _l(t->mutex());
         const Vector<sp<IAfEffectChain>> threadChains = t->getEffectChains_l();
         for (size_t j = 0; j < threadChains.size(); j++) {
             sp<IAfEffectChain> ec = threadChains[j];
@@ -3496,7 +3490,7 @@
 
     for (size_t i = 0; i < mRecordThreads.size(); i++) {
         sp<IAfRecordThread> t = mRecordThreads.valueAt(i);
-        Mutex::Autolock _l(t->mutex());
+        audio_utils::lock_guard _l(t->mutex());
         const Vector<sp<IAfEffectChain>> threadChains = t->getEffectChains_l();
         for (size_t j = 0; j < threadChains.size(); j++) {
             sp<IAfEffectChain> ec = threadChains[j];
@@ -3506,7 +3500,7 @@
 
     for (size_t i = 0; i < mMmapThreads.size(); i++) {
         const sp<IAfMmapThread> t = mMmapThreads.valueAt(i);
-        Mutex::Autolock _l(t->mutex());
+        audio_utils::lock_guard _l(t->mutex());
         const Vector<sp<IAfEffectChain>> threadChains = t->getEffectChains_l();
         for (size_t j = 0; j < threadChains.size(); j++) {
             sp<IAfEffectChain> ec = threadChains[j];
@@ -3534,7 +3528,7 @@
             }
         }
         if (!found) {
-            Mutex::Autolock _l(t->mutex());
+            audio_utils::lock_guard _l(t->mutex());
             // remove all effects from the chain
             while (ec->numberOfEffects()) {
                 sp<IAfEffectModule> effect = ec->getEffectModule(0);
@@ -3550,7 +3544,43 @@
     return removedEffects;
 }
 
-// dumpToThreadLog_l() must be called with AudioFlinger::mLock held
+std::vector< sp<IAfEffectModule> > AudioFlinger::purgeOrphanEffectChains_l()
+{
+    ALOGV("purging stale effects from orphan chains");
+    std::vector< sp<IAfEffectModule> > removedEffects;
+    for (size_t index = 0; index < mOrphanEffectChains.size(); index++) {
+        sp<IAfEffectChain> chain = mOrphanEffectChains.valueAt(index);
+        audio_session_t session = mOrphanEffectChains.keyAt(index);
+        if (session == AUDIO_SESSION_OUTPUT_MIX || session == AUDIO_SESSION_DEVICE
+                || session == AUDIO_SESSION_OUTPUT_STAGE) {
+            continue;
+        }
+        size_t numSessionRefs = mAudioSessionRefs.size();
+        bool found = false;
+        for (size_t k = 0; k < numSessionRefs; k++) {
+            AudioSessionRef *ref = mAudioSessionRefs.itemAt(k);
+            if (ref->mSessionid == session) {
+                ALOGV(" session %d still exists for %d with %d refs", session, ref->mPid,
+                        ref->mCnt);
+                found = true;
+                break;
+            }
+        }
+        if (!found) {
+            for (size_t i = 0; i < chain->numberOfEffects(); i++) {
+                sp<IAfEffectModule> effect = chain->getEffectModule(i);
+                removedEffects.push_back(effect);
+            }
+        }
+    }
+    for (auto& effect : removedEffects) {
+        effect->unPin();
+        updateOrphanEffectChains_l(effect);
+    }
+    return removedEffects;
+}
+
+// dumpToThreadLog_l() must be called with AudioFlinger::mutex() held
 void AudioFlinger::dumpToThreadLog_l(const sp<IAfThreadBase> &thread)
 {
     constexpr int THREAD_DUMP_TIMEOUT_MS = 2;
@@ -3562,7 +3592,7 @@
     }
 }
 
-// checkThread_l() must be called with AudioFlinger::mLock held
+// checkThread_l() must be called with AudioFlinger::mutex() held
 IAfThreadBase* AudioFlinger::checkThread_l(audio_io_handle_t ioHandle) const
 {
     IAfThreadBase* thread = checkMmapThread_l(ioHandle);
@@ -3581,7 +3611,7 @@
     return thread;
 }
 
-// checkOutputThread_l() must be called with AudioFlinger::mLock held
+// checkOutputThread_l() must be called with AudioFlinger::mutex() held
 sp<IAfThreadBase> AudioFlinger::checkOutputThread_l(audio_io_handle_t ioHandle) const
 {
     if (audio_unique_id_get_use(ioHandle) != AUDIO_UNIQUE_ID_USE_OUTPUT) {
@@ -3595,33 +3625,33 @@
     return thread;
 }
 
-// checkPlaybackThread_l() must be called with AudioFlinger::mLock held
+// checkPlaybackThread_l() must be called with AudioFlinger::mutex() held
 IAfPlaybackThread* AudioFlinger::checkPlaybackThread_l(audio_io_handle_t output) const
 {
     return mPlaybackThreads.valueFor(output).get();
 }
 
-// checkMixerThread_l() must be called with AudioFlinger::mLock held
+// checkMixerThread_l() must be called with AudioFlinger::mutex() held
 IAfPlaybackThread* AudioFlinger::checkMixerThread_l(audio_io_handle_t output) const
 {
     IAfPlaybackThread * const thread = checkPlaybackThread_l(output);
     return thread != nullptr && thread->type() != IAfThreadBase::DIRECT ? thread : nullptr;
 }
 
-// checkRecordThread_l() must be called with AudioFlinger::mLock held
+// checkRecordThread_l() must be called with AudioFlinger::mutex() held
 IAfRecordThread* AudioFlinger::checkRecordThread_l(audio_io_handle_t input) const
 {
     return mRecordThreads.valueFor(input).get();
 }
 
-// checkMmapThread_l() must be called with AudioFlinger::mLock held
+// checkMmapThread_l() must be called with AudioFlinger::mutex() held
 IAfMmapThread* AudioFlinger::checkMmapThread_l(audio_io_handle_t io) const
 {
     return mMmapThreads.valueFor(io).get();
 }
 
 
-// checkPlaybackThread_l() must be called with AudioFlinger::mLock held
+// getVolumeInterface_l() must be called with AudioFlinger::mutex() held
 sp<VolumeInterface> AudioFlinger::getVolumeInterface_l(audio_io_handle_t output) const
 {
     sp<VolumeInterface> volumeInterface = mPlaybackThreads.valueFor(output).get();
@@ -3677,7 +3707,7 @@
 
 IAfPlaybackThread* AudioFlinger::primaryPlaybackThread_l() const
 {
-    AutoMutex lock(mHardwareLock);
+    audio_utils::lock_guard lock(hardwareMutex());
     if (mPrimaryHardwareDev == nullptr) {
         return nullptr;
     }
@@ -3702,7 +3732,8 @@
         return {};
     }
 
-    return thread->outDeviceTypes();
+    audio_utils::lock_guard l(thread->mutex());
+    return thread->outDeviceTypes_l();
 }
 
 IAfPlaybackThread* AudioFlinger::fastPlaybackThread_l() const
@@ -3766,7 +3797,7 @@
         // The frameCount should also not be smaller than the secondary thread min frame
         // count
         size_t minFrameCount = AudioSystem::calculateMinFrameCount(
-                    [&] { Mutex::Autolock _l(secondaryThread->mutex());
+                    [&] { audio_utils::lock_guard _l(secondaryThread->mutex());
                           return secondaryThread->latency_l(); }(),
                     secondaryThread->frameCount(), // normal frame count
                     secondaryThread->sampleRate(),
@@ -3834,7 +3865,7 @@
                                     const audioflinger::SyncEventCallback& callBack,
                                     const wp<IAfTrackBase>& cookie)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     auto event = sp<audioflinger::SyncEvent>::make(
             type, triggerSession, listenerSession, callBack, cookie);
@@ -3871,7 +3902,7 @@
 
 status_t AudioFlinger::queryNumberEffects(uint32_t *numEffects) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mEffectsFactoryHal.get()) {
         return mEffectsFactoryHal->queryNumberEffects(numEffects);
     } else {
@@ -3881,7 +3912,7 @@
 
 status_t AudioFlinger::queryEffect(uint32_t index, effect_descriptor_t *descriptor) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mEffectsFactoryHal.get()) {
         return mEffectsFactoryHal->getDescriptor(index, descriptor);
     } else {
@@ -3898,7 +3929,7 @@
         return BAD_VALUE;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     if (!mEffectsFactoryHal.get()) {
         return -ENODEV;
@@ -4018,7 +4049,11 @@
             lStatus = BAD_VALUE;
             goto Exit;
         }
-        IAfPlaybackThread* const thread = checkPlaybackThread_l(io);
+        IAfPlaybackThread* thread;
+        {
+            audio_utils::lock_guard l(mutex());
+            thread = checkPlaybackThread_l(io);
+        }
         if (thread == nullptr) {
             ALOGE("%s: invalid output %d specified for AUDIO_SESSION_OUTPUT_STAGE", __func__, io);
             lStatus = BAD_VALUE;
@@ -4109,7 +4144,7 @@
             ALOGV("createEffect got output %d", io);
         }
 
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
 
         if (sessionId == AUDIO_SESSION_DEVICE) {
             sp<Client> client = registerPid(currentPid);
@@ -4118,8 +4153,8 @@
                     &descOut, device, client, effectClient, mPatchPanel->patches_l(),
                     &enabledOut, &lStatus, probe, request.notifyFramesProcessed);
             if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
-                // remove local strong reference to Client with mClientLock held
-                Mutex::Autolock _cl(mClientLock);
+                // remove local strong reference to Client with clientMutex() held
+                audio_utils::lock_guard _cl(clientMutex());
                 client.clear();
             } else {
                 // handle must be valid here, but check again to be safe.
@@ -4212,7 +4247,7 @@
             // session and used it instead of creating a new one.
             sp<IAfEffectChain> chain = getOrphanEffectChain_l(sessionId);
             if (chain != 0) {
-                Mutex::Autolock _l2(thread->mutex());
+                audio_utils::lock_guard _l2(thread->mutex());
                 thread->addEffectChain_l(chain);
             }
         }
@@ -4239,8 +4274,8 @@
                                         &descOut, &enabledOut, &lStatus, pinned, probe,
                                         request.notifyFramesProcessed);
         if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
-            // remove local strong reference to Client with mClientLock held
-            Mutex::Autolock _cl(mClientLock);
+            // remove local strong reference to Client with clientMutex() held
+            audio_utils::lock_guard _cl(clientMutex());
             client.clear();
         } else {
             // handle must be valid here, but check again to be safe.
@@ -4275,7 +4310,7 @@
 
     response->id = idOut;
     response->enabled = enabledOut != 0;
-    response->effect = handle->asIEffect();
+    response->effect = handle.get() ? handle->asIEffect() : nullptr;
     response->desc = VALUE_OR_RETURN_STATUS(
             legacy2aidl_effect_descriptor_t_EffectDescriptor(descOut));
 
@@ -4283,30 +4318,47 @@
     return lStatus;
 }
 
-status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcOutput,
-        audio_io_handle_t dstOutput)
+status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
+        audio_io_handle_t dstIo)
+NO_THREAD_SAFETY_ANALYSIS
 {
-    ALOGV("%s() session %d, srcOutput %d, dstOutput %d",
-            __func__, sessionId, srcOutput, dstOutput);
-    Mutex::Autolock _l(mLock);
-    if (srcOutput == dstOutput) {
-        ALOGW("%s() same dst and src outputs %d", __func__, dstOutput);
+    ALOGV("%s() session %d, srcIo %d, dstIo %d", __func__, sessionId, srcIo, dstIo);
+    audio_utils::lock_guard _l(mutex());
+    if (srcIo == dstIo) {
+        ALOGW("%s() same dst and src outputs %d", __func__, dstIo);
         return NO_ERROR;
     }
-    IAfPlaybackThread* const srcThread = checkPlaybackThread_l(srcOutput);
+    IAfRecordThread* const srcRecordThread = checkRecordThread_l(srcIo);
+    IAfRecordThread* const dstRecordThread = checkRecordThread_l(dstIo);
+    if (srcRecordThread != nullptr || dstRecordThread != nullptr) {
+        if (srcRecordThread != nullptr) {
+            srcRecordThread->mutex().lock();
+        }
+        if (dstRecordThread != nullptr) {
+            dstRecordThread->mutex().lock();
+        }
+        status_t ret = moveEffectChain_ll(sessionId, srcRecordThread, dstRecordThread);
+        if (srcRecordThread != nullptr) {
+            srcRecordThread->mutex().unlock();
+        }
+        if (dstRecordThread != nullptr) {
+            dstRecordThread->mutex().unlock();
+        }
+        return ret;
+    }
+    IAfPlaybackThread* const srcThread = checkPlaybackThread_l(srcIo);
     if (srcThread == nullptr) {
-        ALOGW("%s() bad srcOutput %d", __func__, srcOutput);
+        ALOGW("%s() bad srcIo %d", __func__, srcIo);
         return BAD_VALUE;
     }
-    IAfPlaybackThread* const dstThread = checkPlaybackThread_l(dstOutput);
+    IAfPlaybackThread* const dstThread = checkPlaybackThread_l(dstIo);
     if (dstThread == nullptr) {
-        ALOGW("%s() bad dstOutput %d", __func__, dstOutput);
+        ALOGW("%s() bad dstIo %d", __func__, dstIo);
         return BAD_VALUE;
     }
 
-    Mutex::Autolock _dl(dstThread->mutex());
-    Mutex::Autolock _sl(srcThread->mutex());
-    return moveEffectChain_l(sessionId, srcThread, dstThread);
+    audio_utils::scoped_lock _ll(dstThread->mutex(), srcThread->mutex());
+    return moveEffectChain_ll(sessionId, srcThread, dstThread);
 }
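
The hunk above replaces the two nested Mutex::Autolock guards on dstThread and srcThread with a single audio_utils::scoped_lock over both thread mutexes. The sketch below illustrates the same idiom with standard library types only; Thread, mutex() and moveBetween() are illustrative stand-ins rather than the real IAfPlaybackThread API, and the audio_utils wrapper presumably behaves like std::scoped_lock while also feeding Clang's thread-safety analysis.

    // Minimal sketch: acquire two per-thread mutexes in one statement.
    // std::scoped_lock uses std::lock's deadlock-avoidance algorithm, so the
    // textual order of dst and src no longer encodes a locking order.
    #include <mutex>

    struct Thread {                        // hypothetical stand-in for IAfPlaybackThread
        std::mutex& mutex() { return mMutex; }
        std::mutex mMutex;
    };

    void moveBetween(Thread& src, Thread& dst) {
        std::scoped_lock lock(dst.mutex(), src.mutex());
        // ... move the effect chain while both threads are held ...
    }
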
 
 
@@ -4314,38 +4366,38 @@
                                 audio_session_t sessionId,
                                 bool suspended)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     sp<IAfThreadBase> thread = getEffectThread_l(sessionId, effectId);
     if (thread == nullptr) {
         return;
     }
-    Mutex::Autolock _sl(thread->mutex());
+    audio_utils::lock_guard _sl(thread->mutex());
     sp<IAfEffectModule> effect = thread->getEffect_l(sessionId, effectId);
     thread->setEffectSuspended_l(&effect->desc().type, suspended, sessionId);
 }
 
 
-// moveEffectChain_l must be called with both srcThread and dstThread mLocks held
-status_t AudioFlinger::moveEffectChain_l(audio_session_t sessionId,
+// moveEffectChain_ll must be called with the AudioFlinger::mutex()
+// and both srcThread and dstThread mutex()s held
+status_t AudioFlinger::moveEffectChain_ll(audio_session_t sessionId,
         IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread)
-NO_THREAD_SAFETY_ANALYSIS // requires srcThread and dstThread locks
 {
-    ALOGV("moveEffectChain_l() session %d from thread %p to thread %p",
-            sessionId, srcThread, dstThread);
+    ALOGV("%s: session %d from thread %p to thread %p",
+            __func__, sessionId, srcThread, dstThread);
 
     sp<IAfEffectChain> chain = srcThread->getEffectChain_l(sessionId);
     if (chain == 0) {
-        ALOGW("moveEffectChain_l() effect chain for session %d not on source thread %p",
-                sessionId, srcThread);
+        ALOGW("%s: effect chain for session %d not on source thread %p",
+                __func__, sessionId, srcThread);
         return INVALID_OPERATION;
     }
 
     // Check whether the destination thread and all effects in the chain are compatible
     if (!chain->isCompatibleWithThread_l(dstThread)) {
-        ALOGW("moveEffectChain_l() effect chain failed because"
+        ALOGW("%s: effect chain failed because"
                 " destination thread %p is not compatible with effects in the chain",
-                dstThread);
+                __func__, dstThread);
         return INVALID_OPERATION;
     }
 
@@ -4367,7 +4419,7 @@
             effect = chain->getEffectFromId_l(0)) {
         srcThread->removeEffect_l(effect);
         removed.add(effect);
-        status = dstThread->addEffect_l(effect);
+        status = dstThread->addEffect_ll(effect);
         if (status != NO_ERROR) {
             errorString = StringPrintf(
                     "cannot add effect %p to destination thread", effect.get());
@@ -4393,7 +4445,7 @@
         for (const auto& effect : removed) {
             dstThread->removeEffect_l(effect); // Note: Depending on error location, the last
                                                // effect may not have been placed on dstThread.
-            if (srcThread->addEffect_l(effect) == NO_ERROR) {
+            if (srcThread->addEffect_ll(effect) == NO_ERROR) {
                 ++restored;
                 if (dstChain == nullptr) {
                     dstChain = effect->getCallback()->chain().promote();
@@ -4409,7 +4461,7 @@
         // If we do not take the dstChain lock, it is possible that processing is ongoing
         // while we are starting the effect.  This can cause glitches with volume,
         // see b/202360137.
-        dstChain->lock();
+        dstChain->mutex().lock();
         for (const auto& effect : removed) {
             if (effect->state() == IAfEffectModule::ACTIVE ||
                     effect->state() == IAfEffectModule::STOPPING) {
@@ -4417,7 +4469,7 @@
                 effect->start();
             }
         }
-        dstChain->unlock();
+        dstChain->mutex().unlock();
     }
 
     if (status != NO_ERROR) {
@@ -4437,17 +4489,58 @@
     return status;
 }
 
+
+// moveEffectChain_ll must be called with both srcThread (if not null) and dstThread (if not null)
+// mutex()s held
+status_t AudioFlinger::moveEffectChain_ll(audio_session_t sessionId,
+        IAfRecordThread* srcThread, IAfRecordThread* dstThread)
+{
+    sp<IAfEffectChain> chain = nullptr;
+    if (srcThread != 0) {
+        const Vector<sp<IAfEffectChain>> effectChains = srcThread->getEffectChains_l();
+        for (size_t i = 0; i < effectChains.size(); i++) {
+            if (effectChains[i]->sessionId() == sessionId) {
+                chain = effectChains[i];
+                break;
+            }
+        }
+        ALOGV_IF(effectChains.size() == 0, "%s: no effect chain on io=%d", __func__,
+                srcThread->id());
+        if (chain == nullptr) {
+            ALOGE("%s wrong session id %d", __func__, sessionId);
+            return BAD_VALUE;
+        }
+        ALOGV("%s: removing effect chain for session=%d io=%d", __func__, sessionId,
+                srcThread->id());
+        srcThread->removeEffectChain_l(chain);
+    } else {
+        chain = getOrphanEffectChain_l(sessionId);
+        if (chain == nullptr) {
+            ALOGE("%s: no orphan effect chain found for session=%d", __func__, sessionId);
+            return BAD_VALUE;
+        }
+    }
+    if (dstThread != 0) {
+        ALOGV("%s: adding effect chain for session=%d on io=%d", __func__, sessionId,
+                dstThread->id());
+        dstThread->addEffectChain_l(chain);
+        return NO_ERROR;
+    }
+    ALOGV("%s: parking to orphan effect chain for session=%d", __func__, sessionId);
+    putOrphanEffectChain_l(chain);
+    return NO_ERROR;
+}
+
 status_t AudioFlinger::moveAuxEffectToIo(int EffectId,
         const sp<IAfPlaybackThread>& dstThread, sp<IAfPlaybackThread>* srcThread)
 {
     status_t status = NO_ERROR;
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     const sp<IAfThreadBase> threadBase = getEffectThread_l(AUDIO_SESSION_OUTPUT_MIX, EffectId);
     const sp<IAfPlaybackThread> thread = threadBase ? threadBase->asIAfPlaybackThread() : nullptr;
 
     if (EffectId != 0 && thread != 0 && dstThread != thread.get()) {
-        Mutex::Autolock _dl(dstThread->mutex());
-        Mutex::Autolock _sl(thread->mutex());
+        audio_utils::scoped_lock _ll(dstThread->mutex(), thread->mutex());
         sp<IAfEffectChain> srcChain = thread->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
         sp<IAfEffectChain> dstChain;
         if (srcChain == 0) {
@@ -4459,16 +4552,16 @@
             return INVALID_OPERATION;
         }
         thread->removeEffect_l(effect);
-        status = dstThread->addEffect_l(effect);
+        status = dstThread->addEffect_ll(effect);
         if (status != NO_ERROR) {
-            thread->addEffect_l(effect);
+            thread->addEffect_ll(effect);
             status = INVALID_OPERATION;
             goto Exit;
         }
 
         dstChain = effect->getCallback()->chain().promote();
         if (dstChain == 0) {
-            thread->addEffect_l(effect);
+            thread->addEffect_ll(effect);
             status = INVALID_OPERATION;
         }
 
@@ -4487,7 +4580,6 @@
 }
 
 bool AudioFlinger::isNonOffloadableGlobalEffectEnabled_l() const
-NO_THREAD_SAFETY_ANALYSIS  // thread lock for getEffectChain_l.
 {
     if (mGlobalEffectEnableTime != 0 &&
             ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) {
@@ -4495,8 +4587,9 @@
     }
 
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
-        sp<IAfEffectChain> ec =
-                mPlaybackThreads.valueAt(i)->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
+        const auto thread = mPlaybackThreads.valueAt(i);
+        audio_utils::lock_guard l(thread->mutex());
+        const sp<IAfEffectChain> ec = thread->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
         if (ec != 0 && ec->isNonOffloadableEnabled()) {
             return true;
         }
@@ -4506,7 +4599,7 @@
 
 void AudioFlinger::onNonOffloadableGlobalEffectEnable()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     mGlobalEffectEnableTime = systemTime();
 
@@ -4551,7 +4644,12 @@
 
 bool AudioFlinger::updateOrphanEffectChains(const sp<IAfEffectModule>& effect)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
+    return updateOrphanEffectChains_l(effect);
+}
+
+bool AudioFlinger::updateOrphanEffectChains_l(const sp<IAfEffectModule>& effect)
+{
     audio_session_t session = effect->sessionId();
     ssize_t index = mOrphanEffectChains.indexOfKey(session);
     ALOGV("updateOrphanEffectChains session %d index %zd", session, index);
@@ -4573,8 +4671,8 @@
 status_t AudioFlinger::listAudioPorts(unsigned int* num_ports,
         struct audio_port* ports) const
 {
-    Mutex::Autolock _l(mLock);
-    return mPatchPanel->listAudioPorts(num_ports, ports);
+    audio_utils::lock_guard _l(mutex());
+    return mPatchPanel->listAudioPorts_l(num_ports, ports);
 }
 
 /* Get supported attributes for a given audio port */
@@ -4584,8 +4682,8 @@
         return status;
     }
 
-    Mutex::Autolock _l(mLock);
-    return mPatchPanel->getAudioPort(port);
+    audio_utils::lock_guard _l(mutex());
+    return mPatchPanel->getAudioPort_l(port);
 }
 
 /* Connect a patch between several source and sink ports */
@@ -4597,23 +4695,23 @@
         return status;
     }
 
-    Mutex::Autolock _l(mLock);
-    return mPatchPanel->createAudioPatch(patch, handle);
+    audio_utils::lock_guard _l(mutex());
+    return mPatchPanel->createAudioPatch_l(patch, handle);
 }
 
 /* Disconnect a patch */
 status_t AudioFlinger::releaseAudioPatch(audio_patch_handle_t handle)
 {
-    Mutex::Autolock _l(mLock);
-    return mPatchPanel->releaseAudioPatch(handle);
+    audio_utils::lock_guard _l(mutex());
+    return mPatchPanel->releaseAudioPatch_l(handle);
 }
 
 /* List connected audio ports and their attributes */
 status_t AudioFlinger::listAudioPatches(
         unsigned int* num_patches, struct audio_patch* patches) const
 {
-    Mutex::Autolock _l(mLock);
-    return mPatchPanel->listAudioPatches(num_patches, patches);
+    audio_utils::lock_guard _l(mutex());
+    return mPatchPanel->listAudioPatches_l(num_patches, patches);
 }
 
 // ----------------------------------------------------------------------------
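
The new record-thread overload of moveEffectChain_ll above treats a null srcThread as "adopt the chain from the orphan store" and a null dstThread as "park the chain as an orphan", mirroring getOrphanEffectChain_l() and putOrphanEffectChain_l(). Below is a minimal sketch of that park/adopt pattern; OrphanStore, EffectChain and ChainPtr are illustrative placeholders for the real DefaultKeyedVector and sp<IAfEffectChain> types.

    #include <map>
    #include <memory>
    #include <utility>

    struct EffectChain { int sessionId; };
    using ChainPtr = std::shared_ptr<EffectChain>;   // stand-in for sp<IAfEffectChain>

    // Chains whose owning thread has gone away are parked here until another
    // thread adopts them for the same session.
    class OrphanStore {
    public:
        // Park a chain (mirrors putOrphanEffectChain_l()).
        void put(int sessionId, ChainPtr chain) { mOrphans[sessionId] = std::move(chain); }

        // Adopt and remove a chain if one exists (mirrors getOrphanEffectChain_l()).
        ChainPtr take(int sessionId) {
            auto it = mOrphans.find(sessionId);
            if (it == mOrphans.end()) return nullptr;
            ChainPtr chain = std::move(it->second);
            mOrphans.erase(it);
            return chain;
        }

    private:
        std::map<int, ChainPtr> mOrphans;
    };
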
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index ef36d50..6af8015 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -20,6 +20,7 @@
 // Classes and interfaces directly used.
 #include "Client.h"
 #include "DeviceEffectManager.h"
+#include "EffectConfiguration.h"
 #include "IAfEffect.h"
 #include "IAfPatchPanel.h"
 #include "IAfThread.h"
@@ -28,6 +29,7 @@
 #include "PatchCommandThread.h"
 
 // External classes
+#include <audio_utils/mutex.h>
 #include <audio_utils/FdToString.h>
 #include <audio_utils/SimpleLog.h>
 #include <media/IAudioFlinger.h>
@@ -63,187 +65,213 @@
 
     // ---- begin IAudioFlinger interface
 
-    status_t dump(int fd, const Vector<String16>& args) final;
+    status_t dump(int fd, const Vector<String16>& args) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t createTrack(const media::CreateTrackRequest& input,
-                         media::CreateTrackResponse& output) final;
+            media::CreateTrackResponse& output) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t createRecord(const media::CreateRecordRequest& input,
-                          media::CreateRecordResponse& output) final;
+            media::CreateRecordResponse& output) final EXCLUDES_AudioFlinger_Mutex;
 
-    uint32_t sampleRate(audio_io_handle_t ioHandle) const final;
-    audio_format_t format(audio_io_handle_t output) const final;
-    size_t frameCount(audio_io_handle_t ioHandle) const final;
-    size_t frameCountHAL(audio_io_handle_t ioHandle) const final;
-    uint32_t latency(audio_io_handle_t output) const final;
+    uint32_t sampleRate(audio_io_handle_t ioHandle) const final EXCLUDES_AudioFlinger_Mutex;
+    audio_format_t format(audio_io_handle_t output) const final EXCLUDES_AudioFlinger_Mutex;
+    size_t frameCount(audio_io_handle_t ioHandle) const final EXCLUDES_AudioFlinger_Mutex;
+    size_t frameCountHAL(audio_io_handle_t ioHandle) const final EXCLUDES_AudioFlinger_Mutex;
+    uint32_t latency(audio_io_handle_t output) const final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t setMasterVolume(float value) final;
-    status_t setMasterMute(bool muted) final;
-    float masterVolume() const final;
-    bool masterMute() const final;
+    status_t setMasterVolume(float value) final EXCLUDES_AudioFlinger_Mutex;
+    status_t setMasterMute(bool muted) final EXCLUDES_AudioFlinger_Mutex;
+    float masterVolume() const final EXCLUDES_AudioFlinger_Mutex;
+    bool masterMute() const final EXCLUDES_AudioFlinger_Mutex;
 
     // Balance value must be within -1.f (left only) to 1.f (right only) inclusive.
-    status_t setMasterBalance(float balance) final;
-    status_t getMasterBalance(float* balance) const final;
+    status_t setMasterBalance(float balance) final EXCLUDES_AudioFlinger_Mutex;
+    status_t getMasterBalance(float* balance) const final EXCLUDES_AudioFlinger_Mutex;
 
     status_t setStreamVolume(audio_stream_type_t stream, float value,
-            audio_io_handle_t output) final;
-    status_t setStreamMute(audio_stream_type_t stream, bool muted) final;
+            audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
+    status_t setStreamMute(audio_stream_type_t stream, bool muted) final
+            EXCLUDES_AudioFlinger_Mutex;
 
     float streamVolume(audio_stream_type_t stream,
-            audio_io_handle_t output) const final;
-    bool streamMute(audio_stream_type_t stream) const final;
+            audio_io_handle_t output) const final EXCLUDES_AudioFlinger_Mutex;
+    bool streamMute(audio_stream_type_t stream) const final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t setMode(audio_mode_t mode) final;
+    status_t setMode(audio_mode_t mode) final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t setMicMute(bool state) final;
-    bool getMicMute() const final;
+    status_t setMicMute(bool state) final EXCLUDES_AudioFlinger_Mutex;
+    bool getMicMute() const final EXCLUDES_AudioFlinger_Mutex;
 
-    void setRecordSilenced(audio_port_handle_t portId, bool silenced) final;
+    void setRecordSilenced(audio_port_handle_t portId, bool silenced) final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    status_t setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) final;
-    String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) const final;
+    status_t setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) final
+            EXCLUDES_AudioFlinger_Mutex;
+    String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) const final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    void registerClient(const sp<media::IAudioFlingerClient>& client) final;
+    void registerClient(const sp<media::IAudioFlingerClient>& client) final
+            EXCLUDES_AudioFlinger_Mutex;
     size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
-            audio_channel_mask_t channelMask) const final;
+            audio_channel_mask_t channelMask) const final EXCLUDES_AudioFlinger_Mutex;
 
     status_t openOutput(const media::OpenOutputRequest& request,
-            media::OpenOutputResponse* response) final;
+            media::OpenOutputResponse* response) final EXCLUDES_AudioFlinger_Mutex;
 
     audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
-            audio_io_handle_t output2) final;
+            audio_io_handle_t output2) final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t closeOutput(audio_io_handle_t output) final;
+    status_t closeOutput(audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t suspendOutput(audio_io_handle_t output) final;
+    status_t suspendOutput(audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t restoreOutput(audio_io_handle_t output) final;
+    status_t restoreOutput(audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t openInput(const media::OpenInputRequest& request,
-            media::OpenInputResponse* response) final;
+            media::OpenInputResponse* response) final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t closeInput(audio_io_handle_t input) final;
+    status_t closeInput(audio_io_handle_t input) final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t setVoiceVolume(float volume) final;
+    status_t setVoiceVolume(float volume) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames,
-            audio_io_handle_t output) const final;
+            audio_io_handle_t output) const final EXCLUDES_AudioFlinger_Mutex;
 
-    uint32_t getInputFramesLost(audio_io_handle_t ioHandle) const final;
+    uint32_t getInputFramesLost(audio_io_handle_t ioHandle) const final
+            EXCLUDES_AudioFlinger_Mutex;
 
     // This is the binder API.  For the internal API see nextUniqueId().
-    audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use) final;
+    audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use) final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) final;
+    void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    void releaseAudioSessionId(audio_session_t audioSession, pid_t pid) final;
+    void releaseAudioSessionId(audio_session_t audioSession, pid_t pid) final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    status_t queryNumberEffects(uint32_t* numEffects) const final;
+    status_t queryNumberEffects(uint32_t* numEffects) const final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t queryEffect(uint32_t index, effect_descriptor_t* descriptor) const final;
+    status_t queryEffect(uint32_t index, effect_descriptor_t* descriptor) const final
+            EXCLUDES_AudioFlinger_Mutex;
 
     status_t getEffectDescriptor(const effect_uuid_t* pUuid,
             const effect_uuid_t* pTypeUuid,
             uint32_t preferredTypeFlag,
-            effect_descriptor_t* descriptor) const final;
+            effect_descriptor_t* descriptor) const final EXCLUDES_AudioFlinger_Mutex;
 
     status_t createEffect(const media::CreateEffectRequest& request,
-            media::CreateEffectResponse* response) final;
+            media::CreateEffectResponse* response) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t moveEffects(audio_session_t sessionId, audio_io_handle_t srcOutput,
-            audio_io_handle_t dstOutput) final;
+            audio_io_handle_t dstOutput) final EXCLUDES_AudioFlinger_Mutex;
 
     void setEffectSuspended(int effectId,
             audio_session_t sessionId,
-            bool suspended) final;
+            bool suspended) final EXCLUDES_AudioFlinger_Mutex;
 
-    audio_module_handle_t loadHwModule(const char* name) final;
+    audio_module_handle_t loadHwModule(const char* name) final EXCLUDES_AudioFlinger_Mutex;
 
-    uint32_t getPrimaryOutputSamplingRate() const final;
-    size_t getPrimaryOutputFrameCount() const final;
+    uint32_t getPrimaryOutputSamplingRate() const final EXCLUDES_AudioFlinger_Mutex;
+    size_t getPrimaryOutputFrameCount() const final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) final;
+    status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) final
+            EXCLUDES_AudioFlinger_Mutex;
 
     /* Get attributes for a given audio port */
-    status_t getAudioPort(struct audio_port_v7* port) const final;
+    status_t getAudioPort(struct audio_port_v7* port) const final EXCLUDES_AudioFlinger_Mutex;
 
     /* Create an audio patch between several source and sink ports */
     status_t createAudioPatch(const struct audio_patch *patch,
-            audio_patch_handle_t* handle) final;
+            audio_patch_handle_t* handle) final EXCLUDES_AudioFlinger_Mutex;
 
     /* Release an audio patch */
-    status_t releaseAudioPatch(audio_patch_handle_t handle) final;
+    status_t releaseAudioPatch(audio_patch_handle_t handle) final EXCLUDES_AudioFlinger_Mutex;
 
     /* List existing audio patches */
     status_t listAudioPatches(unsigned int* num_patches,
-            struct audio_patch* patches) const final;
+            struct audio_patch* patches) const final EXCLUDES_AudioFlinger_Mutex;
 
     /* Set audio port configuration */
-    status_t setAudioPortConfig(const struct audio_port_config* config) final;
+    status_t setAudioPortConfig(const struct audio_port_config* config) final
+            EXCLUDES_AudioFlinger_Mutex;
 
     /* Get the HW synchronization source used for an audio session */
-    audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId) final;
+    audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId) final
+            EXCLUDES_AudioFlinger_Mutex;
 
     /* Indicate JAVA services are ready (scheduling, power management ...) */
-    status_t systemReady() final;
+    status_t systemReady() final EXCLUDES_AudioFlinger_Mutex;
     status_t audioPolicyReady() final { mAudioPolicyReady.store(true); return NO_ERROR; }
 
-    status_t getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) const final;
+    status_t getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) const final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    status_t setAudioHalPids(const std::vector<pid_t>& pids) final;
+    status_t setAudioHalPids(const std::vector<pid_t>& pids) final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) final;
+    status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) final
+            EXCLUDES_AudioFlinger_Mutex;
 
     status_t updateSecondaryOutputs(
-            const TrackSecondaryOutputsMap& trackSecondaryOutputs) final;
+            const TrackSecondaryOutputsMap& trackSecondaryOutputs) final
+            EXCLUDES_AudioFlinger_Mutex;
 
     status_t getMmapPolicyInfos(
             media::audio::common::AudioMMapPolicyType policyType,
-            std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) final;
+            std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    int32_t getAAudioMixerBurstCount() const final;
+    int32_t getAAudioMixerBurstCount() const final EXCLUDES_AudioFlinger_Mutex;
 
-    int32_t getAAudioHardwareBurstMinUsec() const final;
+    int32_t getAAudioHardwareBurstMinUsec() const final EXCLUDES_AudioFlinger_Mutex;
 
     status_t setDeviceConnectedState(const struct audio_port_v7* port,
-            media::DeviceConnectedState state) final;
+            media::DeviceConnectedState state) final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t setSimulateDeviceConnections(bool enabled) final;
+    status_t setSimulateDeviceConnections(bool enabled) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t setRequestedLatencyMode(
-            audio_io_handle_t output, audio_latency_mode_t mode) final;
+            audio_io_handle_t output, audio_latency_mode_t mode) final
+            EXCLUDES_AudioFlinger_Mutex;
 
     status_t getSupportedLatencyModes(audio_io_handle_t output,
-            std::vector<audio_latency_mode_t>* modes) const final;
+            std::vector<audio_latency_mode_t>* modes) const final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t setBluetoothVariableLatencyEnabled(bool enabled) final;
+    status_t setBluetoothVariableLatencyEnabled(bool enabled) final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t isBluetoothVariableLatencyEnabled(bool* enabled) const final;
+    status_t isBluetoothVariableLatencyEnabled(bool* enabled) const final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    status_t supportsBluetoothVariableLatency(bool* support) const final;
+    status_t supportsBluetoothVariableLatency(bool* support) const final
+            EXCLUDES_AudioFlinger_Mutex;
 
     status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
-            sp<media::ISoundDose>* soundDose) const final;
+            sp<media::ISoundDose>* soundDose) const final EXCLUDES_AudioFlinger_Mutex;
 
-    status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) final;
+    status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    status_t getAudioPolicyConfig(media::AudioPolicyConfig* config) final;
+    status_t getAudioPolicyConfig(media::AudioPolicyConfig* config) final
+            EXCLUDES_AudioFlinger_Mutex;
 
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
-            const std::function<status_t()>& delegate) final;
+            const std::function<status_t()>& delegate) final EXCLUDES_AudioFlinger_Mutex;
 
     // ---- end of IAudioFlinger interface
 
     // ---- begin IAfClientCallback interface
 
-    Mutex& clientMutex() const final { return mClientLock; }
-    void removeClient_l(pid_t pid) final;
-    void removeNotificationClient(pid_t pid) final;
+    audio_utils::mutex& clientMutex() const final
+            RETURN_CAPABILITY(audio_utils::AudioFlinger_ClientMutex) {
+        return mClientMutex;
+    }
+    void removeClient_l(pid_t pid) REQUIRES(clientMutex()) final;
+    void removeNotificationClient(pid_t pid) final EXCLUDES_AudioFlinger_Mutex;
     status_t moveAuxEffectToIo(
             int effectId,
             const sp<IAfPlaybackThread>& dstThread,
-            sp<IAfPlaybackThread>* srcThread) final;
+            sp<IAfPlaybackThread>* srcThread) final EXCLUDES_AudioFlinger_Mutex;
 
     // ---- end of IAfClientCallback interface
 
@@ -254,31 +282,35 @@
     // below also used by IAfMelReporterCallback, IAfPatchPanelCallback
     const sp<PatchCommandThread>& getPatchCommandThread() final { return mPatchCommandThread; }
     status_t addEffectToHal(
-            const struct audio_port_config* device, const sp<EffectHalInterface>& effect) final;
+            const struct audio_port_config* device, const sp<EffectHalInterface>& effect) final
+            EXCLUDES_AudioFlinger_HardwareMutex;
     status_t removeEffectFromHal(
-            const struct audio_port_config* device, const sp<EffectHalInterface>& effect) final;
+            const struct audio_port_config* device, const sp<EffectHalInterface>& effect) final
+            EXCLUDES_AudioFlinger_HardwareMutex;
 
     // ---- end of IAfDeviceEffectManagerCallback interface
 
     // ---- begin IAfMelReporterCallback interface
 
     // below also used by IAfThreadCallback
-    Mutex& mutex() const final { return mLock; }
-    sp<IAfThreadBase> checkOutputThread_l(audio_io_handle_t ioHandle) const final REQUIRES(mLock);
+    audio_utils::mutex& mutex() const final
+            RETURN_CAPABILITY(audio_utils::AudioFlinger_Mutex)
+            EXCLUDES_BELOW_AudioFlinger_Mutex { return mMutex; }
+    sp<IAfThreadBase> checkOutputThread_l(audio_io_handle_t ioHandle) const final
+            REQUIRES(mutex());
 
     // ---- end of IAfMelReporterCallback interface
 
     // ---- begin IAfPatchPanelCallback interface
 
-    void closeThreadInternal_l(const sp<IAfPlaybackThread>& thread) final;
-    void closeThreadInternal_l(const sp<IAfRecordThread>& thread) final;
+    void closeThreadInternal_l(const sp<IAfPlaybackThread>& thread) final REQUIRES(mutex());
+    void closeThreadInternal_l(const sp<IAfRecordThread>& thread) final REQUIRES(mutex());
     // return thread associated with primary hardware device, or NULL
-    IAfPlaybackThread* primaryPlaybackThread_l() const final;
-    IAfPlaybackThread* checkPlaybackThread_l(audio_io_handle_t output) const final;
-    IAfRecordThread* checkRecordThread_l(audio_io_handle_t input) const final;
-    IAfMmapThread* checkMmapThread_l(audio_io_handle_t io) const final;
-    void lock() const final ACQUIRE(mLock) { mLock.lock(); }
-    void unlock() const final RELEASE(mLock) { mLock.unlock(); }
+    IAfPlaybackThread* primaryPlaybackThread_l() const final REQUIRES(mutex());
+    IAfPlaybackThread* checkPlaybackThread_l(audio_io_handle_t output) const final
+            REQUIRES(mutex());
+    IAfRecordThread* checkRecordThread_l(audio_io_handle_t input) const final REQUIRES(mutex());
+    IAfMmapThread* checkMmapThread_l(audio_io_handle_t io) const final REQUIRES(mutex());
     sp<IAfThreadBase> openInput_l(audio_module_handle_t module,
             audio_io_handle_t* input,
             audio_config_t* config,
@@ -287,36 +319,41 @@
             audio_source_t source,
             audio_input_flags_t flags,
             audio_devices_t outputDevice,
-            const String8& outputDeviceAddress) final;
+            const String8& outputDeviceAddress) final REQUIRES(mutex());
     sp<IAfThreadBase> openOutput_l(audio_module_handle_t module,
             audio_io_handle_t* output,
             audio_config_t* halConfig,
             audio_config_base_t* mixerConfig,
             audio_devices_t deviceType,
             const String8& address,
-            audio_output_flags_t flags) final;
+            audio_output_flags_t flags) final REQUIRES(mutex());
     const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
-            getAudioHwDevs_l() const final { return mAudioHwDevs; }
+            getAudioHwDevs_l() const final REQUIRES(mutex()) { return mAudioHwDevs; }
     void updateDownStreamPatches_l(const struct audio_patch* patch,
-            const std::set<audio_io_handle_t>& streams) final;
-    void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices) final;
+            const std::set<audio_io_handle_t>& streams) final REQUIRES(mutex());
+    void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices) final
+            REQUIRES(mutex());
 
     // ---- end of IAfPatchPanelCallback interface
 
     // ----- begin IAfThreadCallback interface
 
-    bool isNonOffloadableGlobalEffectEnabled_l() const final;
+    bool isNonOffloadableGlobalEffectEnabled_l() const final
+            REQUIRES(mutex()) EXCLUDES_ThreadBase_Mutex;
     bool btNrecIsOff() const final { return mBtNrecIsOff.load(); }
-    float masterVolume_l() const final;
-    bool masterMute_l() const final;
-    float getMasterBalance_l() const;
-    // no range check, AudioFlinger::mLock held
-    bool streamMute_l(audio_stream_type_t stream) const final { return mStreamTypes[stream].mute; }
+    float masterVolume_l() const final REQUIRES(mutex());
+    bool masterMute_l() const final REQUIRES(mutex());
+    float getMasterBalance_l() const REQUIRES(mutex());
+    // no range check, AudioFlinger::mutex() held
+    bool streamMute_l(audio_stream_type_t stream) const final REQUIRES(mutex()) {
+        return mStreamTypes[stream].mute;
+    }
     audio_mode_t getMode() const final { return mMode; }
     bool isLowRamDevice() const final { return mIsLowRamDevice; }
     uint32_t getScreenState() const final { return mScreenState; }
 
-    std::optional<media::AudioVibratorInfo> getDefaultVibratorInfo_l() const final;
+    std::optional<media::AudioVibratorInfo> getDefaultVibratorInfo_l() const final
+            REQUIRES(mutex());
     const sp<IAfPatchPanel>& getPatchPanel() const final { return mPatchPanel; }
     const sp<MelReporter>& getMelReporter() const final { return mMelReporter; }
     const sp<EffectsFactoryHalInterface>& getEffectsFactoryHal() const final {
@@ -328,34 +365,39 @@
     // effect belongs to an effect chain in mOrphanEffectChains, the chain is updated
     // and removed from mOrphanEffectChains if it does not contain any effect.
     // Return true if the effect was found in mOrphanEffectChains, false otherwise.
-    bool updateOrphanEffectChains(const sp<IAfEffectModule>& effect) final;
+    bool updateOrphanEffectChains(const sp<IAfEffectModule>& effect) final
+            EXCLUDES_AudioFlinger_Mutex;
 
-    status_t moveEffectChain_l(audio_session_t sessionId,
-            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread) final;
+    status_t moveEffectChain_ll(audio_session_t sessionId,
+            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread) final
+            REQUIRES(mutex(), audio_utils::ThreadBase_Mutex);
 
     // This is a helper that is called during incoming binder calls.
     // Requests media.log to start merging log buffers
     void requestLogMerge() final;
-    sp<NBLog::Writer> newWriter_l(size_t size, const char *name) final;
+    sp<NBLog::Writer> newWriter_l(size_t size, const char *name) final REQUIRES(mutex());
     void unregisterWriter(const sp<NBLog::Writer>& writer) final;
 
     sp<audioflinger::SyncEvent> createSyncEvent(AudioSystem::sync_event_t type,
             audio_session_t triggerSession,
             audio_session_t listenerSession,
             const audioflinger::SyncEventCallback& callBack,
-            const wp<IAfTrackBase>& cookie) final;
+            const wp<IAfTrackBase>& cookie) final EXCLUDES_AudioFlinger_Mutex;
 
-    void ioConfigChanged(audio_io_config_event_t event,
+    // Hold either AudioFlinger::mutex() or ThreadBase::mutex()
+    void ioConfigChanged_l(audio_io_config_event_t event,
             const sp<AudioIoDescriptor>& ioDesc,
-            pid_t pid = 0) final;
-    void onNonOffloadableGlobalEffectEnable() final;
+            pid_t pid = 0) final EXCLUDES_AudioFlinger_ClientMutex;
+    void onNonOffloadableGlobalEffectEnable() final EXCLUDES_AudioFlinger_Mutex;
     void onSupportedLatencyModesChanged(
-            audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) final;
+            audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) final
+            EXCLUDES_AudioFlinger_ClientMutex;
 
     // ---- end of IAfThreadCallback interface
 
     /* List available audio ports and their attributes */
-    status_t listAudioPorts(unsigned int* num_ports, struct audio_port* ports) const;
+    status_t listAudioPorts(unsigned int* num_ports, struct audio_port* ports) const
+            EXCLUDES_AudioFlinger_Mutex;
 
     sp<EffectsFactoryHalInterface> getEffectsFactory();
 
@@ -372,7 +414,7 @@
                             audio_session_t *sessionId,
                             const sp<MmapStreamCallback>& callback,
                             sp<MmapStreamInterface>& interface,
-                            audio_port_handle_t *handle);
+            audio_port_handle_t *handle) EXCLUDES_AudioFlinger_Mutex;
 private:
     // FIXME The 400 is temporarily too high until a leak of writers in media.log is fixed.
     static const size_t kLogMemorySize = 400 * 1024;
@@ -380,32 +422,33 @@
     // When a log writer is unregistered, it is done lazily so that media.log can continue to see it
     // for as long as possible.  The memory is only freed when it is needed for another log writer.
     Vector< sp<NBLog::Writer> > mUnregisteredWriters;
-    Mutex               mUnregisteredWritersLock;
+    audio_utils::mutex& unregisteredWritersMutex() const { return mUnregisteredWritersMutex; }
+    mutable audio_utils::mutex mUnregisteredWritersMutex;
 
                             AudioFlinger() ANDROID_API;
     ~AudioFlinger() override;
 
     // call in any IAudioFlinger method that accesses mPrimaryHardwareDev
-    status_t                initCheck() const { return mPrimaryHardwareDev == NULL ?
+    status_t initCheck() const { return mPrimaryHardwareDev == NULL ?
                                                         NO_INIT : NO_ERROR; }
 
     // RefBase
     void onFirstRef() override;
 
     AudioHwDevice*          findSuitableHwDev_l(audio_module_handle_t module,
-                                                audio_devices_t deviceType);
+            audio_devices_t deviceType) REQUIRES(mutex());
 
     // incremented by 2 when screen state changes, bit 0 == 1 means "off"
-    // AudioFlinger::setParameters() updates with mLock.
+    // AudioFlinger::setParameters() updates with mutex().
     std::atomic_uint32_t mScreenState{};
 
     void dumpPermissionDenial(int fd, const Vector<String16>& args);
-    void dumpClients(int fd, const Vector<String16>& args);
-    void dumpInternals(int fd, const Vector<String16>& args);
+    void dumpClients_ll(int fd, const Vector<String16>& args) REQUIRES(mutex(), clientMutex());
+    void dumpInternals_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
 
     SimpleLog mThreadLog{16}; // 16 Thread history limit
 
-    void dumpToThreadLog_l(const sp<IAfThreadBase>& thread);
+    void dumpToThreadLog_l(const sp<IAfThreadBase>& thread) REQUIRES(mutex());
 
     // --- Notification Client ---
     class NotificationClient : public IBinder::DeathRecipient {
@@ -452,21 +495,22 @@
         bool mPendingRequests;
 
         // Mutex and condition variable around mPendingRequests' value
-        Mutex       mMutex;
-        Condition   mCond;
+        audio_utils::mutex mMutex;
+        audio_utils::condition_variable mCondition;
 
         // Duration of the sleep period after a processed request
         static const int kPostTriggerSleepPeriod = 1000000;
     };
 
-    const sp<MediaLogNotifier> mMediaLogNotifier;
+    const sp<MediaLogNotifier> mMediaLogNotifier = sp<MediaLogNotifier>::make();
 
     // Find io handle by session id.
     // Preference is given to an io handle with a matching effect chain to session id.
     // If none found, AUDIO_IO_HANDLE_NONE is returned.
     template <typename T>
     static audio_io_handle_t findIoHandleBySessionId_l(
-            audio_session_t sessionId, const T& threads) {
+            audio_session_t sessionId, const T& threads)
+            REQUIRES(audio_utils::AudioFlinger_Mutex) {
         audio_io_handle_t io = AUDIO_IO_HANDLE_NONE;
 
         for (size_t i = 0; i < threads.size(); i++) {
@@ -481,14 +525,14 @@
         return io;
     }
 
-    IAfThreadBase* checkThread_l(audio_io_handle_t ioHandle) const;
-    IAfPlaybackThread* checkMixerThread_l(audio_io_handle_t output) const;
+    IAfThreadBase* checkThread_l(audio_io_handle_t ioHandle) const REQUIRES(mutex());
+    IAfPlaybackThread* checkMixerThread_l(audio_io_handle_t output) const REQUIRES(mutex());
 
-              sp<VolumeInterface> getVolumeInterface_l(audio_io_handle_t output) const;
-              std::vector<sp<VolumeInterface>> getAllVolumeInterfaces_l() const;
+    sp<VolumeInterface> getVolumeInterface_l(audio_io_handle_t output) const REQUIRES(mutex());
+    std::vector<sp<VolumeInterface>> getAllVolumeInterfaces_l() const REQUIRES(mutex());
 
 
-    void closeOutputFinish(const sp<IAfPlaybackThread>& thread);
+    static void closeOutputFinish(const sp<IAfPlaybackThread>& thread);
     void closeInputFinish(const sp<IAfRecordThread>& thread);
 
               // Allocate an audio_unique_id_t.
@@ -505,22 +549,27 @@
     // used by IAfDeviceEffectManagerCallback, IAfPatchPanelCallback, IAfThreadCallback
     audio_unique_id_t nextUniqueId(audio_unique_id_use_t use) final;
 
+    status_t moveEffectChain_ll(audio_session_t sessionId,
+            IAfRecordThread* srcThread, IAfRecordThread* dstThread)
+            REQUIRES(mutex(), audio_utils::ThreadBase_Mutex);
+
              // return the device types of the primary output, or an empty set if none
-              DeviceTypeSet primaryOutputDevice_l() const;
+    DeviceTypeSet primaryOutputDevice_l() const REQUIRES(mutex());
 
               // return the playback thread with smallest HAL buffer size, and prefer fast
-              IAfPlaybackThread* fastPlaybackThread_l() const;
+    IAfPlaybackThread* fastPlaybackThread_l() const REQUIRES(mutex());
 
-              sp<IAfThreadBase> getEffectThread_l(audio_session_t sessionId, int effectId);
+    sp<IAfThreadBase> getEffectThread_l(audio_session_t sessionId, int effectId)
+            REQUIRES(mutex());
 
-              IAfThreadBase* hapticPlaybackThread_l() const;
+    IAfThreadBase* hapticPlaybackThread_l() const REQUIRES(mutex());
 
               void updateSecondaryOutputsForTrack_l(
                       IAfTrack* track,
                       IAfPlaybackThread* thread,
-                      const std::vector<audio_io_handle_t>& secondaryOutputs) const;
+            const std::vector<audio_io_handle_t>& secondaryOutputs) const REQUIRES(mutex());
 
-                bool isSessionAcquired_l(audio_session_t audioSession);
+    bool isSessionAcquired_l(audio_session_t audioSession) REQUIRES(mutex());
 
                 // Store an effect chain to mOrphanEffectChains keyed vector.
                 // Called when a thread exits and effects are still attached to it.
@@ -529,17 +578,21 @@
                 // return ALREADY_EXISTS if a chain with the same session already exists in
                 // mOrphanEffectChains. Note that this should never happen as there is only one
                 // chain for a given session and it is attached to only one thread at a time.
-                status_t putOrphanEffectChain_l(const sp<IAfEffectChain>& chain);
+    status_t putOrphanEffectChain_l(const sp<IAfEffectChain>& chain) REQUIRES(mutex());
                 // Get an effect chain for the specified session in mOrphanEffectChains and remove
                 // it if found. Returns 0 if not found (this is the most common case).
-                sp<IAfEffectChain> getOrphanEffectChain_l(audio_session_t session);
+    sp<IAfEffectChain> getOrphanEffectChain_l(audio_session_t session) REQUIRES(mutex());
 
-                std::vector< sp<IAfEffectModule> > purgeStaleEffects_l();
+    std::vector< sp<IAfEffectModule> > purgeStaleEffects_l() REQUIRES(mutex());
 
-                void broadcastParametersToRecordThreads_l(const String8& keyValuePairs);
-                void forwardParametersToDownstreamPatches_l(
+    std::vector< sp<IAfEffectModule> > purgeOrphanEffectChains_l() REQUIRES(mutex());
+    bool updateOrphanEffectChains_l(const sp<IAfEffectModule>& effect) REQUIRES(mutex());
+
+    void broadcastParametersToRecordThreads_l(const String8& keyValuePairs) REQUIRES(mutex());
+    void forwardParametersToDownstreamPatches_l(
                         audio_io_handle_t upStream, const String8& keyValuePairs,
-            const std::function<bool(const sp<IAfPlaybackThread>&)>& useThread = nullptr);
+            const std::function<bool(const sp<IAfPlaybackThread>&)>& useThread = nullptr)
+            REQUIRES(mutex());
 
     // for mAudioSessionRefs only
     struct AudioSessionRef {
@@ -551,27 +604,28 @@
         int         mCnt;
     };
 
-    mutable     Mutex                               mLock;
+    mutable audio_utils::mutex mMutex;
                 // protects mClients and mNotificationClients.
-                // must be locked after mLock and ThreadBase::mLock if both must be locked
-                // avoids acquiring AudioFlinger::mLock from inside thread loop.
+                // must be locked after mutex() and ThreadBase::mutex() if both must be locked
+                // avoids acquiring AudioFlinger::mutex() from inside thread loop.
 
-    mutable Mutex mClientLock;
+    mutable audio_utils::mutex mClientMutex;
 
-                // protected by mClientLock
-                DefaultKeyedVector< pid_t, wp<Client> >     mClients;   // see ~Client()
+    DefaultKeyedVector<pid_t, wp<Client>> mClients GUARDED_BY(clientMutex());   // see ~Client()
 
-                mutable     Mutex                   mHardwareLock;
-                // NOTE: If both mLock and mHardwareLock mutexes must be held,
-                // always take mLock before mHardwareLock
+    audio_utils::mutex& hardwareMutex() const { return mHardwareMutex; }
 
-                // guarded by mHardwareLock
-                AudioHwDevice* mPrimaryHardwareDev;
-                DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>  mAudioHwDevs;
+    mutable audio_utils::mutex mHardwareMutex;
+    // NOTE: If both mMutex and mHardwareMutex mutexes must be held,
+    // always take mMutex before mHardwareMutex
 
-                // These two fields are immutable after onFirstRef(), so no lock needed to access
-                sp<DevicesFactoryHalInterface> mDevicesFactoryHal;
-                sp<DevicesFactoryHalCallback> mDevicesFactoryHalCallback;
+    std::atomic<AudioHwDevice*> mPrimaryHardwareDev = nullptr;
+    DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*> mAudioHwDevs
+            GUARDED_BY(hardwareMutex()) {nullptr /* defValue */};
+
+    const sp<DevicesFactoryHalInterface> mDevicesFactoryHal =
+            DevicesFactoryHalInterface::create();
+    /* const */ sp<DevicesFactoryHalCallback> mDevicesFactoryHalCallback;  // set onFirstRef().
 
     // for dump, indicates which hardware operation is currently in progress (but not stream ops)
     enum hardware_call_state {
@@ -601,57 +655,56 @@
         AUDIO_HW_SET_SIMULATE_CONNECTIONS, // setSimulateDeviceConnections
     };
 
-    mutable     hardware_call_state                 mHardwareStatus;    // for dump only
+    mutable hardware_call_state mHardwareStatus = AUDIO_HW_IDLE;  // for dump only
+    DefaultKeyedVector<audio_io_handle_t, sp<IAfPlaybackThread>> mPlaybackThreads
+            GUARDED_BY(mutex());
+    stream_type_t mStreamTypes[AUDIO_STREAM_CNT] GUARDED_BY(mutex());
 
+    float mMasterVolume GUARDED_BY(mutex()) = 1.f;
+    bool mMasterMute GUARDED_BY(mutex()) = false;
+    float mMasterBalance GUARDED_BY(mutex()) = 0.f;
 
-    DefaultKeyedVector<audio_io_handle_t, sp<IAfPlaybackThread>> mPlaybackThreads;
-                stream_type_t                       mStreamTypes[AUDIO_STREAM_CNT];
+    DefaultKeyedVector<audio_io_handle_t, sp<IAfRecordThread>> mRecordThreads GUARDED_BY(mutex());
 
-                // member variables below are protected by mLock
-                float                               mMasterVolume;
-                bool                                mMasterMute;
-                float                               mMasterBalance = 0.f;
-                // end of variables protected by mLock
-
-    DefaultKeyedVector<audio_io_handle_t, sp<IAfRecordThread>> mRecordThreads;
-
-                // protected by mClientLock
-                DefaultKeyedVector< pid_t, sp<NotificationClient> >    mNotificationClients;
+    DefaultKeyedVector<pid_t, sp<NotificationClient>> mNotificationClients
+            GUARDED_BY(clientMutex());
 
                 // updated by atomic_fetch_add_explicit
-                volatile atomic_uint_fast32_t       mNextUniqueIds[AUDIO_UNIQUE_ID_USE_MAX];
+    volatile atomic_uint_fast32_t mNextUniqueIds[AUDIO_UNIQUE_ID_USE_MAX];  // ctor init
 
-                audio_mode_t                        mMode;
-                std::atomic_bool                    mBtNrecIsOff;
+    std::atomic<audio_mode_t> mMode = AUDIO_MODE_INVALID;
+    std::atomic<bool> mBtNrecIsOff = false;
 
-                // protected by mLock
-                Vector<AudioSessionRef*> mAudioSessionRefs;
+    Vector<AudioSessionRef*> mAudioSessionRefs GUARDED_BY(mutex());
 
-                AudioHwDevice* loadHwModule_l(const char *name);
+    AudioHwDevice* loadHwModule_ll(const char *name) REQUIRES(mutex(), hardwareMutex());
 
                 // sync events awaiting for a session to be created.
-                std::list<sp<audioflinger::SyncEvent>> mPendingSyncEvents;
+    std::list<sp<audioflinger::SyncEvent>> mPendingSyncEvents GUARDED_BY(mutex());
 
                 // Effect chains without a valid thread
-                DefaultKeyedVector<audio_session_t, sp<IAfEffectChain>> mOrphanEffectChains;
+    DefaultKeyedVector<audio_session_t, sp<IAfEffectChain>> mOrphanEffectChains
+            GUARDED_BY(mutex());
 
                 // list of sessions for which a valid HW A/V sync ID was retrieved from the HAL
-                DefaultKeyedVector< audio_session_t , audio_hw_sync_t >mHwAvSyncIds;
+    DefaultKeyedVector<audio_session_t, audio_hw_sync_t> mHwAvSyncIds GUARDED_BY(mutex());
 
                 // list of MMAP stream control threads. Those threads allow for wake lock, routing
                 // and volume control for activity on the associated MMAP stream at the HAL.
                 // Audio data transfer is directly handled by the client creating the MMAP stream
-    DefaultKeyedVector<audio_io_handle_t, sp<IAfMmapThread>> mMmapThreads;
+    DefaultKeyedVector<audio_io_handle_t, sp<IAfMmapThread>> mMmapThreads GUARDED_BY(mutex());
 
-    sp<Client>  registerPid(pid_t pid);    // always returns non-0
+    sp<Client> registerPid(pid_t pid) EXCLUDES_AudioFlinger_ClientMutex; // always returns non-0
 
     // for use from destructor
-    status_t    closeOutput_nonvirtual(audio_io_handle_t output);
-    status_t    closeInput_nonvirtual(audio_io_handle_t input);
-    void setAudioHwSyncForSession_l(IAfPlaybackThread* thread, audio_session_t sessionId);
+    status_t closeOutput_nonvirtual(audio_io_handle_t output) EXCLUDES_AudioFlinger_Mutex;
+    status_t closeInput_nonvirtual(audio_io_handle_t input) EXCLUDES_AudioFlinger_Mutex;
+    void setAudioHwSyncForSession_l(IAfPlaybackThread* thread, audio_session_t sessionId)
+            REQUIRES(mutex());
 
-    status_t    checkStreamType(audio_stream_type_t stream) const;
+    static status_t checkStreamType(audio_stream_type_t stream);
 
+    // no mutex needed.
     void        filterReservedParameters(String8& keyValuePairs, uid_t callingUid);
     void        logFilteredParameters(size_t originalKVPSize, const String8& originalKVPs,
                                       size_t rejectedKVPSize, const String8& rejectedKVPs,
@@ -661,45 +714,49 @@
     // though the variables are updated with mutex().
     size_t getClientSharedHeapSize() const;
 
-    std::atomic<bool> mIsLowRamDevice;
-    bool    mIsDeviceTypeKnown;
-    int64_t mTotalMemory;
-    std::atomic<size_t> mClientSharedHeapSize;
+    std::atomic<bool> mIsLowRamDevice = true;
+    bool mIsDeviceTypeKnown GUARDED_BY(mutex()) = false;
+    int64_t mTotalMemory GUARDED_BY(mutex()) = 0;
+    std::atomic<size_t> mClientSharedHeapSize = kMinimumClientSharedHeapSizeBytes;
     static constexpr size_t kMinimumClientSharedHeapSizeBytes = 1024 * 1024; // 1MB
 
-    nsecs_t mGlobalEffectEnableTime;  // when a global effect was last enabled
+    // when a global effect was last enabled
+    nsecs_t mGlobalEffectEnableTime GUARDED_BY(mutex()) = 0;
 
     /* const */ sp<IAfPatchPanel> mPatchPanel;
 
-    sp<EffectsFactoryHalInterface> mEffectsFactoryHal;
+    const sp<EffectsFactoryHalInterface> mEffectsFactoryHal =
+            audioflinger::EffectConfiguration::getEffectsFactoryHal();
 
-    const sp<PatchCommandThread> mPatchCommandThread;
+    const sp<PatchCommandThread> mPatchCommandThread = sp<PatchCommandThread>::make();
     /* const */ sp<DeviceEffectManager> mDeviceEffectManager;  // set onFirstRef
     /* const */ sp<MelReporter> mMelReporter;  // set onFirstRef
 
-    bool       mSystemReady;
-    std::atomic_bool mAudioPolicyReady{};
+    bool mSystemReady GUARDED_BY(mutex()) = false;
+    std::atomic<bool> mAudioPolicyReady = false;
 
-    mediautils::UidInfo mUidInfo;
+    mediautils::UidInfo mUidInfo GUARDED_BY(mutex());
 
+    // no mutex needed.
     SimpleLog  mRejectedSetParameterLog;
     SimpleLog  mAppSetParameterLog;
     SimpleLog  mSystemSetParameterLog;
 
-    std::vector<media::AudioVibratorInfo> mAudioVibratorInfos;
+    std::vector<media::AudioVibratorInfo> mAudioVibratorInfos GUARDED_BY(mutex());
 
     static inline constexpr const char *mMetricsId = AMEDIAMETRICS_KEY_AUDIO_FLINGER;
 
     std::map<media::audio::common::AudioMMapPolicyType,
-             std::vector<media::audio::common::AudioMMapPolicyInfo>> mPolicyInfos;
-    int32_t mAAudioBurstsPerBuffer = 0;
-    int32_t mAAudioHwBurstMinMicros = 0;
+             std::vector<media::audio::common::AudioMMapPolicyInfo>> mPolicyInfos
+             GUARDED_BY(mutex());
+    int32_t mAAudioBurstsPerBuffer GUARDED_BY(mutex()) = 0;
+    int32_t mAAudioHwBurstMinMicros GUARDED_BY(mutex()) = 0;
 
     /** Interface for interacting with the AudioService. */
-    mediautils::atomic_sp<IAudioManager>       mAudioManager;
+    mediautils::atomic_sp<IAudioManager> mAudioManager;
 
     // Bluetooth Variable latency control logic is enabled or disabled
-    std::atomic_bool mBluetoothLatencyModesEnabled;
+    std::atomic<bool> mBluetoothLatencyModesEnabled = true;
 };
 
 // ----------------------------------------------------------------------------
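
The header changes above replace "// protected by mLock" style comments with Clang thread-safety annotations: members are GUARDED_BY a mutex, _l methods carry REQUIRES, and public entry points use EXCLUDES-style macros, all expressed against mutex()/clientMutex() accessors that return an annotated capability. The self-contained sketch below shows the idiom using the raw Clang attributes; the CapMutex class and macro spellings are assumptions for illustration, not the project's actual definitions in audio_utils/mutex.h or the EXCLUDES_AudioFlinger_* macros.

    // Build with: clang++ -std=c++17 -Wthread-safety -c sketch.cpp
    #include <mutex>

    class __attribute__((capability("mutex"))) CapMutex {
    public:
        void lock()   __attribute__((acquire_capability())) { m.lock(); }
        void unlock() __attribute__((release_capability())) { m.unlock(); }
    private:
        std::mutex m;
    };

    #define GUARDED_BY(x)        __attribute__((guarded_by(x)))
    #define REQUIRES(...)        __attribute__((requires_capability(__VA_ARGS__)))
    #define EXCLUDES(...)        __attribute__((locks_excluded(__VA_ARGS__)))
    #define RETURN_CAPABILITY(x) __attribute__((lock_returned(x)))

    class Flinger {
    public:
        // The accessor returns the annotated capability, so GUARDED_BY(mutex())
        // and REQUIRES(mutex()) below all resolve to mMutex through one name.
        CapMutex& mutex() const RETURN_CAPABILITY(mMutex) { return mMutex; }

        float masterVolume() const EXCLUDES(mMutex) {   // public API: lock not held on entry
            mutex().lock();
            const float v = masterVolume_l();
            mutex().unlock();
            return v;
        }

    private:
        float masterVolume_l() const REQUIRES(mutex()) { return mMasterVolume; }
        mutable CapMutex mMutex;
        float mMasterVolume GUARDED_BY(mutex()) = 1.f;
    };
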
diff --git a/services/audioflinger/Client.h b/services/audioflinger/Client.h
index b2e3cf7..ff0d751 100644
--- a/services/audioflinger/Client.h
+++ b/services/audioflinger/Client.h
@@ -17,8 +17,8 @@
 #pragma once
 
 #include <afutils/AllocatorFactory.h>
+#include <audio_utils/mutex.h>
 #include <android-base/macros.h>  // DISALLOW_COPY_AND_ASSIGN
-#include <utils/Mutex.h>
 #include <utils/RefBase.h>        // avoid transitive dependency
 
 // TODO(b/291318727) Move to nested namespace
@@ -28,13 +28,16 @@
 
 class IAfClientCallback : public virtual RefBase {
 public:
-    virtual Mutex& clientMutex() const = 0;
-    virtual void removeClient_l(pid_t pid) = 0;
-    virtual void removeNotificationClient(pid_t pid) = 0;
+    virtual audio_utils::mutex& clientMutex() const
+            RETURN_CAPABILITY(audio_utils::AudioFlinger_ClientMutex) = 0;
+    virtual void removeClient_l(pid_t pid) REQUIRES(clientMutex()) = 0;
+    virtual void removeNotificationClient(pid_t pid) EXCLUDES_AudioFlinger_Mutex = 0;
+
+    // used indirectly by clients.
     virtual status_t moveAuxEffectToIo(
             int effectId,
             const sp<IAfPlaybackThread>& dstThread,
-            sp<IAfPlaybackThread>* srcThread) = 0;  // used by indirectly by clients.
+            sp<IAfPlaybackThread>* srcThread) EXCLUDES_AudioFlinger_Mutex = 0;
 };
 
 class Client : public RefBase {
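
Several members above (mMediaLogNotifier, mPatchCommandThread) and the DeviceEffectManagerCallback below are now constructed with sp<T>::make(...) instead of assigning a raw "new T(...)". make() creates the RefBase object and hands it back already owned by a strong pointer, so there is no window where a raw pointer exists with no reference holder. The snippet below is a hedged illustration with a placeholder Foo class rather than the real types.

    #include <utils/RefBase.h>
    #include <utils/StrongPointer.h>

    using android::sp;

    struct Foo : public android::RefBase {
        explicit Foo(int v) : value(v) {}
        int value;
    };

    int example() {
        sp<Foo> a = sp<Foo>::make(42);     // preferred: created already owned by an sp<>
        sp<Foo> b = new Foo(42);           // legacy: implicit raw-pointer conversion
        return a->value + b->value;
    }
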
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 6636717..201d147 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -38,7 +38,7 @@
 DeviceEffectManager::DeviceEffectManager(
         const sp<IAfDeviceEffectManagerCallback>& afDeviceEffectManagerCallback)
     : mAfDeviceEffectManagerCallback(afDeviceEffectManagerCallback),
-      mMyCallback(new DeviceEffectManagerCallback(*this)) {}
+      mMyCallback(sp<DeviceEffectManagerCallback>::make(*this)) {}
 
 void DeviceEffectManager::onFirstRef() {
     mAfDeviceEffectManagerCallback->getPatchCommandThread()->addListener(this);
@@ -59,23 +59,42 @@
     ALOGV("%s handle %d mHalHandle %d device sink %08x",
             __func__, handle, patch.mHalHandle,
             patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
-    Mutex::Autolock _l(mLock);
-    for (auto& effect : mDeviceEffects) {
-        status_t status = effect.second->onCreatePatch(handle, patch);
-        ALOGV("%s Effect onCreatePatch status %d", __func__, status);
-        ALOGW_IF(status == BAD_VALUE, "%s onCreatePatch error %d", __func__, status);
+    audio_utils::lock_guard _l(mutex());
+    for (auto& effectProxies : mDeviceEffects) {
+        for (auto& effect : effectProxies.second) {
+            const status_t status = effect->onCreatePatch(handle, patch);
+            ALOGV("%s Effect onCreatePatch status %d", __func__, status);
+            ALOGW_IF(status == BAD_VALUE, "%s onCreatePatch error %d", __func__, status);
+        }
     }
 }
 
 void DeviceEffectManager::onReleaseAudioPatch(audio_patch_handle_t handle) {
     ALOGV("%s", __func__);
-    Mutex::Autolock _l(mLock);
-    for (auto& effect : mDeviceEffects) {
-        effect.second->onReleasePatch(handle);
+    audio_utils::lock_guard _l(mutex());
+    for (auto& effectProxies : mDeviceEffects) {
+        for (auto& effect : effectProxies.second) {
+            effect->onReleasePatch(handle);
+        }
     }
 }
 
-// DeviceEffectManager::createEffect_l() must be called with AudioFlinger::mLock held
+void DeviceEffectManager::onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+        audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) {
+    ALOGV("%s oldhandle %d newHandle %d mHalHandle %d device sink %08x",
+            __func__, oldHandle, newHandle, patch.mHalHandle,
+            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+    audio_utils::lock_guard _l(mutex());
+    for (auto& effectProxies : mDeviceEffects) {
+        for (auto& effect : effectProxies.second) {
+            const status_t status = effect->onUpdatePatch(oldHandle, newHandle, patch);
+            ALOGV("%s Effect onUpdatePatch status %d", __func__, status);
+            ALOGW_IF(status != NO_ERROR, "%s onUpdatePatch error %d", __func__, status);
+        }
+    }
+}
+
+// DeviceEffectManager::createEffect_l() must be called with AudioFlinger::mutex() held
 sp<IAfEffectHandle> DeviceEffectManager::createEffect_l(
         effect_descriptor_t *descriptor,
         const AudioDeviceTypeAddr& device,
@@ -87,6 +106,7 @@
         bool probe,
         bool notifyFramesProcessed) {
     sp<IAfDeviceEffectProxy> effect;
+    std::vector<sp<IAfDeviceEffectProxy>> effectsForDevice = {};
     sp<IAfEffectHandle> handle;
     status_t lStatus;
 
@@ -97,15 +117,24 @@
     }
 
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         auto iter = mDeviceEffects.find(device);
         if (iter != mDeviceEffects.end()) {
-            effect = iter->second;
-        } else {
+            effectsForDevice = iter->second;
+            for (const auto& iterEffect : effectsForDevice) {
+                if (memcmp(&iterEffect->desc().uuid, &descriptor->uuid, sizeof(effect_uuid_t)) ==
+                    0) {
+                    effect = iterEffect;
+                    break;
+                }
+            }
+        }
+        if (effect == nullptr) {
             effect = IAfDeviceEffectProxy::create(device, mMyCallback,
                     descriptor,
                     mAfDeviceEffectManagerCallback->nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT),
                     notifyFramesProcessed);
+            effectsForDevice.push_back(effect);
         }
         // create effect handle and connect it to effect module
         handle = IAfEffectHandle::create(
@@ -119,7 +148,8 @@
                     lStatus = NO_ERROR;
                 }
                 if (lStatus == NO_ERROR || lStatus == ALREADY_EXISTS) {
-                    mDeviceEffects.emplace(device, effect);
+                    mDeviceEffects.erase(device);
+                    mDeviceEffects.emplace(device, effectsForDevice);
                 }
             }
         }
@@ -131,6 +161,7 @@
     return handle;
 }
 
+/* static */
 status_t DeviceEffectManager::checkEffectCompatibility(
         const effect_descriptor_t *desc) {
     const sp<EffectsFactoryHalInterface> effectsFactory =
@@ -157,6 +188,7 @@
     return NO_ERROR;
 }
 
+/* static */
 status_t DeviceEffectManager::createEffectHal(
         const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
         sp<EffectHalInterface> *effect) {
@@ -173,7 +205,7 @@
 void DeviceEffectManager::dump(int fd)
 NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
-    const bool locked = afutils::dumpTryLock(mLock);
+    const bool locked = afutils::dumpTryLock(mutex());
     if (!locked) {
         String8 result("DeviceEffectManager may be deadlocked\n");
         write(fd, result.c_str(), result.size());
@@ -185,19 +217,34 @@
         String8 outStr;
         outStr.appendFormat("%*sEffect for device %s address %s:\n", 2, "",
                 ::android::toString(iter.first.mType).c_str(), iter.first.getAddress());
-        write(fd, outStr.c_str(), outStr.size());
-        iter.second->dump2(fd, 4);
+        for (const auto& effect : iter.second) {
+            write(fd, outStr.c_str(), outStr.size());
+            effect->dump2(fd, 4);
+        }
     }
 
     if (locked) {
-        mLock.unlock();
+        mutex().unlock();
     }
 }
 
 size_t DeviceEffectManager::removeEffect(const sp<IAfDeviceEffectProxy>& effect)
 {
-    Mutex::Autolock _l(mLock);
-    mDeviceEffects.erase(effect->device());
+    audio_utils::lock_guard _l(mutex());
+    const auto& iter = mDeviceEffects.find(effect->device());
+    if (iter != mDeviceEffects.end()) {
+        const auto& iterEffect = std::find_if(
+                iter->second.begin(), iter->second.end(), [&effect](const auto& effectProxy) {
+                    return memcmp(&effectProxy->desc().uuid, &effect->desc().uuid,
+                            sizeof(effect_uuid_t)) == 0;
+                });
+        if (iterEffect != iter->second.end()) {
+            iter->second.erase(iterEffect);
+            if (iter->second.empty()) {
+                mDeviceEffects.erase(effect->device());
+            }
+        }
+    }
     return mDeviceEffects.size();
 }
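
For readers following the mDeviceEffects change above: the map now stores a vector of
proxies per device and matches entries by effect UUID, both when reusing an existing proxy
in createEffect_l() and when pruning in removeEffect(). A minimal standalone sketch of that
add-or-reuse / remove-and-prune bookkeeping over plain standard-library types (Uuid, Effect
and the function names are placeholders, not the AOSP types):

#include <algorithm>
#include <array>
#include <cstdint>
#include <map>
#include <memory>
#include <string>
#include <vector>

using Uuid = std::array<uint8_t, 16>;   // stand-in for effect_uuid_t
using DeviceKey = std::string;          // stand-in for AudioDeviceTypeAddr

struct Effect {
    Uuid uuid;
    std::string name;
};

using DeviceEffects = std::map<DeviceKey, std::vector<std::shared_ptr<Effect>>>;

// Reuse an existing proxy with the same UUID for this device, else create one.
std::shared_ptr<Effect> addOrReuse(DeviceEffects& effects, const DeviceKey& device,
                                   const Uuid& uuid, const std::string& name) {
    auto& forDevice = effects[device];                 // creates the vector on first use
    for (const auto& e : forDevice) {
        if (e->uuid == uuid) return e;                 // same effect already attached
    }
    auto created = std::make_shared<Effect>(Effect{uuid, name});
    forDevice.push_back(created);
    return created;
}

// Remove one proxy; drop the device entry entirely once its vector is empty.
void removeEffect(DeviceEffects& effects, const DeviceKey& device, const Uuid& uuid) {
    const auto it = effects.find(device);
    if (it == effects.end()) return;
    auto& forDevice = it->second;
    forDevice.erase(std::remove_if(forDevice.begin(), forDevice.end(),
                                   [&uuid](const auto& e) { return e->uuid == uuid; }),
                    forDevice.end());
    if (forDevice.empty()) {
        effects.erase(it);
    }
}

Keeping the outer map keyed by device preserves the per-device fan-out that
onCreateAudioPatch()/onReleaseAudioPatch() rely on, while the inner vector allows more
than one device effect per sink.
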
 
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 6111030..faba806 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -20,8 +20,6 @@
 #include "IAfEffect.h"
 #include "PatchCommandThread.h"
 
-#include <utils/Mutex.h>  // avoid transitive dependency
-
 namespace android {
 
 class IAfDeviceEffectManagerCallback : public virtual RefBase {
@@ -30,9 +28,11 @@
     virtual audio_unique_id_t nextUniqueId(audio_unique_id_use_t use) = 0;
     virtual const sp<PatchCommandThread>& getPatchCommandThread() = 0;
     virtual status_t addEffectToHal(
-            const struct audio_port_config* device, const sp<EffectHalInterface>& effect) = 0;
+            const struct audio_port_config* device, const sp<EffectHalInterface>& effect)
+            EXCLUDES_AudioFlinger_HardwareMutex = 0;
     virtual status_t removeEffectFromHal(
-            const struct audio_port_config* device, const sp<EffectHalInterface>& effect) = 0;
+            const struct audio_port_config* device, const sp<EffectHalInterface>& effect)
+            EXCLUDES_AudioFlinger_HardwareMutex = 0;
 };
 
 class DeviceEffectManagerCallback;
@@ -53,10 +53,10 @@
                 int *enabled,
                 status_t *status,
                 bool probe,
-                bool notifyFramesProcessed);
+                bool notifyFramesProcessed) REQUIRES(audio_utils::AudioFlinger_Mutex);
 
     size_t removeEffect(const sp<IAfDeviceEffectProxy>& effect);
-    status_t createEffectHal(const effect_uuid_t *pEffectUuid,
+    static status_t createEffectHal(const effect_uuid_t *pEffectUuid,
            int32_t sessionId, int32_t deviceId,
            sp<EffectHalInterface> *effect);
     status_t addEffectToHal(const struct audio_port_config *device,
@@ -71,16 +71,25 @@
     // PatchCommandThread::PatchCommandListener implementation
 
     void onCreateAudioPatch(audio_patch_handle_t handle,
-            const IAfPatchPanel::Patch& patch) final;
-    void onReleaseAudioPatch(audio_patch_handle_t handle) final;
+            const IAfPatchPanel::Patch& patch) final
+            EXCLUDES_DeviceEffectManager_Mutex;
+    void onReleaseAudioPatch(audio_patch_handle_t handle) final
+            EXCLUDES_DeviceEffectManager_Mutex;
+    void onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+            audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) final
+            EXCLUDES_DeviceEffectManager_Mutex;
 
 private:
-    status_t checkEffectCompatibility(const effect_descriptor_t *desc);
+    static status_t checkEffectCompatibility(const effect_descriptor_t *desc);
 
-    Mutex mLock;
+    audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::DeviceEffectManager_Mutex) {
+        return mMutex;
+    }
+    mutable audio_utils::mutex mMutex;
     const sp<IAfDeviceEffectManagerCallback> mAfDeviceEffectManagerCallback;
     const sp<DeviceEffectManagerCallback> mMyCallback;
-    std::map<AudioDeviceTypeAddr, sp<IAfDeviceEffectProxy>> mDeviceEffects;
+    std::map<AudioDeviceTypeAddr, std::vector<sp<IAfDeviceEffectProxy>>>
+            mDeviceEffects GUARDED_BY(mutex());
 };
 
 class DeviceEffectManagerCallback : public EffectCallbackInterface {
@@ -89,55 +98,54 @@
         : mManager(manager) {}
 
     status_t createEffectHal(const effect_uuid_t *pEffectUuid,
-           int32_t sessionId, int32_t deviceId,
-           sp<EffectHalInterface> *effect) override {
+            int32_t sessionId, int32_t deviceId, sp<EffectHalInterface> *effect) final {
                 return mManager.createEffectHal(pEffectUuid, sessionId, deviceId, effect);
             }
     status_t allocateHalBuffer(size_t size __unused,
-            sp<EffectBufferHalInterface>* buffer __unused) override { return NO_ERROR; }
-    bool updateOrphanEffectChains(const sp<IAfEffectBase>& effect __unused) override {
+            sp<EffectBufferHalInterface>* buffer __unused) final { return NO_ERROR; }
+    bool updateOrphanEffectChains(const sp<IAfEffectBase>& effect __unused) final {
         return false;
     }
 
-    audio_io_handle_t io() const override  { return AUDIO_IO_HANDLE_NONE; }
-    bool isOutput() const override { return false; }
-    bool isOffload() const override { return false; }
-    bool isOffloadOrDirect() const override { return false; }
-    bool isOffloadOrMmap() const override { return false; }
-    bool isSpatializer() const override { return false; }
+    audio_io_handle_t io() const final { return AUDIO_IO_HANDLE_NONE; }
+    bool isOutput() const final { return false; }
+    bool isOffload() const final { return false; }
+    bool isOffloadOrDirect() const final { return false; }
+    bool isOffloadOrMmap() const final { return false; }
+    bool isSpatializer() const final { return false; }
 
-    uint32_t  sampleRate() const override { return 0; }
-    audio_channel_mask_t inChannelMask(int id __unused) const override {
+    uint32_t sampleRate() const final { return 0; }
+    audio_channel_mask_t inChannelMask(int id __unused) const final {
         return AUDIO_CHANNEL_NONE;
     }
-    uint32_t inChannelCount(int id __unused) const override { return 0; }
-    audio_channel_mask_t outChannelMask() const override { return AUDIO_CHANNEL_NONE; }
-    uint32_t outChannelCount() const override { return 0; }
+    uint32_t inChannelCount(int id __unused) const final { return 0; }
+    audio_channel_mask_t outChannelMask() const final { return AUDIO_CHANNEL_NONE; }
+    uint32_t outChannelCount() const final { return 0; }
 
-    audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
-    size_t    frameCount() const override  { return 0; }
-    uint32_t  latency() const override  { return 0; }
+    audio_channel_mask_t hapticChannelMask() const final { return AUDIO_CHANNEL_NONE; }
+    size_t frameCount() const final { return 0; }
+    uint32_t latency() const final { return 0; }
 
-    status_t addEffectToHal(const sp<EffectHalInterface>& /* effect */) override {
+    status_t addEffectToHal(const sp<EffectHalInterface>& /* effect */) final {
         return NO_ERROR;
     }
-    status_t removeEffectFromHal(const sp<EffectHalInterface>& /* effect */) override {
+    status_t removeEffectFromHal(const sp<EffectHalInterface>& /* effect */) final {
         return NO_ERROR;
     }
 
-    bool disconnectEffectHandle(IAfEffectHandle *handle, bool unpinIfLast) override;
-    void setVolumeForOutput(float left __unused, float right __unused) const override {}
+    bool disconnectEffectHandle(IAfEffectHandle *handle, bool unpinIfLast) final;
+    void setVolumeForOutput(float left __unused, float right __unused) const final {}
 
     // check if effects should be suspended or restored when a given effect is enabled or disabled
     void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect __unused,
-                          bool enabled __unused, bool threadLocked __unused) override {}
-    void resetVolume() override {}
-    product_strategy_t strategy() const override  { return static_cast<product_strategy_t>(0); }
-    int32_t activeTrackCnt() const override { return 0; }
-    void onEffectEnable(const sp<IAfEffectBase>& effect __unused) override {}
-    void onEffectDisable(const sp<IAfEffectBase>& effect __unused) override {}
+                          bool enabled __unused, bool threadLocked __unused) final {}
+    void resetVolume() final {}
+    product_strategy_t strategy() const final { return static_cast<product_strategy_t>(0); }
+    int32_t activeTrackCnt() const final { return 0; }
+    void onEffectEnable(const sp<IAfEffectBase>& effect __unused) final {}
+    void onEffectDisable(const sp<IAfEffectBase>& effect __unused) final {}
 
-    wp<IAfEffectChain> chain() const override { return nullptr; }
+    wp<IAfEffectChain> chain() const final { return nullptr; }
 
     bool isAudioPolicyReady() const final;
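
The dump() methods touched by this change (DeviceEffectManager::dump() above, plus the
EffectBase, EffectModule, EffectChain and DeviceEffectProxy dumps below) keep the existing
afutils::dumpTryLock() discipline: try to take the lock, warn about a likely deadlock if
that fails, dump anyway, and unlock only what was actually acquired. A rough standalone
equivalent of that pattern using std::timed_mutex; the 50 ms timeout and all names here are
assumptions, not values from the patch:

#include <chrono>
#include <cstdio>
#include <mutex>

// Dump shared state without risking a hang if the lock owner is wedged:
// acquire the mutex opportunistically and remember whether we actually got it.
void dumpCounter(std::timed_mutex& mutex, const int& guardedCounter, std::FILE* out) {
    const bool locked = mutex.try_lock_for(std::chrono::milliseconds(50));
    if (!locked) {
        std::fprintf(out, "state may be deadlocked, dumping without the lock\n");
    }

    // This read may race when the lock was not obtained; that is accepted for a
    // best-effort diagnostic dump (hence NO_THREAD_SAFETY_ANALYSIS in the patch).
    std::fprintf(out, "counter: %d\n", guardedCounter);

    if (locked) {
        mutex.unlock();
    }
}
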
 
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index fe13e26..93db608 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -109,7 +109,7 @@
 {
 }
 
-// must be called with EffectModule::mLock held
+// must be called with EffectModule::mutex() held
 status_t EffectBase::setEnabled_l(bool enabled)
 {
 
@@ -155,7 +155,7 @@
 {
     status_t status;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         status = setEnabled_l(enabled);
     }
     if (fromHandle) {
@@ -190,13 +190,13 @@
 
 void EffectBase::setSuspended(bool suspended)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mSuspended = suspended;
 }
 
 bool EffectBase::suspended() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return mSuspended;
 }
 
@@ -204,7 +204,7 @@
 {
     status_t status;
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     int priority = handle->priority();
     size_t size = mHandles.size();
     IAfEffectHandle *controlHandle = nullptr;
@@ -250,7 +250,7 @@
     product_strategy_t strategy = PRODUCT_STRATEGY_NONE;
 
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
 
         if ((isInternal_l() && !mPolicyRegistered)
                 || !getCallback()->isAudioPolicyReady()) {
@@ -284,7 +284,7 @@
             return NO_ERROR;
         }
     }
-    mPolicyLock.lock();
+    policyMutex().lock();
     ALOGV("%s name %s id %d session %d doRegister %d registered %d doEnable %d enabled %d",
         __func__, mDescriptor.name, mId, mSessionId, doRegister, registered, doEnable, enabled);
     if (doRegister) {
@@ -302,7 +302,7 @@
     if (registered && doEnable) {
         status = AudioSystem::setEffectEnabled(mId, enabled);
     }
-    mPolicyLock.unlock();
+    policyMutex().unlock();
 
     return status;
 }
@@ -310,7 +310,7 @@
 
 ssize_t EffectBase::removeHandle(IAfEffectHandle *handle)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return removeHandle_l(handle);
 }
 
@@ -348,7 +348,7 @@
     return mHandles.size();
 }
 
-// must be called with EffectModule::mLock held
+// must be called with EffectModule::mutex() held
 IAfEffectHandle *EffectBase::controlHandle_l()
 {
     // the first valid handle in the list has control over the module
@@ -371,12 +371,12 @@
         return mHandles.size();
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     ssize_t numHandles = removeHandle_l(handle);
     if ((numHandles == 0) && (!mPinned || unpinIfLast)) {
-        mLock.unlock();
+        mutex().unlock();
         callback->updateOrphanEffectChains(this);
-        mLock.lock();
+        mutex().lock();
     }
     return numHandles;
 }
@@ -384,7 +384,7 @@
 bool EffectBase::purgeHandles()
 {
     bool enabled = false;
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     IAfEffectHandle *handle = controlHandle_l();
     if (handle != NULL) {
         enabled = handle->enabled();
@@ -509,16 +509,17 @@
 
     result.appendFormat("\tEffect ID %d:\n", mId);
 
-    const bool locked = afutils::dumpTryLock(mLock);
+    const bool locked = afutils::dumpTryLock(mutex());
     // failed to lock - AudioFlinger is probably deadlocked
     if (!locked) {
         result.append("\t\tCould not lock Fx mutex:\n");
     }
-
-    result.append("\t\tSession State Registered Enabled Suspended:\n");
-    result.appendFormat("\t\t%05d   %03d   %s          %s       %s\n",
-            mSessionId, mState, mPolicyRegistered ? "y" : "n",
-            mPolicyEnabled ? "y" : "n", mSuspended ? "y" : "n");
+    bool isInternal = isInternal_l();
+    result.append("\t\tSession State Registered Internal Enabled Suspended:\n");
+    result.appendFormat("\t\t%05d   %03d   %s          %s        %s       %s\n",
+            mSessionId, mState, mPolicyRegistered ? "y" : "n", isInternal ? "y" : "n",
+            ((isInternal && isEnabled()) || (!isInternal && mPolicyEnabled)) ? "y" : "n",
+            mSuspended ? "y" : "n");
 
     result.append("\t\tDescriptor:\n");
     char uuidStr[64];
@@ -547,7 +548,7 @@
         }
     }
     if (locked) {
-        mLock.unlock();
+        mutex().unlock();
     }
 
     write(fd, result.c_str(), result.length());
@@ -616,7 +617,7 @@
 }
 
 bool EffectModule::updateState() {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     bool started = false;
     switch (mState) {
@@ -672,7 +673,7 @@
 
 void EffectModule::process()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     if (mState == DESTROYED || mEffectInterface == 0 || mInBuffer == 0 || mOutBuffer == 0) {
         return;
@@ -1001,8 +1002,9 @@
     // mConfig.outputCfg.buffer.frameCount cannot be zero.
     mMaxDisableWaitCnt = (uint32_t)std::max(
             (uint64_t)1, // mMaxDisableWaitCnt must be greater than zero.
-            (uint64_t)MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate
-                / ((uint64_t)1000 * mConfig.outputCfg.buffer.frameCount));
+            (uint64_t)mConfig.outputCfg.buffer.frameCount == 0 ? 1
+                : (MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate
+                / ((uint64_t)1000 * mConfig.outputCfg.buffer.frameCount)));
 
 exit:
     // TODO: consider clearing mConfig on error.
@@ -1013,7 +1015,7 @@
 
 status_t EffectModule::init()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mEffectInterface == 0) {
         return NO_INIT;
     }
@@ -1043,12 +1045,12 @@
     }
 }
 
-// start() must be called with PlaybackThread::mLock or EffectChain::mLock held
+// start() must be called with PlaybackThread::mutex() or EffectChain::mutex() held
 status_t EffectModule::start()
 {
     status_t status;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         status = start_l();
     }
     if (status == NO_ERROR) {
@@ -1083,7 +1085,7 @@
 
 status_t EffectModule::stop()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return stop_l();
 }
 
@@ -1120,7 +1122,7 @@
     return status;
 }
 
-// must be called with EffectChain::mLock held
+// must be called with EffectChain::mutex() held
 void EffectModule::release_l()
 {
     if (mEffectInterface != 0) {
@@ -1156,7 +1158,7 @@
                      int32_t maxReplySize,
                      std::vector<uint8_t>* reply)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface.get());
 
     if (mState == DESTROYED || mEffectInterface == 0) {
@@ -1353,7 +1355,7 @@
 
 status_t EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
 {
-    AutoLockReentrant _l(mLock, mSetVolumeReentrantTid);
+    AutoLockReentrant _l(mutex(), mSetVolumeReentrantTid);
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1407,7 +1409,7 @@
         return NO_ERROR;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1437,7 +1439,7 @@
 
 status_t EffectModule::setMode(audio_mode_t mode)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1459,7 +1461,7 @@
 
 status_t EffectModule::setAudioSource(audio_source_t source)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1477,7 +1479,7 @@
 
 status_t EffectModule::setOffloaded(bool offloaded, audio_io_handle_t io)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1510,7 +1512,7 @@
 
 bool EffectModule::isOffloaded() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return mOffloaded;
 }
 
@@ -1581,7 +1583,7 @@
 
 status_t EffectModule::getConfigs(
         audio_config_base_t* inputCfg, audio_config_base_t* outputCfg, bool* isOutput) const {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mConfig.inputCfg.mask == 0 || mConfig.outputCfg.mask == 0) {
         return NO_INIT;
     }
@@ -1616,7 +1618,7 @@
     EffectBase::dump(fd, args);
 
     String8 result;
-    const bool locked = afutils::dumpTryLock(mLock);
+    const bool locked = afutils::dumpTryLock(mutex());
 
     result.append("\t\tStatus Engine:\n");
     result.appendFormat("\t\t%03d    %p\n",
@@ -1659,7 +1661,7 @@
     }
 
     if (locked) {
-        mLock.unlock();
+        mutex().unlock();
     }
 }
 
@@ -1782,7 +1784,7 @@
 
 Status EffectHandle::enable(int32_t* _aidl_return)
 {
-    AutoMutex _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     ALOGV("enable %p", this);
     sp<IAfEffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
@@ -1821,7 +1823,7 @@
 Status EffectHandle::disable(int32_t* _aidl_return)
 {
     ALOGV("disable %p", this);
-    AutoMutex _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     sp<IAfEffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
         RETURN(DEAD_OBJECT);
@@ -1854,7 +1856,7 @@
 
 void EffectHandle::disconnect(bool unpinIfLast)
 {
-    AutoMutex _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     ALOGV("disconnect(%s) %p", unpinIfLast ? "true" : "false", this);
     if (mDisconnected) {
         if (unpinIfLast) {
@@ -1881,7 +1883,7 @@
         }
         mCblkMemory.clear();    // free the shared memory before releasing the heap it belongs to
         // Client destructor must run with AudioFlinger client mutex locked
-        Mutex::Autolock _l2(mClient->afClientCallback()->clientMutex());
+        audio_utils::lock_guard _l2(mClient->afClientCallback()->clientMutex());
         mClient.clear();
     }
 }
@@ -1893,7 +1895,7 @@
 
 Status EffectHandle::getConfig(
         media::EffectConfig* _config, int32_t* _aidl_return) {
-    AutoMutex _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     sp<IAfEffectBase> effect = mEffect.promote();
     if (effect == nullptr || mDisconnected) {
         RETURN(DEAD_OBJECT);
@@ -1960,7 +1962,7 @@
         return disable(_aidl_return);
     }
 
-    AutoMutex _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     sp<IAfEffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
         RETURN(DEAD_OBJECT);
@@ -2174,7 +2176,7 @@
 std::vector<int> EffectChain::getEffectIds() const
 {
     std::vector<int> ids;
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); i++) {
         ids.push_back(mEffects[i]->id());
     }
@@ -2183,11 +2185,11 @@
 
 void EffectChain::clearInputBuffer()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     clearInputBuffer_l();
 }
 
-// Must be called with EffectChain::mLock locked
+// Must be called with EffectChain::mutex() locked
 void EffectChain::clearInputBuffer_l()
 {
     if (mInBuffer == NULL) {
@@ -2200,7 +2202,7 @@
     mInBuffer->commit();
 }
 
-// Must be called with EffectChain::mLock locked
+// Must be called with EffectChain::mutex() locked
 void EffectChain::process_l()
 {
     // never process effects when:
@@ -2259,7 +2261,7 @@
                                                    audio_session_t sessionId,
                                                    bool pinned)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     effect = new EffectModule(mEffectCallback, desc, id, sessionId, pinned, AUDIO_PORT_HANDLE_NONE);
     status_t lStatus = effect->status();
     if (lStatus == NO_ERROR) {
@@ -2274,10 +2276,10 @@
 // addEffect_l() must be called with IAfThreadBase::mutex() held
 status_t EffectChain::addEffect_l(const sp<IAfEffectModule>& effect)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return addEffect_ll(effect);
 }
-// addEffect_l() must be called with IAfThreadBase::mLock and EffectChain::mutex() held
+// addEffect_ll() must be called with IAfThreadBase::mutex() and EffectChain::mutex() held
 status_t EffectChain::addEffect_ll(const sp<IAfEffectModule>& effect)
 {
     effect->setCallback(mEffectCallback);
@@ -2444,7 +2446,7 @@
 size_t EffectChain::removeEffect_l(const sp<IAfEffectModule>& effect,
                                                  bool release)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     uint32_t type = effect->desc().flags & EFFECT_FLAG_TYPE_MASK;
 
@@ -2531,7 +2533,7 @@
     return false;
 }
 
-// setVolume_l() must be called with IAfThreadBase::mLock or EffectChain::mLock held
+// setVolume_l() must be called with IAfThreadBase::mutex() or EffectChain::mutex() held
 bool EffectChain::setVolume_l(uint32_t *left, uint32_t *right, bool force)
 {
     uint32_t newLeft = *left;
@@ -2607,7 +2609,7 @@
     return volumeControlIndex.has_value();
 }
 
-// resetVolume_l() must be called with IAfThreadBase::mutex() or EffectChain::mLock held
+// resetVolume_l() must be called with IAfThreadBase::mutex() or EffectChain::mutex() held
 void EffectChain::resetVolume_l()
 {
     if ((mLeftVolume != UINT_MAX) && (mRightVolume != UINT_MAX)) {
@@ -2618,7 +2620,7 @@
 }
 
 // containsHapticGeneratingEffect_l must be called with
-// IAfThreadBase::mutex() or EffectChain::mLock held
+// IAfThreadBase::mutex() or EffectChain::mutex() held
 bool EffectChain::containsHapticGeneratingEffect_l()
 {
     for (size_t i = 0; i < mEffects.size(); ++i) {
@@ -2631,7 +2633,7 @@
 
 void EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); ++i) {
         mEffects[i]->setHapticIntensity(id, intensity);
     }
@@ -2639,7 +2641,7 @@
 
 void EffectChain::syncHalEffectsState()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); i++) {
         if (mEffects[i]->state() == EffectModule::ACTIVE ||
                 mEffects[i]->state() == EffectModule::STOPPING) {
@@ -2657,7 +2659,7 @@
     result.appendFormat("    %zu effects for session %d\n", numEffects, mSessionId);
 
     if (numEffects) {
-        const bool locked = afutils::dumpTryLock(mLock);
+        const bool locked = afutils::dumpTryLock(mutex());
         // failed to lock - AudioFlinger is probably deadlocked
         if (!locked) {
             result.append("\tCould not lock mutex:\n");
@@ -2680,7 +2682,7 @@
         }
 
         if (locked) {
-            mLock.unlock();
+            mutex().unlock();
         }
     } else {
         write(fd, result.c_str(), result.size());
@@ -2729,12 +2731,12 @@
                 sp<IAfEffectModule> effect = desc->mEffect.promote();
                 if (effect != 0) {
                     effect->setSuspended(false);
-                    effect->lock();
+                    effect->mutex().lock();
                     IAfEffectHandle *handle = effect->controlHandle_l();
                     if (handle != NULL && !handle->disconnected()) {
                         effect->setEnabled_l(handle->enabled());
                     }
-                    effect->unlock();
+                    effect->mutex().unlock();
                 }
                 desc->mEffect.clear();
             }
@@ -2884,7 +2886,7 @@
 
 bool EffectChain::isNonOffloadableEnabled() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return isNonOffloadableEnabled_l();
 }
 
@@ -2901,7 +2903,7 @@
 
 void EffectChain::setThread(const sp<IAfThreadBase>& thread)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mEffectCallback->setThread(thread);
 }
 
@@ -2930,7 +2932,7 @@
 
 bool EffectChain::isRawCompatible() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (const auto &effect : mEffects) {
         if (effect->isProcessImplemented()) {
             return false;
@@ -2942,7 +2944,7 @@
 
 bool EffectChain::isFastCompatible() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (const auto &effect : mEffects) {
         if (effect->isProcessImplemented()
                 && effect->isImplementationSoftware()) {
@@ -2954,7 +2956,7 @@
 }
 
 bool EffectChain::isBitPerfectCompatible() const {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (const auto &effect : mEffects) {
         if (effect->isProcessImplemented()
                 && effect->isImplementationSoftware()) {
@@ -2965,10 +2967,10 @@
     return true;
 }
 
-// isCompatibleWithThread_l() must be called with thread->mLock held
+// isCompatibleWithThread_l() must be called with thread->mutex() held
 bool EffectChain::isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); i++) {
         if (thread->checkEffectCompatibility_l(&(mEffects[i]->desc()), mSessionId) != NO_ERROR) {
             return false;
@@ -3081,7 +3083,10 @@
     return t->sampleRate();
 }
 
-audio_channel_mask_t EffectChain::EffectCallback::inChannelMask(int id) const {
+audio_channel_mask_t EffectChain::EffectCallback::inChannelMask(int id) const
+NO_THREAD_SAFETY_ANALYSIS
+// calling function 'hasAudioSession_l' requires holding mutex 'ThreadBase_Mutex' exclusively
+{
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
         return AUDIO_CHANNEL_NONE;
@@ -3117,7 +3122,10 @@
     return audio_channel_count_from_out_mask(inChannelMask(id));
 }
 
-audio_channel_mask_t EffectChain::EffectCallback::outChannelMask() const {
+audio_channel_mask_t EffectChain::EffectCallback::outChannelMask() const
+NO_THREAD_SAFETY_ANALYSIS
+// calling function 'hasAudioSession_l' requires holding mutex 'ThreadBase_Mutex' exclusively
+{
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
         return AUDIO_CHANNEL_NONE;
@@ -3272,7 +3280,7 @@
 status_t DeviceEffectProxy::setEnabled(bool enabled, bool fromHandle)
 {
     status_t status = EffectBase::setEnabled(enabled, fromHandle);
-    Mutex::Autolock _l(mProxyLock);
+    audio_utils::lock_guard _l(proxyMutex());
     if (status == NO_ERROR) {
         for (auto& handle : mEffectHandles) {
             Status bs;
@@ -3313,6 +3321,23 @@
     return status;
 }
 
+status_t DeviceEffectProxy::onUpdatePatch(audio_patch_handle_t oldPatchHandle,
+        audio_patch_handle_t newPatchHandle,
+        const IAfPatchPanel::Patch& /* patch */) {
+    status_t status = NAME_NOT_FOUND;
+    ALOGV("%s", __func__);
+    audio_utils::lock_guard _l(proxyMutex());
+    if (mEffectHandles.find(oldPatchHandle) != mEffectHandles.end()) {
+        ALOGV("%s replacing effect from handle %d to handle %d", __func__, oldPatchHandle,
+                newPatchHandle);
+        sp<IAfEffectHandle> effect = mEffectHandles.at(oldPatchHandle);
+        mEffectHandles.erase(oldPatchHandle);
+        mEffectHandles.emplace(newPatchHandle, effect);
+        status = NO_ERROR;
+    }
+    return status;
+}
+
 status_t DeviceEffectProxy::onCreatePatch(
         audio_patch_handle_t patchHandle, const IAfPatchPanel::Patch& patch) {
     status_t status = NAME_NOT_FOUND;
@@ -3325,7 +3350,10 @@
         ALOGV("%s sink %d checkPort status %d", __func__, i, status);
     }
     if (status == NO_ERROR || status == ALREADY_EXISTS) {
-        Mutex::Autolock _l(mProxyLock);
+        audio_utils::lock_guard _l(proxyMutex());
+        size_t erasedHandle = mEffectHandles.erase(patchHandle);
+        ALOGV("%s %s effecthandle %p for patch %d",
+                __func__, (erasedHandle == 0 ? "adding" : "replacing"), handle.get(), patchHandle);
         mEffectHandles.emplace(patchHandle, handle);
     }
     ALOGW_IF(status == BAD_VALUE,
@@ -3335,7 +3363,10 @@
 }
 
 status_t DeviceEffectProxy::checkPort(const IAfPatchPanel::Patch& patch,
-        const struct audio_port_config *port, sp<IAfEffectHandle> *handle) {
+        const struct audio_port_config *port, sp<IAfEffectHandle> *handle)
+NO_THREAD_SAFETY_ANALYSIS
+// calling function 'createEffect_l' requires holding mutex 'AudioFlinger_Mutex' exclusively
+{
 
     ALOGV("%s type %d device type %d address %s device ID %d patch.isSoftware() %d",
             __func__, port->type, port->ext.device.type,
@@ -3357,19 +3388,22 @@
     status_t status = NAME_NOT_FOUND;
 
     if (mDescriptor.flags & EFFECT_FLAG_HW_ACC_TUNNEL) {
-        Mutex::Autolock _l(mProxyLock);
-        mDevicePort = *port;
-        mHalEffect = new EffectModule(mMyCallback,
+        audio_utils::lock_guard _l(proxyMutex());
+        if (mHalEffect != nullptr && mDevicePort.id == port->id) {
+            ALOGV("%s reusing HAL effect", __func__);
+        } else {
+            mDevicePort = *port;
+            mHalEffect = new EffectModule(mMyCallback,
                                       const_cast<effect_descriptor_t *>(&mDescriptor),
                                       mMyCallback->newEffectId(), AUDIO_SESSION_DEVICE,
                                       false /* pinned */, port->id);
-        if (audio_is_input_device(mDevice.mType)) {
-            mHalEffect->setInputDevice(mDevice);
-        } else {
-            mHalEffect->setDevices({mDevice});
+            if (audio_is_input_device(mDevice.mType)) {
+                mHalEffect->setInputDevice(mDevice);
+            } else {
+                mHalEffect->setDevices({mDevice});
+            }
+            mHalEffect->configure();
         }
-        mHalEffect->configure();
-
         *handle = new EffectHandle(mHalEffect, nullptr, nullptr, 0 /*priority*/,
                                    mNotifyFramesProcessed);
         status = (*handle)->initCheck();
@@ -3421,7 +3455,7 @@
 void DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
     sp<IAfEffectHandle> effect;
     {
-        Mutex::Autolock _l(mProxyLock);
+        audio_utils::lock_guard _l(proxyMutex());
         if (mEffectHandles.find(patchHandle) != mEffectHandles.end()) {
             effect = mEffectHandles.at(patchHandle);
             mEffectHandles.erase(patchHandle);
@@ -3432,7 +3466,7 @@
 
 size_t DeviceEffectProxy::removeEffect(const sp<IAfEffectModule>& effect)
 {
-    Mutex::Autolock _l(mProxyLock);
+    audio_utils::lock_guard _l(proxyMutex());
     if (effect == mHalEffect) {
         mHalEffect->release_l();
         mHalEffect.clear();
@@ -3457,6 +3491,23 @@
     return mManagerCallback->removeEffectFromHal(&mDevicePort, effect);
 }
 
+status_t DeviceEffectProxy::command(
+        int32_t cmdCode, const std::vector<uint8_t>& cmdData, int32_t maxReplySize,
+        std::vector<uint8_t>* reply) {
+    audio_utils::lock_guard _l(proxyMutex());
+    status_t status = EffectBase::command(cmdCode, cmdData, maxReplySize, reply);
+    if (status == NO_ERROR) {
+        for (auto& handle : mEffectHandles) {
+            sp<IAfEffectBase> effect = handle.second->effect().promote();
+            if (effect != nullptr) {
+                status = effect->command(cmdCode, cmdData, maxReplySize, reply);
+            }
+        }
+    }
+    ALOGV("%s status %d", __func__, status);
+    return status;
+}
+
 bool DeviceEffectProxy::isOutput() const {
     if (mDevicePort.id != AUDIO_PORT_HANDLE_NONE) {
         return mDevicePort.role == AUDIO_PORT_ROLE_SINK;
@@ -3493,7 +3544,7 @@
     const Vector<String16> args;
     EffectBase::dump(fd, args);
 
-    const bool locked = afutils::dumpTryLock(mProxyLock);
+    const bool locked = afutils::dumpTryLock(proxyMutex());
     if (!locked) {
         String8 result("DeviceEffectProxy may be deadlocked\n");
         write(fd, result.c_str(), result.size());
@@ -3523,7 +3574,7 @@
     }
 
     if (locked) {
-        mLock.unlock();
+        proxyMutex().unlock();
     }
 }
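
DeviceEffectProxy::onUpdatePatch() above re-keys an existing effect handle from the old
patch handle to the new one under the proxy mutex (find, erase, emplace). A standalone
sketch of that map re-keying over a plain std::map; the names are illustrative, and the
extract() variant in the trailing comment is an alternative I am noting, not something the
patch uses:

#include <map>
#include <memory>

using PatchHandle = int;                 // stand-in for audio_patch_handle_t
struct EffectHandle { int id; };

// Move the mapped value from oldKey to newKey, as onUpdatePatch() does;
// returns false when the old key is unknown (NAME_NOT_FOUND in the patch).
bool rekeyPatchHandle(std::map<PatchHandle, std::shared_ptr<EffectHandle>>& handles,
                      PatchHandle oldKey, PatchHandle newKey) {
    const auto it = handles.find(oldKey);
    if (it == handles.end()) {
        return false;
    }
    std::shared_ptr<EffectHandle> value = it->second;
    handles.erase(it);
    handles.emplace(newKey, std::move(value));
    return true;
}

// C++17 alternative that re-keys the node in place without erase/insert:
//     if (auto node = handles.extract(oldKey); !node.empty()) {
//         node.key() = newKey;
//         handles.insert(std::move(node));
//     }
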
 
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index e0f5cd7..2ece5dc 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -111,8 +111,7 @@
     bool             isPinned() const final { return mPinned; }
     void             unPin() final { mPinned = false; }
 
-    void             lock() ACQUIRE(mLock) final { mLock.lock(); }
-    void             unlock() RELEASE(mLock) final { mLock.unlock(); }
+    audio_utils::mutex& mutex() const final { return mMutex; }
 
     status_t         updatePolicyState() final;
 
@@ -135,7 +134,8 @@
 
     DISALLOW_COPY_AND_ASSIGN(EffectBase);
 
-    mutable Mutex mLock;      // mutex for process, commands and handles list protection
+    // mutex for process, commands and handles list protection
+    mutable audio_utils::mutex mMutex;
     mediautils::atomic_sp<EffectCallbackInterface> mCallback; // parent effect chain
     const int                 mId;        // this instance unique ID
     const audio_session_t     mSessionId; // audio session ID
@@ -148,9 +148,10 @@
                 // First handle in mHandles has highest priority and controls the effect module
 
     // Audio policy effect state management
-    // Mutex protecting transactions with audio policy manager as mLock cannot
+    // Mutex protecting transactions with the audio policy manager; mutex() cannot be
     // held during those calls, to avoid cross deadlocks with the audio policy mutex
-    Mutex                     mPolicyLock;
+    audio_utils::mutex& policyMutex() const { return mPolicyMutex; }
+    mutable audio_utils::mutex mPolicyMutex;
     // Effect is registered in APM or not
     bool                      mPolicyRegistered = false;
     // Effect enabled state communicated to APM. Enabled state corresponds to
@@ -272,9 +273,10 @@
     uint32_t mInChannelCountRequested;
     uint32_t mOutChannelCountRequested;
 
+    template <typename MUTEX>
     class AutoLockReentrant {
     public:
-        AutoLockReentrant(Mutex& mutex, pid_t allowedTid)
+        AutoLockReentrant(MUTEX& mutex, pid_t allowedTid)
             : mMutex(gettid() == allowedTid ? nullptr : &mutex)
         {
             if (mMutex != nullptr) mMutex->lock();
@@ -283,7 +285,7 @@
             if (mMutex != nullptr) mMutex->unlock();
         }
     private:
-        Mutex * const mMutex;
+        MUTEX * const mMutex;
     };
 
     static constexpr pid_t INVALID_PID = (pid_t)-1;
@@ -364,7 +366,8 @@
 private:
     DISALLOW_COPY_AND_ASSIGN(EffectHandle);
 
-    Mutex mLock;                             // protects IEffect method calls
+    audio_utils::mutex& mutex() const { return mMutex; }
+    mutable audio_utils::mutex mMutex; // protects IEffect method calls
     const wp<IAfEffectBase> mEffect;               // pointer to controlled EffectModule
     const sp<media::IEffectClient> mEffectClient;  // callback interface for client notifications
     /*const*/ sp<Client> mClient;            // client for shared memory allocation, see
@@ -397,12 +400,8 @@
 
     void process_l() final;
 
-    void lock() ACQUIRE(mLock) final {
-        mLock.lock();
-    }
-    void unlock() RELEASE(mLock) final {
-        mLock.unlock();
-    }
+    audio_utils::mutex& mutex() const final { return mMutex; }
+
     status_t createEffect_l(sp<IAfEffectModule>& effect,
                             effect_descriptor_t *desc,
                             int id,
@@ -488,9 +487,11 @@
     // Is this EffectChain compatible with the bit-perfect audio flag.
     bool isBitPerfectCompatible() const final;
 
-    // isCompatibleWithThread_l() must be called with thread->mLock held
-    bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const final;
+    // isCompatibleWithThread_l() must be called with thread->mutex() held
+    bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
 
+    // Requires either IAfThreadBase::mutex() or EffectChain::mutex() held
     bool containsHapticGeneratingEffect_l() final;
 
     void setHapticIntensity_l(int id, os::HapticScale intensity) final;
@@ -624,7 +625,7 @@
 
     std::optional<size_t> findVolumeControl_l(size_t from, size_t to) const;
 
-    mutable  Mutex mLock;        // mutex protecting effect list
+    mutable audio_utils::mutex mMutex; // mutex protecting effect list
              Vector<sp<IAfEffectModule>> mEffects; // list of effect modules
              audio_session_t mSessionId; // audio session ID
              sp<EffectBufferHalInterface> mInBuffer;  // chain input buffer
@@ -671,6 +672,9 @@
     status_t onCreatePatch(audio_patch_handle_t patchHandle,
             const IAfPatchPanel::Patch& patch) final;
 
+    status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle, audio_patch_handle_t newPatchHandle,
+           const IAfPatchPanel::Patch& patch) final;
+
     void onReleasePatch(audio_patch_handle_t patchHandle) final;
 
     size_t removeEffect(const sp<IAfEffectModule>& effect) final;
@@ -684,6 +688,11 @@
     audio_channel_mask_t channelMask() const final;
     uint32_t channelCount() const final;
 
+    status_t command(int32_t cmdCode,
+                     const std::vector<uint8_t>& cmdData,
+                     int32_t maxReplySize,
+                     std::vector<uint8_t>* reply) final;
+
     void dump2(int fd, int spaces) const final;
 
 private:
@@ -754,9 +763,10 @@
     const sp<DeviceEffectManagerCallback> mManagerCallback;
     const sp<ProxyCallback> mMyCallback;
 
-    mutable Mutex mProxyLock;
-    std::map<audio_patch_handle_t, sp<IAfEffectHandle>> mEffectHandles; // protected by mProxyLock
-    sp<IAfEffectModule> mHalEffect; // protected by mProxyLock
+    audio_utils::mutex& proxyMutex() const { return mProxyMutex; }
+    mutable audio_utils::mutex mProxyMutex;
+    std::map<audio_patch_handle_t, sp<IAfEffectHandle>> mEffectHandles; // protected by mProxyMutex
+    sp<IAfEffectModule> mHalEffect; // protected by mProxyMutex
     struct audio_port_config mDevicePort = { .id = AUDIO_PORT_HANDLE_NONE };
     const bool mNotifyFramesProcessed;
 };
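
AutoLockReentrant above is now templated on the mutex type so the same guard works with
audio_utils::mutex. A self-contained sketch of the idea using the standard library, keyed
on std::thread::id rather than the pid_t/gettid() pair in the patch (that substitution and
the class name are mine):

#include <mutex>
#include <thread>

// Lock the mutex unless the designated thread is calling back into this code
// while it already holds the lock higher up the stack (the setVolume()
// reentrancy case the guard exists for).
template <typename MutexT>
class ReentrantGuard {
public:
    ReentrantGuard(MutexT& mutex, std::thread::id allowedThread)
        : mMutex(std::this_thread::get_id() == allowedThread ? nullptr : &mutex) {
        if (mMutex != nullptr) mMutex->lock();
    }
    ~ReentrantGuard() {
        if (mMutex != nullptr) mMutex->unlock();
    }
    ReentrantGuard(const ReentrantGuard&) = delete;
    ReentrantGuard& operator=(const ReentrantGuard&) = delete;

private:
    MutexT* const mMutex;
};

// Usage sketch: ReentrantGuard<std::mutex> guard(gMutex, gVolumeOwnerThread);
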
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index 7393448..8c5bc4b 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -21,6 +21,7 @@
 #include <android/media/AudioVibratorInfo.h>
 #include <android/media/BnEffect.h>
 #include <android/media/BnEffectClient.h>
+#include <audio_utils/mutex.h>
 #include <media/AudioCommonTypes.h>  // product_strategy_t
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/audiohal/EffectHalInterface.h>
@@ -124,6 +125,11 @@
     virtual sp<IAfEffectModule> asEffectModule() = 0;
     virtual sp<IAfDeviceEffectProxy> asDeviceEffectProxy() = 0;
 
+    virtual status_t command(int32_t cmdCode,
+            const std::vector<uint8_t>& cmdData,
+            int32_t maxReplySize,
+            std::vector<uint8_t>* reply) = 0;
+
     virtual void dump(int fd, const Vector<String16>& args) const = 0;
 
 private:
@@ -132,17 +138,11 @@
     virtual void setSuspended(bool suspended) = 0;
     virtual bool suspended() const = 0;
 
-    virtual status_t command(int32_t cmdCode,
-            const std::vector<uint8_t>& cmdData,
-            int32_t maxReplySize,
-            std::vector<uint8_t>* reply) = 0;
-
     virtual ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) = 0;
     virtual ssize_t removeHandle_l(IAfEffectHandle *handle) = 0;
     virtual IAfEffectHandle* controlHandle_l() = 0;
 
-    virtual void lock() = 0;
-    virtual void unlock() = 0;
+    virtual audio_utils::mutex& mutex() const = 0;
 };
 
 class IAfEffectModule : public virtual IAfEffectBase {
@@ -218,8 +218,7 @@
 
     virtual void process_l() = 0;
 
-    virtual void lock() = 0;
-    virtual void unlock() = 0;
+    virtual audio_utils::mutex& mutex() const = 0;
 
     virtual status_t createEffect_l(sp<IAfEffectModule>& effect,
                             effect_descriptor_t *desc,
@@ -361,6 +360,9 @@
     virtual status_t onCreatePatch(
             audio_patch_handle_t patchHandle,
             const IAfPatchPanel::Patch& patch) = 0;
+    virtual status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle,
+            audio_patch_handle_t newPatchHandle,
+            const IAfPatchPanel::Patch& patch) = 0;
     virtual void onReleasePatch(audio_patch_handle_t patchHandle) = 0;
 
     virtual void dump2(int fd, int spaces) const = 0; // TODO(b/291319101) naming?
diff --git a/services/audioflinger/IAfPatchPanel.h b/services/audioflinger/IAfPatchPanel.h
index 20e092d..5a6621e 100644
--- a/services/audioflinger/IAfPatchPanel.h
+++ b/services/audioflinger/IAfPatchPanel.h
@@ -45,8 +45,7 @@
           mRecordThreadHandle(recordThreadHandle) {}
     SoftwarePatch(const SoftwarePatch&) = default;
 
-    // Must be called under AudioFlinger::mLock
-    status_t getLatencyMs_l(double* latencyMs) const;
+    status_t getLatencyMs_l(double* latencyMs) const REQUIRES(audio_utils::AudioFlinger_Mutex);
     audio_patch_handle_t getPatchHandle() const { return mPatchHandle; };
     audio_io_handle_t getPlaybackThreadHandle() const { return mPlaybackThreadHandle; };
     audio_io_handle_t getRecordThreadHandle() const { return mRecordThreadHandle; };
@@ -60,12 +59,14 @@
 
 class IAfPatchPanelCallback : public virtual RefBase {
 public:
-    virtual void closeThreadInternal_l(const sp<IAfPlaybackThread>& thread) = 0;
-    virtual void closeThreadInternal_l(const sp<IAfRecordThread>& thread) = 0;
-    virtual IAfPlaybackThread* primaryPlaybackThread_l() const = 0;
-    virtual IAfPlaybackThread* checkPlaybackThread_l(audio_io_handle_t output) const = 0;
-    virtual IAfRecordThread* checkRecordThread_l(audio_io_handle_t input) const = 0;
-    virtual IAfMmapThread* checkMmapThread_l(audio_io_handle_t io) const = 0;
+    virtual void closeThreadInternal_l(const sp<IAfPlaybackThread>& thread) REQUIRES(mutex()) = 0;
+    virtual void closeThreadInternal_l(const sp<IAfRecordThread>& thread) REQUIRES(mutex()) = 0;
+    virtual IAfPlaybackThread* primaryPlaybackThread_l() const REQUIRES(mutex()) = 0;
+    virtual IAfPlaybackThread* checkPlaybackThread_l(audio_io_handle_t output) const
+            REQUIRES(mutex()) = 0;
+    virtual IAfRecordThread* checkRecordThread_l(audio_io_handle_t input) const
+            REQUIRES(mutex()) = 0;
+    virtual IAfMmapThread* checkMmapThread_l(audio_io_handle_t io) const REQUIRES(mutex()) = 0;
     virtual sp<IAfThreadBase> openInput_l(audio_module_handle_t module,
             audio_io_handle_t* input,
             audio_config_t* config,
@@ -74,23 +75,25 @@
             audio_source_t source,
             audio_input_flags_t flags,
             audio_devices_t outputDevice,
-            const String8& outputDeviceAddress) = 0;
+            const String8& outputDeviceAddress) REQUIRES(mutex()) = 0;
     virtual sp<IAfThreadBase> openOutput_l(audio_module_handle_t module,
             audio_io_handle_t* output,
             audio_config_t* halConfig,
             audio_config_base_t* mixerConfig,
             audio_devices_t deviceType,
             const String8& address,
-            audio_output_flags_t flags) = 0;
-    virtual void lock() const = 0;
-    virtual void unlock() const = 0;
+            audio_output_flags_t flags) REQUIRES(mutex()) = 0;
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(audio_utils::AudioFlinger_Mutex) = 0;
     virtual const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
-            getAudioHwDevs_l() const = 0;
+            getAudioHwDevs_l() const REQUIRES(mutex()) = 0;
     virtual audio_unique_id_t nextUniqueId(audio_unique_id_use_t use) = 0;
     virtual const sp<PatchCommandThread>& getPatchCommandThread() = 0;
     virtual void updateDownStreamPatches_l(
-            const struct audio_patch* patch, const std::set<audio_io_handle_t>& streams) = 0;
-    virtual void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices) = 0;
+            const struct audio_patch* patch, const std::set<audio_io_handle_t>& streams)
+            REQUIRES(mutex()) = 0;
+    virtual void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices)
+            REQUIRES(mutex()) = 0;
 };
 
 class IAfPatchPanel : public virtual RefBase {
@@ -134,9 +137,12 @@
         sp<const ThreadType> const_thread() const { return mThread; }
         sp<const TrackType> const_track() const { return mTrack; }
 
-        void closeConnections(const sp<IAfPatchPanel>& panel) {
+        void closeConnections_l(const sp<IAfPatchPanel>& panel)
+                REQUIRES(audio_utils::AudioFlinger_Mutex)
+                NO_THREAD_SAFETY_ANALYSIS // this is broken in clang
+        {
             if (mHandle != AUDIO_PATCH_HANDLE_NONE) {
-                panel->releaseAudioPatch(mHandle);
+                panel->releaseAudioPatch_l(mHandle);
                 mHandle = AUDIO_PATCH_HANDLE_NONE;
             }
             if (mThread != nullptr) {
@@ -218,8 +224,10 @@
 
         friend void swap(Patch& a, Patch& b) noexcept { a.swap(b); }
 
-        status_t createConnections(const sp<IAfPatchPanel>& panel);
-        void clearConnections(const sp<IAfPatchPanel>& panel);
+        status_t createConnections_l(const sp<IAfPatchPanel>& panel)
+                REQUIRES(audio_utils::AudioFlinger_Mutex);
+        void clearConnections_l(const sp<IAfPatchPanel>& panel)
+                REQUIRES(audio_utils::AudioFlinger_Mutex);
         bool isSoftware() const {
             return mRecord.handle() != AUDIO_PATCH_HANDLE_NONE ||
                    mPlayback.handle() != AUDIO_PATCH_HANDLE_NONE;
@@ -251,22 +259,27 @@
     };
 
     /* List connected audio ports and their attributes */
-    virtual status_t listAudioPorts(unsigned int* num_ports, struct audio_port* ports) = 0;
+    virtual status_t listAudioPorts_l(unsigned int* num_ports, struct audio_port* ports)
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
     /* Get supported attributes for a given audio port */
-    virtual status_t getAudioPort(struct audio_port_v7* port) = 0;
+    virtual status_t getAudioPort_l(struct audio_port_v7* port)
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
     /* Create a patch between several source and sink ports */
-    virtual status_t createAudioPatch(
+    virtual status_t createAudioPatch_l(
             const struct audio_patch* patch,
             audio_patch_handle_t* handle,
-            bool endpointPatch = false) = 0;
+            bool endpointPatch = false)
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
     /* Release a patch */
-    virtual status_t releaseAudioPatch(audio_patch_handle_t handle) = 0;
+    virtual status_t releaseAudioPatch_l(audio_patch_handle_t handle)
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
     /* List connected audio devices and their attributes */
-    virtual status_t listAudioPatches(unsigned int* num_patches, struct audio_patch* patches) = 0;
+    virtual status_t listAudioPatches_l(unsigned int* num_patches, struct audio_patch* patches)
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
     // Retrieves all currently established software patches for a stream
     // opened on an intermediate module.
@@ -281,13 +294,14 @@
 
     virtual void dump(int fd) const = 0;
 
-    // Must be called under AudioFlinger::mLock
+    virtual const std::map<audio_patch_handle_t, Patch>& patches_l() const
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
-    virtual const std::map<audio_patch_handle_t, Patch>& patches_l() const = 0;
+    virtual status_t getLatencyMs_l(audio_patch_handle_t patchHandle, double* latencyMs) const
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
-    virtual status_t getLatencyMs_l(audio_patch_handle_t patchHandle, double* latencyMs) const = 0;
-
-    virtual void closeThreadInternal_l(const sp<IAfThreadBase>& thread) const = 0;
+    virtual void closeThreadInternal_l(const sp<IAfThreadBase>& thread) const
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 };
 
 }  // namespace android
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index be51d51..3cac17c 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -19,6 +19,7 @@
 #include <android/media/IAudioTrackCallback.h>
 #include <android/media/IEffectClient.h>
 #include <audiomanager/IAudioManager.h>
+#include <audio_utils/mutex.h>
 #include <audio_utils/MelProcessor.h>
 #include <binder/MemoryDealer.h>
 #include <datapath/AudioStreamIn.h>
@@ -30,7 +31,6 @@
 #include <media/audiohal/StreamHalInterface.h>
 #include <media/nblog/NBLog.h>
 #include <timing/SyncEvent.h>
-#include <utils/Mutex.h>
 #include <utils/RefBase.h>
 #include <vibrator/ExternalVibration.h>
 
@@ -67,44 +67,57 @@
 // and hence may be used by the Effect / Track framework.
 class IAfThreadCallback : public virtual RefBase {
 public:
-    virtual Mutex& mutex() const = 0;
-    virtual bool isNonOffloadableGlobalEffectEnabled_l() const = 0;  // Tracks
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(audio_utils::AudioFlinger_Mutex) = 0;
+    virtual bool isNonOffloadableGlobalEffectEnabled_l() const
+            REQUIRES(mutex()) EXCLUDES_ThreadBase_Mutex = 0;  // Tracks
     virtual audio_unique_id_t nextUniqueId(audio_unique_id_use_t use) = 0;
     virtual bool btNrecIsOff() const = 0;
-    virtual float masterVolume_l() const = 0;
-    virtual bool masterMute_l() const = 0;
-    virtual float getMasterBalance_l() const = 0;
-    virtual bool streamMute_l(audio_stream_type_t stream) const = 0;
+    virtual float masterVolume_l() const
+            REQUIRES(mutex()) = 0;
+    virtual bool masterMute_l() const
+            REQUIRES(mutex()) = 0;
+    virtual float getMasterBalance_l() const
+            REQUIRES(mutex()) = 0;
+    virtual bool streamMute_l(audio_stream_type_t stream) const
+            REQUIRES(mutex()) = 0;
     virtual audio_mode_t getMode() const = 0;
     virtual bool isLowRamDevice() const = 0;
     virtual bool isAudioPolicyReady() const = 0;  // Effects
     virtual uint32_t getScreenState() const = 0;
-    virtual std::optional<media::AudioVibratorInfo> getDefaultVibratorInfo_l() const = 0;
+    virtual std::optional<media::AudioVibratorInfo> getDefaultVibratorInfo_l() const
+            REQUIRES(mutex()) = 0;
     virtual const sp<IAfPatchPanel>& getPatchPanel() const = 0;
     virtual const sp<MelReporter>& getMelReporter() const = 0;
     virtual const sp<EffectsFactoryHalInterface>& getEffectsFactoryHal() const = 0;
     virtual sp<IAudioManager> getOrCreateAudioManager() = 0;  // Tracks
 
-    virtual bool updateOrphanEffectChains(const sp<IAfEffectModule>& effect) = 0;
-    virtual status_t moveEffectChain_l(audio_session_t sessionId,
-            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread) = 0;
+    virtual bool updateOrphanEffectChains(const sp<IAfEffectModule>& effect)
+            EXCLUDES_AudioFlinger_Mutex = 0;
+    virtual status_t moveEffectChain_ll(audio_session_t sessionId,
+            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread)
+            REQUIRES(mutex(), audio_utils::ThreadBase_Mutex) = 0;
 
     virtual void requestLogMerge() = 0;
-    virtual sp<NBLog::Writer> newWriter_l(size_t size, const char *name) = 0;
+    virtual sp<NBLog::Writer> newWriter_l(size_t size, const char *name)
+            REQUIRES(mutex()) = 0;
     virtual void unregisterWriter(const sp<NBLog::Writer>& writer) = 0;
 
     virtual sp<audioflinger::SyncEvent> createSyncEvent(AudioSystem::sync_event_t type,
             audio_session_t triggerSession,
             audio_session_t listenerSession,
             const audioflinger::SyncEventCallback& callBack,
-            const wp<IAfTrackBase>& cookie) = 0;
+            const wp<IAfTrackBase>& cookie)
+            EXCLUDES_AudioFlinger_Mutex = 0;
 
-    virtual void ioConfigChanged(audio_io_config_event_t event,
+    // Caller must hold either AudioFlinger::mutex() or ThreadBase::mutex().
+    virtual void ioConfigChanged_l(audio_io_config_event_t event,
             const sp<AudioIoDescriptor>& ioDesc,
-            pid_t pid = 0) = 0;
-    virtual void onNonOffloadableGlobalEffectEnable() = 0;
+            pid_t pid = 0) EXCLUDES_AudioFlinger_ClientMutex = 0;
+    virtual void onNonOffloadableGlobalEffectEnable() EXCLUDES_AudioFlinger_Mutex = 0;
     virtual void onSupportedLatencyModesChanged(
-            audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) = 0;
+            audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes)
+            EXCLUDES_AudioFlinger_ClientMutex = 0;
 };
 
 class IAfThreadBase : public virtual RefBase {
@@ -128,7 +141,7 @@
     static bool isValidPcmSinkFormat(audio_format_t format);
 
     virtual status_t readyToRun() = 0;
-    virtual void clearPowerManager() = 0;
+    virtual void clearPowerManager() EXCLUDES_ThreadBase_Mutex = 0;
     virtual status_t initCheck() const = 0;
     virtual type_t type() const = 0;
     virtual bool isDuplicating() const = 0;
@@ -144,21 +157,23 @@
     virtual size_t frameCount() const = 0;
     virtual audio_channel_mask_t hapticChannelMask() const = 0;
     virtual uint32_t hapticChannelCount() const = 0;
-    virtual uint32_t latency_l() const = 0;
-    virtual void setVolumeForOutput_l(float left, float right) const = 0;
+    virtual uint32_t latency_l() const = 0;  // NO_THREAD_SAFETY_ANALYSIS
+    virtual void setVolumeForOutput_l(float left, float right) const REQUIRES(mutex()) = 0;
 
     // Returns the HAL's frame count, i.e. the fast mixer buffer size.
     virtual size_t frameCountHAL() const = 0;
     virtual size_t frameSize() const = 0;
     // Should be "virtual status_t requestExitAndWait()" and override same
     // method in Thread, but Thread::requestExitAndWait() is not yet virtual.
-    virtual void exit() = 0;
-    virtual bool checkForNewParameter_l(const String8& keyValuePair, status_t& status) = 0;
-    virtual status_t setParameters(const String8& keyValuePairs) = 0;
-    virtual String8 getParameters(const String8& keys) = 0;
-    virtual void ioConfigChanged(
+    virtual void exit() EXCLUDES_ThreadBase_Mutex = 0;
+    virtual bool checkForNewParameter_l(const String8& keyValuePair, status_t& status)
+             REQUIRES(mutex()) = 0;
+    virtual status_t setParameters(const String8& keyValuePairs) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual String8 getParameters(const String8& keys) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void ioConfigChanged_l(
             audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) = 0;
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE)
+            /* holds either AF::mutex or TB::mutex */ = 0;
 
     // sendConfigEvent_l() must be called with ThreadBase::mLock held
     // Can temporarily release the lock if waiting for a reply from
@@ -166,38 +181,53 @@
     // status_t sendConfigEvent_l(sp<ConfigEvent>& event);
     virtual void sendIoConfigEvent(
             audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) = 0;
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) EXCLUDES_ThreadBase_Mutex = 0;
     virtual void sendIoConfigEvent_l(
             audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) = 0;
-    virtual void sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp) = 0;
-    virtual void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio, bool forApp) = 0;
-    virtual status_t sendSetParameterConfigEvent_l(const String8& keyValuePair) = 0;
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) REQUIRES(mutex()) = 0;
+    virtual void sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio, bool forApp)
+            REQUIRES(mutex()) = 0;
+    virtual status_t sendSetParameterConfigEvent_l(const String8& keyValuePair)
+            REQUIRES(mutex()) = 0;
     virtual status_t sendCreateAudioPatchConfigEvent(
-            const struct audio_patch* patch, audio_patch_handle_t* handle) = 0;
-    virtual status_t sendReleaseAudioPatchConfigEvent(audio_patch_handle_t handle) = 0;
+            const struct audio_patch* patch, audio_patch_handle_t* handle)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t sendReleaseAudioPatchConfigEvent(audio_patch_handle_t handle)
+            EXCLUDES_ThreadBase_Mutex = 0;
     virtual status_t sendUpdateOutDeviceConfigEvent(
-            const DeviceDescriptorBaseVector& outDevices) = 0;
-    virtual void sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs) = 0;
-    virtual void sendCheckOutputStageEffectsEvent() = 0;
-    virtual void sendCheckOutputStageEffectsEvent_l() = 0;
-    virtual void sendHalLatencyModesChangedEvent_l() = 0;
+            const DeviceDescriptorBaseVector& outDevices) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs)
+            REQUIRES(mutex()) = 0;
+    virtual void sendCheckOutputStageEffectsEvent() EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void sendCheckOutputStageEffectsEvent_l()
+            REQUIRES(mutex()) = 0;
+    virtual void sendHalLatencyModesChangedEvent_l()
+            REQUIRES(mutex()) = 0;
 
-    virtual void processConfigEvents_l() = 0;
-    virtual void setCheckOutputStageEffects() = 0;
-    virtual void cacheParameters_l() = 0;
+    virtual void processConfigEvents_l()
+            REQUIRES(mutex()) = 0;
+    virtual void setCheckOutputStageEffects() = 0;  // no mutex needed
+    virtual void cacheParameters_l()
+            REQUIRES(mutex()) = 0;
     virtual status_t createAudioPatch_l(
-            const struct audio_patch* patch, audio_patch_handle_t* handle) = 0;
-    virtual status_t releaseAudioPatch_l(const audio_patch_handle_t handle) = 0;
-    virtual void updateOutDevices(const DeviceDescriptorBaseVector& outDevices) = 0;
-    virtual void toAudioPortConfig(struct audio_port_config* config) = 0;
-    virtual void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) = 0;
+            const struct audio_patch* patch, audio_patch_handle_t* handle)
+            REQUIRES(mutex()) = 0;
+    virtual status_t releaseAudioPatch_l(const audio_patch_handle_t handle)
+            REQUIRES(mutex()) = 0;
+    virtual void updateOutDevices(const DeviceDescriptorBaseVector& outDevices)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void toAudioPortConfig(struct audio_port_config* config)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs)
+            REQUIRES(mutex()) = 0;
 
     // see note at declaration of mStandby, mOutDevice and mInDevice
     virtual bool inStandby() const = 0;
-    virtual const DeviceTypeSet outDeviceTypes() const = 0;
-    virtual audio_devices_t inDeviceType() const = 0;
-    virtual DeviceTypeSet getDeviceTypes() const = 0;
+    virtual const DeviceTypeSet outDeviceTypes_l() const REQUIRES(mutex()) = 0;
+    virtual audio_devices_t inDeviceType_l() const REQUIRES(mutex()) = 0;
+    virtual DeviceTypeSet getDeviceTypes_l() const REQUIRES(mutex()) = 0;
     virtual const AudioDeviceTypeAddrVector& outDeviceTypeAddrs() const = 0;
     virtual const AudioDeviceTypeAddr& inDeviceTypeAddr() const = 0;
     virtual bool isOutput() const = 0;
@@ -213,7 +243,8 @@
             status_t* status /*non-NULL*/,
             bool pinned,
             bool probe,
-            bool notifyFramesProcessed) = 0;
+            bool notifyFramesProcessed)
+            REQUIRES(audio_utils::AudioFlinger_Mutex) EXCLUDES_ThreadBase_Mutex = 0;
 
     // return values for hasAudioSession (bit field)
     enum effect_state {
@@ -230,57 +261,76 @@
     };
 
     // get effect chain corresponding to session Id.
-    virtual sp<IAfEffectChain> getEffectChain(audio_session_t sessionId) const = 0;
+    virtual sp<IAfEffectChain> getEffectChain(audio_session_t sessionId) const
+            EXCLUDES_ThreadBase_Mutex = 0;
     // same as getEffectChain() but must be called with ThreadBase mutex locked
-    virtual sp<IAfEffectChain> getEffectChain_l(audio_session_t sessionId) const = 0;
-    virtual std::vector<int> getEffectIds_l(audio_session_t sessionId) const = 0;
+    virtual sp<IAfEffectChain> getEffectChain_l(audio_session_t sessionId) const
+            REQUIRES(mutex()) = 0;
+    virtual std::vector<int> getEffectIds_l(audio_session_t sessionId) const
+            REQUIRES(mutex()) = 0;
     // add an effect chain to the chain list (mEffectChains)
-    virtual status_t addEffectChain_l(const sp<IAfEffectChain>& chain) = 0;
+    virtual status_t addEffectChain_l(const sp<IAfEffectChain>& chain)
+            REQUIRES(mutex()) = 0;
     // remove an effect chain from the chain list (mEffectChains)
-    virtual size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) = 0;
+    virtual size_t removeEffectChain_l(const sp<IAfEffectChain>& chain)
+            REQUIRES(mutex()) = 0;
     // Lock all effect chain mutexes. Must be called before releasing the
     // ThreadBase mutex and before processing the mixer and effects. This guarantees the
     // integrity of the chains during processing.
     // Also sets the parameter 'effectChains' to the current value of mEffectChains.
-    virtual void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) = 0;
+    virtual void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains)
+            REQUIRES(mutex()) = 0;
     // unlock effect chains after process
-    virtual void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains) = 0;
+    virtual void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains)
+            EXCLUDES_ThreadBase_Mutex = 0;
     // get a copy of mEffectChains vector
-    virtual Vector<sp<IAfEffectChain>> getEffectChains_l() const = 0;
+    virtual Vector<sp<IAfEffectChain>> getEffectChains_l() const
+            REQUIRES(mutex()) = 0;
     // set audio mode to all effect chains
-    virtual void setMode(audio_mode_t mode) = 0;
+    virtual void setMode(audio_mode_t mode)
+            EXCLUDES_ThreadBase_Mutex = 0;
     // get effect module with corresponding ID on specified audio session
-    virtual sp<IAfEffectModule> getEffect(audio_session_t sessionId, int effectId) const = 0;
-    virtual sp<IAfEffectModule> getEffect_l(audio_session_t sessionId, int effectId) const = 0;
+    virtual sp<IAfEffectModule> getEffect(audio_session_t sessionId, int effectId) const
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual sp<IAfEffectModule> getEffect_l(audio_session_t sessionId, int effectId) const
+            REQUIRES(mutex()) = 0;
     // add an effect module. Also creates the effect chain if none exists for
     // the effect's audio session. Only called in the context of moving an effect
     // from one thread to another
-    virtual status_t addEffect_l(const sp<IAfEffectModule>& effect) = 0;
+    virtual status_t addEffect_ll(const sp<IAfEffectModule>& effect)
+            REQUIRES(audio_utils::AudioFlinger_Mutex, mutex()) = 0;
     // remove an effect module. Also removes the effect chain if this was the last
     // effect
-    virtual void removeEffect_l(const sp<IAfEffectModule>& effect, bool release = false) = 0;
+    virtual void removeEffect_l(const sp<IAfEffectModule>& effect, bool release = false)
+            REQUIRES(mutex()) = 0;
     // disconnect an effect handle from module and destroy module if last handle
-    virtual void disconnectEffectHandle(IAfEffectHandle* handle, bool unpinIfLast) = 0;
+    virtual void disconnectEffectHandle(IAfEffectHandle* handle, bool unpinIfLast)
+            EXCLUDES_ThreadBase_Mutex = 0;
     // detach all tracks connected to an auxiliary effect
-    virtual void detachAuxEffect_l(int effectId) = 0;
+    virtual void detachAuxEffect_l(int effectId) REQUIRES(mutex()) = 0;
     // returns a combination of:
     // - EFFECT_SESSION if effects on this audio session exist in one chain
     // - TRACK_SESSION if tracks on this audio session exist
     // - FAST_SESSION if fast tracks on this audio session exist
     // - SPATIALIZED_SESSION if spatialized tracks on this audio session exist
-    virtual uint32_t hasAudioSession_l(audio_session_t sessionId) const = 0;
-    virtual uint32_t hasAudioSession(audio_session_t sessionId) const = 0;
+    virtual uint32_t hasAudioSession_l(audio_session_t sessionId) const REQUIRES(mutex()) = 0;
+    virtual uint32_t hasAudioSession(audio_session_t sessionId) const
+            EXCLUDES_ThreadBase_Mutex = 0;
 
     // the value returned by default implementation is not important as the
     // strategy is only meaningful for PlaybackThread which implements this method
-    virtual product_strategy_t getStrategyForSession_l(audio_session_t sessionId) const = 0;
+    virtual product_strategy_t getStrategyForSession_l(audio_session_t sessionId) const
+            REQUIRES(mutex()) = 0;
 
     // check if some effects must be suspended/restored when an effect is enabled
     // or disabled
     virtual void checkSuspendOnEffectEnabled(
-            bool enabled, audio_session_t sessionId, bool threadLocked) = 0;
+            bool enabled, audio_session_t sessionId, bool threadLocked)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) = 0;
+    virtual status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    // internally static, perhaps make static member.
     virtual bool isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const = 0;
 
     // Return a reference to a per-thread heap which can be used to allocate IMemory
@@ -293,41 +343,48 @@
 
     virtual sp<IMemory> pipeMemory() const = 0;
 
-    virtual void systemReady() = 0;
+    virtual void systemReady() EXCLUDES_ThreadBase_Mutex = 0;
 
     // checkEffectCompatibility_l() must be called with ThreadBase::mLock held
     virtual status_t checkEffectCompatibility_l(
-            const effect_descriptor_t* desc, audio_session_t sessionId) = 0;
+            const effect_descriptor_t* desc, audio_session_t sessionId) REQUIRES(mutex()) = 0;
 
-    virtual void broadcast_l() = 0;
+    virtual void broadcast_l() REQUIRES(mutex()) = 0;
 
-    virtual bool isTimestampCorrectionEnabled() const = 0;
+    virtual bool isTimestampCorrectionEnabled_l() const REQUIRES(mutex()) = 0;
 
     virtual bool isMsdDevice() const = 0;
 
-    virtual void dump(int fd, const Vector<String16>& args) = 0;
+    virtual void dump(int fd, const Vector<String16>& args) EXCLUDES_ThreadBase_Mutex = 0;
 
     // deliver stats to mediametrics.
-    virtual void sendStatistics(bool force) = 0;
+    virtual void sendStatistics(bool force) EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual Mutex& mutex() const = 0;
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(audio_utils::ThreadBase_Mutex) = 0;
 
-    virtual void onEffectEnable(const sp<IAfEffectModule>& effect) = 0;
-    virtual void onEffectDisable() = 0;
+    virtual void onEffectEnable(const sp<IAfEffectModule>& effect) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void onEffectDisable() EXCLUDES_ThreadBase_Mutex = 0;
 
     // invalidateTracksForAudioSession_l must be called while holding mLock.
-    virtual void invalidateTracksForAudioSession_l(audio_session_t sessionId) const = 0;
+    virtual void invalidateTracksForAudioSession_l(audio_session_t sessionId) const
+            REQUIRES(mutex()) = 0;
     // Invalidate all the tracks with the given audio session.
-    virtual void invalidateTracksForAudioSession(audio_session_t sessionId) const = 0;
+    virtual void invalidateTracksForAudioSession(audio_session_t sessionId) const
+            EXCLUDES_ThreadBase_Mutex = 0;
 
     virtual bool isStreamInitialized() const = 0;
-    virtual void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) = 0;
-    virtual void stopMelComputation_l() = 0;
+    virtual void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor)
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
+    virtual void stopMelComputation_l()
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
-    virtual product_strategy_t getStrategyForStream(audio_stream_type_t stream) const = 0;
+    virtual product_strategy_t getStrategyForStream(audio_stream_type_t stream) const
+            EXCLUDES_AUDIO_ALL = 0;
 
     virtual void setEffectSuspended_l(
-            const effect_uuid_t* type, bool suspend, audio_session_t sessionId) = 0;
+            const effect_uuid_t* type, bool suspend, audio_session_t sessionId)
+            REQUIRES(mutex()) = 0;
 
     // Dynamic cast to derived interface
     virtual sp<IAfDirectOutputThread> asIAfDirectOutputThread() { return nullptr; }
@@ -375,7 +432,7 @@
     // return estimated latency in milliseconds, as reported by HAL
     virtual uint32_t latency() const = 0;  // should be in IAfThreadBase?
 
-    virtual uint32_t& fastTrackAvailMask_l() = 0;
+    virtual uint32_t& fastTrackAvailMask_l() REQUIRES(mutex()) = 0;
 
     virtual sp<IAfTrack> createTrack_l(
             const sp<Client>& client,
@@ -398,14 +455,16 @@
             audio_port_handle_t portId,
             const sp<media::IAudioTrackCallback>& callback,
             bool isSpatialized,
-            bool isBitPerfect) = 0;
+            bool isBitPerfect,
+            audio_output_flags_t* afTrackFlags)
+            REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
-    virtual status_t addTrack_l(const sp<IAfTrack>& track) = 0;
-    virtual bool destroyTrack_l(const sp<IAfTrack>& track) = 0;
-    virtual bool isTrackActive(const sp<IAfTrack>& track) const = 0;
-    virtual void addOutputTrack_l(const sp<IAfTrack>& track) = 0;
+    virtual status_t addTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
+    virtual bool destroyTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
+    virtual bool isTrackActive(const sp<IAfTrack>& track) const REQUIRES(mutex()) = 0;
+    virtual void addOutputTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
 
-    virtual AudioStreamOut* getOutput_l() const = 0;
+    virtual AudioStreamOut* getOutput_l() const REQUIRES(mutex()) = 0;
     virtual AudioStreamOut* getOutput() const = 0;
     virtual AudioStreamOut* clearOutput() = 0;
 
@@ -413,50 +472,57 @@
     virtual void suspend() = 0;
     virtual void restore() = 0;
     virtual bool isSuspended() const = 0;
-    virtual status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames) const = 0;
+    virtual status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames) const
+            EXCLUDES_ThreadBase_Mutex = 0;
     // Consider also removing and passing an explicit mMainBuffer initialization
     // parameter to AF::IAfTrack::Track().
     virtual float* sinkBuffer() const = 0;
 
-    virtual status_t attachAuxEffect(const sp<IAfTrack>& track, int EffectId) = 0;
-    virtual status_t attachAuxEffect_l(const sp<IAfTrack>& track, int EffectId) = 0;
+    virtual status_t attachAuxEffect(const sp<IAfTrack>& track, int EffectId)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t attachAuxEffect_l(const sp<IAfTrack>& track, int EffectId)
+            REQUIRES(mutex()) = 0;
 
     // called with AudioFlinger lock held
-    virtual bool invalidateTracks_l(audio_stream_type_t streamType) = 0;
-    virtual bool invalidateTracks_l(std::set<audio_port_handle_t>& portIds) = 0;
-    virtual void invalidateTracks(audio_stream_type_t streamType) = 0;
+    virtual bool invalidateTracks_l(audio_stream_type_t streamType) REQUIRES(mutex()) = 0;
+    virtual bool invalidateTracks_l(std::set<audio_port_handle_t>& portIds) REQUIRES(mutex()) = 0;
+    virtual void invalidateTracks(audio_stream_type_t streamType)
+            EXCLUDES_ThreadBase_Mutex = 0;
     // Invalidate tracks by a set of port ids. The port id will be removed from
     // the given set if the corresponding track is found and invalidated.
-    virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds) = 0;
+    virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual status_t getTimestamp_l(AudioTimestamp& timestamp) = 0;
-    virtual void addPatchTrack(const sp<IAfPatchTrack>& track) = 0;
-    virtual void deletePatchTrack(const sp<IAfPatchTrack>& track) = 0;
+    virtual status_t getTimestamp_l(AudioTimestamp& timestamp) REQUIRES(mutex()) = 0;
+    virtual void addPatchTrack(const sp<IAfPatchTrack>& track) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void deletePatchTrack(const sp<IAfPatchTrack>& track) EXCLUDES_ThreadBase_Mutex = 0;
 
     // Return the asynchronous signal wait time.
-    virtual int64_t computeWaitTimeNs_l() const = 0;
+    virtual int64_t computeWaitTimeNs_l() const REQUIRES(mutex()) = 0;
     // returns true if the track is allowed to be added to the thread.
     virtual bool isTrackAllowed_l(
             audio_channel_mask_t channelMask, audio_format_t format, audio_session_t sessionId,
-            uid_t uid) const = 0;
+            uid_t uid) const REQUIRES(mutex()) = 0;
 
     virtual bool supportsHapticPlayback() const = 0;
 
-    virtual void setDownStreamPatch(const struct audio_patch* patch) = 0;
+    virtual void setDownStreamPatch(const struct audio_patch* patch)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual IAfTrack* getTrackById_l(audio_port_handle_t trackId) = 0;
+    virtual IAfTrack* getTrackById_l(audio_port_handle_t trackId) REQUIRES(mutex()) = 0;
 
     virtual bool hasMixer() const = 0;
 
     virtual status_t setRequestedLatencyMode(audio_latency_mode_t mode) = 0;
 
-    virtual status_t getSupportedLatencyModes(std::vector<audio_latency_mode_t>* modes) = 0;
+    virtual status_t getSupportedLatencyModes(std::vector<audio_latency_mode_t>* modes)
+           EXCLUDES_ThreadBase_Mutex = 0;
 
     virtual status_t setBluetoothVariableLatencyEnabled(bool enabled) = 0;
 
-    virtual void setStandby() = 0;
-    virtual void setStandby_l() = 0;
-    virtual bool waitForHalStart() = 0;
+    virtual void setStandby() EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void setStandby_l() REQUIRES(mutex()) = 0;
+    virtual bool waitForHalStart() EXCLUDES_ThreadBase_Mutex = 0;
 
     virtual bool hasFastMixer() const = 0;
     virtual FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex) const = 0;
@@ -476,9 +542,9 @@
             const sp<IAfThreadCallback>& afThreadCallback, IAfPlaybackThread* mainThread,
             audio_io_handle_t id, bool systemReady);
 
-    virtual void addOutputTrack(IAfPlaybackThread* thread) = 0;
+    virtual void addOutputTrack(IAfPlaybackThread* thread) EXCLUDES_ThreadBase_Mutex = 0;
     virtual uint32_t waitTimeMs() const = 0;
-    virtual void removeOutputTrack(IAfPlaybackThread* thread) = 0;
+    virtual void removeOutputTrack(IAfPlaybackThread* thread) EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfRecordThread : public virtual IAfThreadBase {
@@ -502,32 +568,40 @@
             pid_t tid,
             status_t* status /*non-NULL*/,
             audio_port_handle_t portId,
-            int32_t maxSharedAudioHistoryMs) = 0;
-    virtual void destroyTrack_l(const sp<IAfRecordTrack>& track) = 0;
-    virtual void removeTrack_l(const sp<IAfRecordTrack>& track) = 0;
+            int32_t maxSharedAudioHistoryMs)
+            REQUIRES(audio_utils::AudioFlinger_Mutex) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void destroyTrack_l(const sp<IAfRecordTrack>& track) REQUIRES(mutex()) = 0;
+    virtual void removeTrack_l(const sp<IAfRecordTrack>& track) REQUIRES(mutex()) = 0;
 
     virtual status_t start(
             IAfRecordTrack* recordTrack, AudioSystem::sync_event_t event,
-            audio_session_t triggerSession) = 0;
+            audio_session_t triggerSession) EXCLUDES_ThreadBase_Mutex = 0;
 
     // ask the thread to stop the specified track, and
     // return true if the caller should then do its part of the stopping process
-    virtual bool stop(IAfRecordTrack* recordTrack) = 0;
+    virtual bool stop(IAfRecordTrack* recordTrack) EXCLUDES_ThreadBase_Mutex = 0;
 
+    // NO_THREAD_SAFETY_ANALYSIS: consider atomics
     virtual AudioStreamIn* getInput() const = 0;
     virtual AudioStreamIn* clearInput() = 0;
 
     virtual status_t getActiveMicrophones(
-            std::vector<media::MicrophoneInfoFw>* activeMicrophones) const = 0;
-    virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) = 0;
-    virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
+            std::vector<media::MicrophoneInfoFw>* activeMicrophones)
+            const EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t setPreferredMicrophoneFieldDimension(float zoom)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual void addPatchTrack(const sp<IAfPatchRecord>& record) = 0;
-    virtual void deletePatchTrack(const sp<IAfPatchRecord>& record) = 0;
+    virtual void addPatchTrack(const sp<IAfPatchRecord>& record)
+            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void deletePatchTrack(const sp<IAfPatchRecord>& record)
+            EXCLUDES_ThreadBase_Mutex = 0;
     virtual bool fastTrackAvailable() const = 0;
     virtual void setFastTrackAvailable(bool available) = 0;
 
-    virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced) = 0;
+    virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced)
+            EXCLUDES_ThreadBase_Mutex = 0;
     virtual bool hasFastCapture() const = 0;
 
     virtual void checkBtNrec() = 0;
@@ -536,8 +610,8 @@
     virtual status_t shareAudioHistory(
             const std::string& sharedAudioPackageName,
             audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
-            int64_t sharedAudioStartMs = -1) = 0;
-    virtual void resetAudioHistory_l() = 0;
+            int64_t sharedAudioStartMs = -1) EXCLUDES_ThreadBase_Mutex = 0;
+    virtual void resetAudioHistory_l() REQUIRES(mutex()) = 0;
 };
 
 class IAfMmapThread : public virtual IAfThreadBase {
@@ -557,7 +631,7 @@
             const sp<MmapStreamCallback>& callback,
             audio_port_handle_t deviceId,
             audio_port_handle_t portId) = 0;
-    virtual void disconnect() = 0;
+    virtual void disconnect() EXCLUDES_ThreadBase_Mutex = 0;
 
     // MmapStreamInterface handling (see adapter)
     virtual status_t createMmapBuffer(
@@ -572,10 +646,12 @@
     virtual status_t reportData(const void* buffer, size_t frameCount) = 0;
 
     // TODO(b/291317898)  move to IAfThreadBase?
-    virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds) = 0;
+    virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
     // Sets the UID records silence - TODO(b/291317898)  move to IAfMmapCaptureThread
-    virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced) = 0;
+    virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced)
+            EXCLUDES_ThreadBase_Mutex = 0;
 
     virtual sp<IAfMmapPlaybackThread> asIAfMmapPlaybackThread() { return nullptr; }
     virtual sp<IAfMmapCaptureThread> asIAfMmapCaptureThread() { return nullptr; }
@@ -587,7 +663,7 @@
             const sp<IAfThreadCallback>& afThreadCallback, audio_io_handle_t id,
             AudioHwDevice* hwDev, AudioStreamOut* output, bool systemReady);
 
-    virtual AudioStreamOut* clearOutput() = 0;
+    virtual AudioStreamOut* clearOutput() EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfMmapCaptureThread : public virtual IAfMmapThread {
@@ -596,7 +672,7 @@
             const sp<IAfThreadCallback>& afThreadCallback, audio_io_handle_t id,
             AudioHwDevice* hwDev, AudioStreamIn* input, bool systemReady);
 
-    virtual AudioStreamIn* clearInput() = 0;
+    virtual AudioStreamIn* clearInput() EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 }  // namespace android
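
The annotations introduced above follow a common clang -Wthread-safety pattern: mutex() is an accessor that carries the capability, so REQUIRES(mutex()) on a method means "the caller already holds this object's mutex", while the EXCLUDES_* macros (presumably expanding to locks_excluded-style attributes) mark methods expected to be entered without the corresponding lock. A self-contained sketch of that mechanism, with illustrative names and raw wrapper macros instead of the shared audio_utils header:

    #include <mutex>

    // Thin wrappers over clang's thread-safety attributes; defined here only to
    // keep the sketch self-contained.
    #define CAPABILITY(x)        __attribute__((capability(x)))
    #define ACQUIRE(...)         __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)         __attribute__((release_capability(__VA_ARGS__)))
    #define REQUIRES(...)        __attribute__((requires_capability(__VA_ARGS__)))
    #define EXCLUDES(...)        __attribute__((locks_excluded(__VA_ARGS__)))
    #define RETURN_CAPABILITY(x) __attribute__((lock_returned(x)))

    class CAPABILITY("mutex") Mutex {
    public:
        void lock() ACQUIRE() { mStd.lock(); }
        void unlock() RELEASE() { mStd.unlock(); }
    private:
        std::mutex mStd;
    };

    class ThreadBase {
    public:
        // The accessor carries the capability, so REQUIRES(mutex()) below
        // reads as "mMutex must be held by the caller".
        Mutex& mutex() const RETURN_CAPABILITY(mMutex) { return mMutex; }

        void cacheParameters_l() REQUIRES(mutex()) { ++mCachedGeneration; }

        // Public variant: entered without the lock, takes it internally.
        void setParameters() EXCLUDES(mMutex) {
            mutex().lock();
            cacheParameters_l();   // OK: the analysis sees the lock is held
            mutex().unlock();
        }
    private:
        mutable Mutex mMutex;
        int mCachedGeneration = 0;
    };

Methods suffixed _ll in the hunks above (addEffect_ll, moveEffectChain_ll) simply list two capabilities in REQUIRES(), e.g. the AudioFlinger-level mutex plus the thread's own mutex.
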
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index 77bd45b..a9c868e 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -62,7 +62,7 @@
 
 void MelReporter::activateInternalSoundDoseComputation() {
     {
-        std::lock_guard _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         if (!mUseHalSoundDoseInterface) {
             // no need to start internal MEL on active patches
             return;
@@ -87,8 +87,8 @@
         return;
     }
 
-    std::lock_guard _laf(mAfMelReporterCallback->mutex());
-    std::lock_guard _l(mLock);
+    audio_utils::lock_guard _laf(mAfMelReporterCallback->mutex());  // AudioFlinger_Mutex
+    audio_utils::lock_guard _l(mutex());
     auto activeMelPatchId = activePatchStreamHandle_l(streamHandle);
     if (!activeMelPatchId) {
         ALOGV("%s stream handle %d does not have an active patch", __func__, streamHandle);
@@ -154,8 +154,8 @@
     }
 
     if (!newPatch.deviceStates.empty() && newPatch.csdActive) {
-        std::lock_guard _afl(mAfMelReporterCallback->mutex());
-        std::lock_guard _l(mLock);
+        audio_utils::lock_guard _afl(mAfMelReporterCallback->mutex());  // AudioFlinger_Mutex
+        audio_utils::lock_guard _l(mutex());
         ALOGV("%s add patch handle %d to active devices", __func__, handle);
         startMelComputationForActivePatch_l(newPatch);
         mActiveMelPatches[handle] = newPatch;
@@ -192,8 +192,8 @@
 
 void MelReporter::startMelComputationForDeviceId(audio_port_handle_t deviceId) {
     ALOGV("%s(%d)", __func__, deviceId);
-    std::lock_guard _laf(mAfMelReporterCallback->mutex());
-    std::lock_guard _l(mLock);
+    audio_utils::lock_guard _laf(mAfMelReporterCallback->mutex());
+    audio_utils::lock_guard _l(mutex());
 
     for (auto& activeMelPatch : mActiveMelPatches) {
         bool csdActive = false;
@@ -218,7 +218,7 @@
 
     ActiveMelPatch melPatch;
     {
-        std::lock_guard _l(mLock);
+        audio_utils::lock_guard _l(mutex());
 
         auto patchIt = mActiveMelPatches.find(handle);
         if (patchIt == mActiveMelPatches.end()) {
@@ -231,9 +231,19 @@
         mActiveMelPatches.erase(patchIt);
     }
 
-    std::lock_guard _afl(mAfMelReporterCallback->mutex());
-    std::lock_guard _l(mLock);
-    stopMelComputationForPatch_l(melPatch);
+    audio_utils::lock_guard _afl(mAfMelReporterCallback->mutex());  // AudioFlinger_Mutex
+    audio_utils::lock_guard _l(mutex());
+    if (melPatch.csdActive) {
+        // only need to stop if patch was active
+        melPatch.csdActive = false;
+        stopMelComputationForPatch_l(melPatch);
+    }
+}
+
+void MelReporter::onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+        audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) {
+    onReleaseAudioPatch(oldHandle);
+    onCreateAudioPatch(newHandle, patch);
 }
 
 sp<media::ISoundDose> MelReporter::getSoundDoseInterface(
@@ -244,7 +254,7 @@
 
 void MelReporter::stopInternalMelComputation() {
     ALOGV("%s", __func__);
-    std::lock_guard _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mUseHalSoundDoseInterface) {
         return;
     }
@@ -277,8 +287,8 @@
 
 void MelReporter::stopMelComputationForDeviceId(audio_port_handle_t deviceId) {
     ALOGV("%s(%d)", __func__, deviceId);
-    std::lock_guard _laf(mAfMelReporterCallback->mutex());
-    std::lock_guard _l(mLock);
+    audio_utils::lock_guard _laf(mAfMelReporterCallback->mutex());
+    audio_utils::lock_guard _l(mutex());
 
     for (auto& activeMelPatch : mActiveMelPatches) {
         bool csdActive = false;
@@ -312,7 +322,7 @@
 }
 
 std::string MelReporter::dump() {
-    std::lock_guard _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     std::string output("\nSound Dose:\n");
     output.append(mSoundDoseManager->dump());
     return output;
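
The std::lock_guard(mLock) call sites above become audio_utils::lock_guard(mutex()) with the same nesting: the callback's AudioFlinger-level mutex is always taken before the reporter's own mutex. A schematic version of that ordering with plain standard-library types (hypothetical ReporterCallback/Reporter names, not the AOSP classes):

    #include <mutex>
    #include <unordered_map>

    struct ReporterCallback {
        std::mutex& mutex() { return mMutex; }    // outer lock in the ordering
        std::mutex mMutex;
    };

    class Reporter {
    public:
        explicit Reporter(ReporterCallback* cb) : mCallback(cb) {}

        void releasePatch(int handle) {
            // Outer (callback) lock first, then the reporter's own lock;
            // acquiring them in the opposite order elsewhere risks deadlock.
            std::lock_guard outer(mCallback->mutex());
            std::lock_guard inner(mMutex);
            mActivePatches.erase(handle);
        }
    private:
        ReporterCallback* const mCallback;
        std::mutex mMutex;                        // inner lock in the ordering
        std::unordered_map<int, bool> mActivePatches;
    };
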
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 78c6c0c..bf4f390 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -20,9 +20,9 @@
 #include "IAfPatchPanel.h"
 #include "PatchCommandThread.h"
 
+#include <audio_utils/mutex.h>
 #include <sounddose/SoundDoseManager.h>
 
-#include <mutex>
 #include <unordered_map>
 
 namespace android {
@@ -31,9 +31,11 @@
 
 class IAfMelReporterCallback : public virtual RefBase {
 public:
-    virtual Mutex& mutex() const = 0;
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(audio_utils::AudioFlinger_Mutex) = 0;
     virtual const sp<PatchCommandThread>& getPatchCommandThread() = 0;
-    virtual sp<IAfThreadBase> checkOutputThread_l(audio_io_handle_t ioHandle) const = 0;
+    virtual sp<IAfThreadBase> checkOutputThread_l(audio_io_handle_t ioHandle) const
+            REQUIRES(mutex()) = 0;
 };
 
 /**
@@ -62,7 +64,7 @@
      * implementation, false otherwise.
      */
     bool activateHalSoundDoseComputation(const std::string& module,
-                                         const sp<DeviceHalInterface>& device);
+            const sp<DeviceHalInterface>& device) EXCLUDES_MelReporter_Mutex;
 
     /**
      * Activates the MEL reporting from internal framework values. These are used
@@ -70,20 +72,26 @@
      * Note: the internal CSD computation does not guarantee a certification with
      * IEC62368-1 3rd edition or EN50332-3
      */
-    void activateInternalSoundDoseComputation();
+    void activateInternalSoundDoseComputation() EXCLUDES_MelReporter_Mutex;
 
     sp<media::ISoundDose> getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback);
 
     std::string dump();
 
     // IMelReporterCallback methods
-    void stopMelComputationForDeviceId(audio_port_handle_t deviceId) override;
-    void startMelComputationForDeviceId(audio_port_handle_t deviceId) override;
+    void stopMelComputationForDeviceId(audio_port_handle_t deviceId) final
+            EXCLUDES_MelReporter_Mutex;
+    void startMelComputationForDeviceId(audio_port_handle_t deviceId) final
+            EXCLUDES_MelReporter_Mutex;
 
     // PatchCommandListener methods
     void onCreateAudioPatch(audio_patch_handle_t handle,
-        const IAfPatchPanel::Patch& patch) final;
-    void onReleaseAudioPatch(audio_patch_handle_t handle) final;
+            const IAfPatchPanel::Patch& patch) final
+            EXCLUDES_AudioFlinger_Mutex;
+    void onReleaseAudioPatch(audio_patch_handle_t handle) final EXCLUDES_AudioFlinger_Mutex;
+    void onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+                            audio_patch_handle_t newHandle,
+            const IAfPatchPanel::Patch& patch) final EXCLUDES_AudioFlinger_Mutex;
 
     /**
      * The new metadata can determine whether we should compute MEL for the given thread.
@@ -91,7 +99,9 @@
      * Otherwise, this method will disable CSD.
      **/
     void updateMetadataForCsd(audio_io_handle_t streamHandle,
-                              const std::vector<playback_track_metadata_v7_t>& metadataVec);
+            const std::vector<playback_track_metadata_v7_t>& metadataVec)
+            EXCLUDES_AudioFlinger_Mutex;
+
 private:
     struct ActiveMelPatch {
         audio_io_handle_t streamHandle{AUDIO_IO_HANDLE_NONE};
@@ -105,30 +115,34 @@
     };
 
     void stopInternalMelComputation();
+    audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::MelReporter_Mutex) {
+        return mMutex;
+    }
 
-    /** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
-    void stopMelComputationForPatch_l(const ActiveMelPatch& patch) REQUIRES(mLock);
+    /** Should be called with the following order of locks: mAudioFlinger.mutex() -> mutex(). */
+    void stopMelComputationForPatch_l(const ActiveMelPatch& patch) REQUIRES(mutex());
 
-    /** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
-    void startMelComputationForActivePatch_l(const ActiveMelPatch& patch) REQUIRES(mLock);
+    /** Should be called with the following order of locks: mAudioFlinger.mutex() -> mutex(). */
+    void startMelComputationForActivePatch_l(const ActiveMelPatch& patch) REQUIRES(mutex());
 
     std::optional<audio_patch_handle_t>
-    activePatchStreamHandle_l(audio_io_handle_t streamHandle) REQUIRES(mLock);
+    activePatchStreamHandle_l(audio_io_handle_t streamHandle) REQUIRES(mutex());
 
-    bool useHalSoundDoseInterface_l() REQUIRES(mLock);
+    bool useHalSoundDoseInterface_l() REQUIRES(mutex());
 
     const sp<IAfMelReporterCallback> mAfMelReporterCallback;
 
-    sp<SoundDoseManager> mSoundDoseManager;
+    /* const */ sp<SoundDoseManager> mSoundDoseManager;  // set onFirstRef
 
     /**
      * Lock for protecting the active mel patches. Do not mix with the AudioFlinger lock.
-     * Locking order AudioFlinger::mLock -> PatchCommandThread::mLock -> MelReporter::mLock.
+     * Locking order AudioFlinger::mutex() -> PatchCommandThread::mutex() -> MelReporter::mutex().
      */
-    std::mutex mLock;
-    std::unordered_map<audio_patch_handle_t, ActiveMelPatch> mActiveMelPatches GUARDED_BY(mLock);
-    std::unordered_map<audio_port_handle_t, int> mActiveDevices GUARDED_BY(mLock);
-    bool mUseHalSoundDoseInterface GUARDED_BY(mLock) = false;
+    mutable audio_utils::mutex mMutex;
+    std::unordered_map<audio_patch_handle_t, ActiveMelPatch> mActiveMelPatches
+            GUARDED_BY(mutex());
+    std::unordered_map<audio_port_handle_t, int> mActiveDevices GUARDED_BY(mutex());
+    bool mUseHalSoundDoseInterface GUARDED_BY(mutex()) = false;
 };
 
 }  // namespace android
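
The header now exposes the lock only through a private mutex() accessor annotated with RETURN_CAPABILITY and marks every guarded member with GUARDED_BY(mutex()). Sketched below with raw clang attributes and invented names, only to show what the analyzer checks; the real code uses the audio_utils macro header:

    #include <mutex>
    #include <unordered_map>

    class __attribute__((capability("mutex"))) Mutex {
    public:
        void lock() __attribute__((acquire_capability())) { mStd.lock(); }
        void unlock() __attribute__((release_capability())) { mStd.unlock(); }
    private:
        std::mutex mStd;
    };

    class Reporter {
    public:
        void setHalInUse(bool inUse) {
            mutex().lock();
            mUseHalInterface = inUse;   // OK: the guarding capability is held
            mutex().unlock();
            // Writing mUseHalInterface here, outside the lock, would trigger a
            // -Wthread-safety warning.
        }
    private:
        Mutex& mutex() const __attribute__((lock_returned(mMutex))) { return mMutex; }

        mutable Mutex mMutex;
        std::unordered_map<int, int> mActivePatches __attribute__((guarded_by(mMutex)));
        bool mUseHalInterface __attribute__((guarded_by(mMutex))) = false;
    };
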
diff --git a/services/audioflinger/PatchCommandThread.cpp b/services/audioflinger/PatchCommandThread.cpp
index c3259f1..2cfefa0 100644
--- a/services/audioflinger/PatchCommandThread.cpp
+++ b/services/audioflinger/PatchCommandThread.cpp
@@ -29,7 +29,7 @@
 PatchCommandThread::~PatchCommandThread() {
     exit();
 
-    std::lock_guard _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mCommands.clear();
 }
 
@@ -39,7 +39,7 @@
 
 void PatchCommandThread::addListener(const sp<PatchCommandListener>& listener) {
     ALOGV("%s add listener %p", __func__, static_cast<void*>(listener.get()));
-    std::lock_guard _l(mListenerLock);
+    audio_utils::lock_guard _l(listenerMutex());
     mListeners.emplace_back(listener);
 }
 
@@ -58,10 +58,19 @@
     releaseAudioPatchCommand(handle);
 }
 
+void PatchCommandThread::updateAudioPatch(audio_patch_handle_t oldHandle,
+        audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) {
+    ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
+            __func__, oldHandle, patch.mHalHandle,
+            patch.mAudioPatch.num_sinks,
+            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+
+    updateAudioPatchCommand(oldHandle, newHandle, patch);
+}
+
 bool PatchCommandThread::threadLoop()
-NO_THREAD_SAFETY_ANALYSIS  // bug in clang compiler.
 {
-    std::unique_lock _l(mLock);
+    audio_utils::unique_lock _l(mutex());
 
     while (!exitPending()) {
         while (!mCommands.empty() && !exitPending()) {
@@ -71,7 +80,7 @@
 
             std::vector<wp<PatchCommandListener>> listenersCopy;
             {
-                std::lock_guard _ll(mListenerLock);
+                audio_utils::lock_guard _ll(listenerMutex());
                 listenersCopy = mListeners;
             }
 
@@ -104,6 +113,21 @@
                     }
                 }
                     break;
+                case UPDATE_AUDIO_PATCH: {
+                    const auto data = (UpdateAudioPatchData*) command->mData.get();
+                    ALOGV("%s processing update audio patch old handle %d new handle %d",
+                          __func__,
+                          data->mOldHandle, data->mNewHandle);
+
+                    for (const auto& listener : listenersCopy) {
+                        auto spListener = listener.promote();
+                        if (spListener) {
+                            spListener->onUpdateAudioPatch(data->mOldHandle,
+                                    data->mNewHandle, data->mPatch);
+                        }
+                    }
+                }
+                    break;
                 default:
                     ALOGW("%s unknown command %d", __func__, command->mCommand);
                     break;
@@ -122,7 +146,7 @@
 }
 
 void PatchCommandThread::sendCommand(const sp<Command>& command) {
-    std::lock_guard _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mCommands.emplace_back(command);
     mWaitWorkCV.notify_one();
 }
@@ -145,10 +169,20 @@
     sendCommand(command);
 }
 
+void PatchCommandThread::updateAudioPatchCommand(
+        audio_patch_handle_t oldHandle, audio_patch_handle_t newHandle,
+        const IAfPatchPanel::Patch& patch) {
+    sp<Command> command = sp<Command>::make(UPDATE_AUDIO_PATCH,
+                                           new UpdateAudioPatchData(oldHandle, newHandle, patch));
+    ALOGV("%s adding update patch old handle %d new handle %d mHalHandle %d.",
+            __func__, oldHandle, newHandle, patch.mHalHandle);
+    sendCommand(command);
+}
+
 void PatchCommandThread::exit() {
     ALOGV("%s", __func__);
     {
-        std::lock_guard _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         requestExit();
         mWaitWorkCV.notify_one();
     }
diff --git a/services/audioflinger/PatchCommandThread.h b/services/audioflinger/PatchCommandThread.h
index a312fb7..f491e8a 100644
--- a/services/audioflinger/PatchCommandThread.h
+++ b/services/audioflinger/PatchCommandThread.h
@@ -38,6 +38,7 @@
     enum {
         CREATE_AUDIO_PATCH,
         RELEASE_AUDIO_PATCH,
+        UPDATE_AUDIO_PATCH,
     };
 
     class PatchCommandListener : public virtual RefBase {
@@ -45,15 +46,22 @@
         virtual void onCreateAudioPatch(audio_patch_handle_t handle,
                                         const IAfPatchPanel::Patch& patch) = 0;
         virtual void onReleaseAudioPatch(audio_patch_handle_t handle) = 0;
+        virtual void onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+                                        audio_patch_handle_t newHandle,
+                                        const IAfPatchPanel::Patch& patch) = 0;
     };
 
     PatchCommandThread() : Thread(false /* canCallJava */) {}
     ~PatchCommandThread() override;
 
-    void addListener(const sp<PatchCommandListener>& listener);
+    void addListener(const sp<PatchCommandListener>& listener)
+            EXCLUDES_PatchCommandThread_ListenerMutex;
 
-    void createAudioPatch(audio_patch_handle_t handle, const IAfPatchPanel::Patch& patch);
-    void releaseAudioPatch(audio_patch_handle_t handle);
+    void createAudioPatch(audio_patch_handle_t handle, const IAfPatchPanel::Patch& patch)
+            EXCLUDES_PatchCommandThread_Mutex;
+    void releaseAudioPatch(audio_patch_handle_t handle) EXCLUDES_PatchCommandThread_Mutex;
+    void updateAudioPatch(audio_patch_handle_t oldHandle, audio_patch_handle_t newHandle,
+            const IAfPatchPanel::Patch& patch) EXCLUDES_PatchCommandThread_Mutex;
 
     // Thread virtuals
     void onFirstRef() override;
@@ -62,8 +70,10 @@
     void exit();
 
     void createAudioPatchCommand(audio_patch_handle_t handle,
-            const IAfPatchPanel::Patch& patch);
-    void releaseAudioPatchCommand(audio_patch_handle_t handle);
+            const IAfPatchPanel::Patch& patch) EXCLUDES_PatchCommandThread_Mutex;
+    void releaseAudioPatchCommand(audio_patch_handle_t handle) EXCLUDES_PatchCommandThread_Mutex;
+    void updateAudioPatchCommand(audio_patch_handle_t oldHandle, audio_patch_handle_t newHandle,
+            const IAfPatchPanel::Patch& patch) EXCLUDES_PatchCommandThread_Mutex;
 
 private:
     class CommandData;
@@ -98,15 +108,34 @@
         audio_patch_handle_t mHandle;
     };
 
-    void sendCommand(const sp<Command>& command);
+    class UpdateAudioPatchData : public CommandData {
+    public:
+        UpdateAudioPatchData(audio_patch_handle_t oldHandle,
+                             audio_patch_handle_t newHandle,
+                             const IAfPatchPanel::Patch& patch)
+            :   mOldHandle(oldHandle), mNewHandle(newHandle), mPatch(patch) {}
 
-    std::string mThreadName;
-    std::mutex mLock;
-    std::condition_variable mWaitWorkCV;
-    std::deque<sp<Command>> mCommands GUARDED_BY(mLock); // list of pending commands
+        const audio_patch_handle_t mOldHandle;
+        const audio_patch_handle_t mNewHandle;
+        const IAfPatchPanel::Patch mPatch;
+    };
 
-    std::mutex mListenerLock;
-    std::vector<wp<PatchCommandListener>> mListeners GUARDED_BY(mListenerLock);
+    void sendCommand(const sp<Command>& command) EXCLUDES_PatchCommandThread_Mutex;
+
+    audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::PatchCommandThread_Mutex) {
+        return mMutex;
+    }
+    audio_utils::mutex& listenerMutex() const
+            RETURN_CAPABILITY(audio_utils::PatchCommandThread_ListenerMutex) {
+        return mListenerMutex;
+    }
+
+    mutable audio_utils::mutex mMutex;
+    audio_utils::condition_variable mWaitWorkCV;
+    std::deque<sp<Command>> mCommands GUARDED_BY(mutex()); // list of pending commands
+
+    mutable audio_utils::mutex mListenerMutex;
+    std::vector<wp<PatchCommandListener>> mListeners GUARDED_BY(listenerMutex());
 };
 
 }  // namespace android
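
The new UPDATE_AUDIO_PATCH command follows the same shape as the existing ones: a small data object queued under mMutex, drained by the thread loop, and dispatched to a snapshot of the listeners taken under mListenerMutex. A compact standard-library sketch of a queue/dispatch structure along those lines (invented CommandQueue type, a single command kind, callbacks dispatched with the queue lock dropped; not the Android Thread-based implementation):

    #include <condition_variable>
    #include <deque>
    #include <functional>
    #include <mutex>
    #include <thread>
    #include <vector>

    class CommandQueue {
    public:
        using Listener = std::function<void(int oldHandle, int newHandle)>;

        CommandQueue() : mWorker([this] { run(); }) {}
        ~CommandQueue() {
            {
                std::lock_guard _l(mMutex);
                mExit = true;
            }
            mCondition.notify_one();
            mWorker.join();
        }

        void addListener(Listener l) {
            std::lock_guard _l(mListenerMutex);
            mListeners.push_back(std::move(l));
        }

        // "update patch": carries both the old and the new handle.
        void updatePatch(int oldHandle, int newHandle) {
            {
                std::lock_guard _l(mMutex);
                mCommands.push_back({oldHandle, newHandle});
            }
            mCondition.notify_one();
        }

    private:
        struct Command { int oldHandle; int newHandle; };

        void run() {
            std::unique_lock _l(mMutex);
            while (!mExit) {
                while (!mCommands.empty() && !mExit) {
                    const Command c = mCommands.front();
                    mCommands.pop_front();

                    // Snapshot listeners, then dispatch with no lock held.
                    std::vector<Listener> listeners;
                    {
                        std::lock_guard _ll(mListenerMutex);
                        listeners = mListeners;
                    }
                    _l.unlock();
                    for (const auto& listener : listeners) {
                        listener(c.oldHandle, c.newHandle);
                    }
                    _l.lock();
                }
                mCondition.wait(_l, [this] { return mExit || !mCommands.empty(); });
            }
        }

        std::mutex mMutex;
        std::condition_variable mCondition;
        std::deque<Command> mCommands;
        bool mExit = false;

        std::mutex mListenerMutex;
        std::vector<Listener> mListeners;

        std::thread mWorker;   // last member: the queue exists before run() starts
    };
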
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index bfc2754..89f0a6e 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -83,7 +83,7 @@
 }
 
 /* List connected audio ports and their attributes */
-status_t PatchPanel::listAudioPorts(unsigned int* /* num_ports */,
+status_t PatchPanel::listAudioPorts_l(unsigned int* /* num_ports */,
                                 struct audio_port *ports __unused)
 {
     ALOGV(__func__);
@@ -91,14 +91,14 @@
 }
 
 /* Get supported attributes for a given audio port */
-status_t PatchPanel::getAudioPort(struct audio_port_v7* port)
+status_t PatchPanel::getAudioPort_l(struct audio_port_v7* port)
 {
     if (port->type != AUDIO_PORT_TYPE_DEVICE) {
         // Only query the HAL when the port is a device.
         // TODO: implement getAudioPort for mix.
         return INVALID_OPERATION;
     }
-    AudioHwDevice* hwDevice = findAudioHwDeviceByModule(port->ext.device.hw_module);
+    AudioHwDevice* hwDevice = findAudioHwDeviceByModule_l(port->ext.device.hw_module);
     if (hwDevice == nullptr) {
         ALOGW("%s cannot find hw module %d", __func__, port->ext.device.hw_module);
         return BAD_VALUE;
@@ -110,7 +110,7 @@
 }
 
 /* Connect a patch between several source and sink ports */
-status_t PatchPanel::createAudioPatch(const struct audio_patch* patch,
+status_t PatchPanel::createAudioPatch_l(const struct audio_patch* patch,
                                    audio_patch_handle_t *handle,
                                    bool endpointPatch)
  //unlocks AudioFlinger::mLock when calling IAfThreadBase::sendCreateAudioPatchConfigEvent
@@ -134,7 +134,8 @@
     if (patch->num_sources > 2) {
         return INVALID_OPERATION;
     }
-
+    bool reuseExistingHalPatch = false;
+    audio_patch_handle_t oldhandle = AUDIO_PATCH_HANDLE_NONE;
     if (*handle != AUDIO_PATCH_HANDLE_NONE) {
         auto iter = mPatches.find(*handle);
         if (iter != mPatches.end()) {
@@ -144,7 +145,7 @@
             // 1) if a software patch is present, release the playback and capture threads and
             // tracks created. This will also release the corresponding audio HAL patches
             if (removedPatch.isSoftware()) {
-                removedPatch.clearConnections(this);
+                removedPatch.clearConnections_l(this);
             }
             // 2) if the new patch and old patch source or sink are devices from different
             // hw modules,  clear the audio HAL patches now because they will not be updated
@@ -152,6 +153,7 @@
             if (removedPatch.mHalHandle != AUDIO_PATCH_HANDLE_NONE) {
                 audio_module_handle_t hwModule = AUDIO_MODULE_HANDLE_NONE;
                 const struct audio_patch &oldPatch = removedPatch.mAudioPatch;
+                oldhandle = *handle;
                 if (oldPatch.sources[0].type == AUDIO_PORT_TYPE_DEVICE &&
                         (patch->sources[0].type != AUDIO_PORT_TYPE_DEVICE ||
                                 oldPatch.sources[0].ext.device.hw_module !=
@@ -169,13 +171,17 @@
                     // removedPatch.mHalHandle would be AUDIO_PATCH_HANDLE_NONE in this case.
                     hwModule = oldPatch.sinks[0].ext.device.hw_module;
                 }
-                sp<DeviceHalInterface> hwDevice = findHwDeviceByModule(hwModule);
+                sp<DeviceHalInterface> hwDevice = findHwDeviceByModule_l(hwModule);
                 if (hwDevice != 0) {
                     hwDevice->releaseAudioPatch(removedPatch.mHalHandle);
                 }
                 halHandle = removedPatch.mHalHandle;
+                // Avoid removing/re-adding device effects when the mix/device did not change and
+                // the HAL patch has not been released.
+                // Note that no patch leaks at the HAL layer as halHandle is reused.
+                reuseExistingHalPatch = (hwDevice == 0) && patchesHaveSameRoute(*patch, oldPatch);
             }
-            erasePatch(*handle);
+            erasePatch(*handle, reuseExistingHalPatch);
         }
     }
 
@@ -185,7 +191,7 @@
     switch (patch->sources[0].type) {
         case AUDIO_PORT_TYPE_DEVICE: {
             audio_module_handle_t srcModule = patch->sources[0].ext.device.hw_module;
-            AudioHwDevice *audioHwDevice = findAudioHwDeviceByModule(srcModule);
+            AudioHwDevice *audioHwDevice = findAudioHwDeviceByModule_l(srcModule);
             if (!audioHwDevice) {
                 status = BAD_VALUE;
                 goto exit;
@@ -317,7 +323,7 @@
                     goto exit;
                 }
                 newPatch.mRecord.setThread(thread->asIAfRecordThread().get());
-                status = newPatch.createConnections(this);
+                status = newPatch.createConnections_l(this);
                 if (status != NO_ERROR) {
                     goto exit;
                 }
@@ -338,9 +344,9 @@
                             goto exit;
                         }
                     }
-                    mAfPatchPanelCallback->unlock();
+                    mAfPatchPanelCallback->mutex().unlock();
                     status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
-                    mAfPatchPanelCallback->lock();
+                    mAfPatchPanelCallback->mutex().lock();
                     if (status == NO_ERROR) {
                         newPatch.setThread(thread);
                     }
@@ -406,9 +412,9 @@
                 mAfPatchPanelCallback->updateOutDevicesForRecordThreads_l(devices);
             }
 
-            mAfPatchPanelCallback->unlock();
+            mAfPatchPanelCallback->mutex().unlock();
             status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
-            mAfPatchPanelCallback->lock();
+            mAfPatchPanelCallback->mutex().lock();
             if (status == NO_ERROR) {
                 newPatch.setThread(thread);
             }
@@ -436,13 +442,18 @@
         *handle = static_cast<audio_patch_handle_t>(
                 mAfPatchPanelCallback->nextUniqueId(AUDIO_UNIQUE_ID_USE_PATCH));
         newPatch.mHalHandle = halHandle;
-        mAfPatchPanelCallback->getPatchCommandThread()->createAudioPatch(*handle, newPatch);
+        if (reuseExistingHalPatch) {
+            mAfPatchPanelCallback->getPatchCommandThread()->updateAudioPatch(
+                    oldhandle, *handle, newPatch);
+        } else {
+            mAfPatchPanelCallback->getPatchCommandThread()->createAudioPatch(*handle, newPatch);
+        }
         if (insertedModule != AUDIO_MODULE_HANDLE_NONE) {
-            addSoftwarePatchToInsertedModules(insertedModule, *handle, &newPatch.mAudioPatch);
+            addSoftwarePatchToInsertedModules_l(insertedModule, *handle, &newPatch.mAudioPatch);
         }
         mPatches.insert(std::make_pair(*handle, std::move(newPatch)));
     } else {
-        newPatch.clearConnections(this);
+        newPatch.clearConnections_l(this);
     }
     return status;
 }
@@ -453,10 +464,10 @@
             mRecord.handle(), mPlayback.handle());
 }
 
-status_t PatchPanel::Patch::createConnections(const sp<IAfPatchPanel>& panel)
+status_t PatchPanel::Patch::createConnections_l(const sp<IAfPatchPanel>& panel)
 {
     // create patch from source device to record thread input
-    status_t status = panel->createAudioPatch(
+    status_t status = panel->createAudioPatch_l(
             PatchBuilder().addSource(mAudioPatch.sources[0]).
                 addSink(mRecord.thread(), { .source = AUDIO_SOURCE_MIC }).patch(),
             mRecord.handlePtr(),
@@ -468,7 +479,7 @@
 
     // create patch from playback thread output to sink device
     if (mAudioPatch.num_sinks != 0) {
-        status = panel->createAudioPatch(
+        status = panel->createAudioPatch_l(
                 PatchBuilder().addSource(mPlayback.thread()).addSink(mAudioPatch.sinks[0]).patch(),
                 mPlayback.handlePtr(),
                 true /*endpointPatch*/);
@@ -617,15 +628,15 @@
     return status;
 }
 
-void PatchPanel::Patch::clearConnections(const sp<IAfPatchPanel>& panel)
+void PatchPanel::Patch::clearConnections_l(const sp<IAfPatchPanel>& panel)
 {
     ALOGV("%s() mRecord.handle %d mPlayback.handle %d",
             __func__, mRecord.handle(), mPlayback.handle());
     mRecord.stopTrack();
     mPlayback.stopTrack();
     mRecord.clearTrackPeer(); // mRecord stop is synchronous. Break PeerProxy sp<> cycle.
-    mRecord.closeConnections(panel);
-    mPlayback.closeConnections(panel);
+    mRecord.closeConnections_l(panel);
+    mPlayback.closeConnections_l(panel);
 }
 
 status_t PatchPanel::Patch::getLatencyMs(double* latencyMs) const
@@ -715,7 +726,7 @@
 }
 
 /* Disconnect a patch */
-status_t PatchPanel::releaseAudioPatch(audio_patch_handle_t handle)
+status_t PatchPanel::releaseAudioPatch_l(audio_patch_handle_t handle)
  //unlocks AudioFlinger::mLock when calling IAfThreadBase::sendReleaseAudioPatchConfigEvent
  //to avoid deadlocks if the thread loop needs to acquire AudioFlinger::mLock
  //before processing the release patch request.
@@ -734,7 +745,7 @@
     const struct audio_port_config &src = patch.sources[0];
     switch (src.type) {
         case AUDIO_PORT_TYPE_DEVICE: {
-            sp<DeviceHalInterface> hwDevice = findHwDeviceByModule(src.ext.device.hw_module);
+            sp<DeviceHalInterface> hwDevice = findHwDeviceByModule_l(src.ext.device.hw_module);
             if (hwDevice == 0) {
                 ALOGW("%s() bad src hw module %d", __func__, src.ext.device.hw_module);
                 status = BAD_VALUE;
@@ -742,7 +753,7 @@
             }
 
             if (removedPatch.isSoftware()) {
-                removedPatch.clearConnections(this);
+                removedPatch.clearConnections_l(this);
                 break;
             }
 
@@ -757,15 +768,15 @@
                         break;
                     }
                 }
-                mAfPatchPanelCallback->unlock();
+                mAfPatchPanelCallback->mutex().unlock();
                 status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
-                mAfPatchPanelCallback->lock();
+                mAfPatchPanelCallback->mutex().lock();
             } else {
                 status = hwDevice->releaseAudioPatch(removedPatch.mHalHandle);
             }
         } break;
         case AUDIO_PORT_TYPE_MIX: {
-            if (findHwDeviceByModule(src.ext.mix.hw_module) == 0) {
+            if (findHwDeviceByModule_l(src.ext.mix.hw_module) == 0) {
                 ALOGW("%s() bad src hw module %d", __func__, src.ext.mix.hw_module);
                 status = BAD_VALUE;
                 break;
@@ -780,9 +791,9 @@
                     break;
                 }
             }
-            mAfPatchPanelCallback->unlock();
+            mAfPatchPanelCallback->mutex().unlock();
             status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
-            mAfPatchPanelCallback->lock();
+            mAfPatchPanelCallback->mutex().lock();
         } break;
         default:
             status = BAD_VALUE;
@@ -792,14 +803,16 @@
     return status;
 }
 
-void PatchPanel::erasePatch(audio_patch_handle_t handle) {
+void PatchPanel::erasePatch(audio_patch_handle_t handle, bool reuseExistingHalPatch) {
     mPatches.erase(handle);
     removeSoftwarePatchFromInsertedModules(handle);
-    mAfPatchPanelCallback->getPatchCommandThread()->releaseAudioPatch(handle);
+    if (!reuseExistingHalPatch) {
+        mAfPatchPanelCallback->getPatchCommandThread()->releaseAudioPatch(handle);
+    }
 }
 
 /* List connected audio ports and their attributes */
-status_t PatchPanel::listAudioPatches(unsigned int* /* num_patches */,
+status_t PatchPanel::listAudioPatches_l(unsigned int* /* num_patches */,
                                   struct audio_patch *patches __unused)
 {
     ALOGV(__func__);
@@ -856,7 +869,7 @@
     }
 }
 
-AudioHwDevice* PatchPanel::findAudioHwDeviceByModule(audio_module_handle_t module)
+AudioHwDevice* PatchPanel::findAudioHwDeviceByModule_l(audio_module_handle_t module)
 {
     if (module == AUDIO_MODULE_HANDLE_NONE) return nullptr;
     ssize_t index = mAfPatchPanelCallback->getAudioHwDevs_l().indexOfKey(module);
@@ -867,13 +880,13 @@
     return mAfPatchPanelCallback->getAudioHwDevs_l().valueAt(index);
 }
 
-sp<DeviceHalInterface> PatchPanel::findHwDeviceByModule(audio_module_handle_t module)
+sp<DeviceHalInterface> PatchPanel::findHwDeviceByModule_l(audio_module_handle_t module)
 {
-    AudioHwDevice *audioHwDevice = findAudioHwDeviceByModule(module);
+    AudioHwDevice *audioHwDevice = findAudioHwDeviceByModule_l(module);
     return audioHwDevice ? audioHwDevice->hwDevice() : nullptr;
 }
 
-void PatchPanel::addSoftwarePatchToInsertedModules(
+void PatchPanel::addSoftwarePatchToInsertedModules_l(
         audio_module_handle_t module, audio_patch_handle_t handle,
         const struct audio_patch *patch)
 {
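
A minimal standalone sketch of the notification policy the PatchPanel.cpp hunks above implement (stub names, not the AOSP classes): when a patch is re-created over the same HAL route (reuseExistingHalPatch), the patch command thread is asked for a single "update" instead of a "release" followed by a "create", so listeners such as DeviceEffectManager do not see a spurious device teardown.

    #include <cstdio>

    // Stub types for illustration only; not the AOSP interfaces.
    using patch_handle_t = int;
    struct Patch {};

    struct PatchCommandThreadStub {
        void createAudioPatch(patch_handle_t h, const Patch&) { std::printf("create %d\n", h); }
        void updateAudioPatch(patch_handle_t oldH, patch_handle_t newH, const Patch&) {
            std::printf("update %d -> %d\n", oldH, newH);
        }
        void releaseAudioPatch(patch_handle_t h) { std::printf("release %d\n", h); }
    };

    // When the new patch keeps the same HAL route, listeners get one "update"
    // instead of a release/create pair.
    void commitRecreatedPatch(PatchCommandThreadStub& commands, bool reuseExistingHalPatch,
                              patch_handle_t oldHandle, patch_handle_t newHandle,
                              const Patch& patch) {
        if (reuseExistingHalPatch) {
            commands.updateAudioPatch(oldHandle, newHandle, patch);
        } else {
            commands.releaseAudioPatch(oldHandle);
            commands.createAudioPatch(newHandle, patch);
        }
    }
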
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index b8b7b79..cec554c 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -30,25 +30,29 @@
         : mAfPatchPanelCallback(afPatchPanelCallback) {}
 
     /* List connected audio ports and their attributes */
-    status_t listAudioPorts(unsigned int *num_ports,
-        struct audio_port* ports) final;
+    status_t listAudioPorts_l(unsigned int *num_ports,
+            struct audio_port* ports) final REQUIRES(audio_utils::AudioFlinger_Mutex);
 
     /* Get supported attributes for a given audio port */
-    status_t getAudioPort(struct audio_port_v7* port) final;
+    status_t getAudioPort_l(struct audio_port_v7* port) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
     /* Create a patch between several source and sink ports */
-    status_t createAudioPatch(const struct audio_patch *patch,
+    status_t createAudioPatch_l(const struct audio_patch *patch,
                               audio_patch_handle_t *handle,
-                              bool endpointPatch = false) final;
+                              bool endpointPatch = false) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
     /* Release a patch */
-    status_t releaseAudioPatch(audio_patch_handle_t handle) final;
+    status_t releaseAudioPatch_l(audio_patch_handle_t handle) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
     /* List connected audio devices and their attributes */
-    status_t listAudioPatches(unsigned int *num_patches,
-            struct audio_patch* patches) final;
+    status_t listAudioPatches_l(unsigned int *num_patches,
+            struct audio_patch* patches) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
-    // Retrieves all currently estrablished software patches for a stream
+    // Retrieves all currently established software patches for a stream
     // opened on an intermediate module.
     status_t getDownstreamSoftwarePatches(audio_io_handle_t stream,
             std::vector<SoftwarePatch>* patches) const final;
@@ -60,22 +64,57 @@
 
     void dump(int fd) const final;
 
-    // Call with AudioFlinger mLock held
-    const std::map<audio_patch_handle_t, Patch>& patches_l() const final { return mPatches; }
+    const std::map<audio_patch_handle_t, Patch>& patches_l() const final
+            REQUIRES(audio_utils::AudioFlinger_Mutex) { return mPatches; }
 
-    // Must be called under AudioFlinger::mLock
-    status_t getLatencyMs_l(audio_patch_handle_t patchHandle, double* latencyMs) const final;
+    status_t getLatencyMs_l(audio_patch_handle_t patchHandle, double* latencyMs) const final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
-    void closeThreadInternal_l(const sp<IAfThreadBase>& thread) const final;
+    void closeThreadInternal_l(const sp<IAfThreadBase>& thread) const final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
 private:
-    AudioHwDevice* findAudioHwDeviceByModule(audio_module_handle_t module);
-    sp<DeviceHalInterface> findHwDeviceByModule(audio_module_handle_t module);
-    void addSoftwarePatchToInsertedModules(
+    AudioHwDevice* findAudioHwDeviceByModule_l(audio_module_handle_t module)
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
+    sp<DeviceHalInterface> findHwDeviceByModule_l(audio_module_handle_t module)
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
+    void addSoftwarePatchToInsertedModules_l(
             audio_module_handle_t module, audio_patch_handle_t handle,
-            const struct audio_patch *patch);
+            const struct audio_patch *patch)
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
     void removeSoftwarePatchFromInsertedModules(audio_patch_handle_t handle);
-    void erasePatch(audio_patch_handle_t handle);
+    /**
+     * Erase the patch referred to by its handle.
+     * @param handle handle of the patch to be erased
+     * @param reuseExistingHalPatch if set, do not trigger the listeners' callbacks; listeners
+     * will instead receive onUpdateAudioPatch when the patch is recreated.
+     * This prevents, for example, the DeviceEffectManager from spuriously removing / adding
+     * a device on an already opened input / output mix.
+     */
+    void erasePatch(audio_patch_handle_t handle, bool reuseExistingHalPatch = false);
+
+    /**
+     * Returns true if the old and new patches passed as arguments describe the same
+     * connections between the first sink and the first source.
+     * @param oldPatch previous patch
+     * @param newPatch new patch
+     * @return true if the route is unchanged between the old and new patch, false otherwise
+     */
+    inline bool patchesHaveSameRoute(
+            const struct audio_patch &newPatch, const struct audio_patch &oldPatch) const {
+        return (newPatch.sources[0].type == AUDIO_PORT_TYPE_DEVICE &&
+                oldPatch.sources[0].type == AUDIO_PORT_TYPE_DEVICE &&
+                newPatch.sources[0].id == oldPatch.sources[0].id &&
+                newPatch.sinks[0].type == AUDIO_PORT_TYPE_MIX &&
+                oldPatch.sinks[0].type == AUDIO_PORT_TYPE_MIX &&
+                newPatch.sinks[0].ext.mix.handle == oldPatch.sinks[0].ext.mix.handle) ||
+                (newPatch.sinks[0].type == AUDIO_PORT_TYPE_DEVICE &&
+                oldPatch.sinks[0].type == AUDIO_PORT_TYPE_DEVICE &&
+                newPatch.sinks[0].id == oldPatch.sinks[0].id &&
+                newPatch.sources[0].type == AUDIO_PORT_TYPE_MIX &&
+                oldPatch.sources[0].type == AUDIO_PORT_TYPE_MIX &&
+                newPatch.sources[0].ext.mix.handle == oldPatch.sources[0].ext.mix.handle);
+    }
 
     const sp<IAfPatchPanelCallback> mAfPatchPanelCallback;
     std::map<audio_patch_handle_t, Patch> mPatches;
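
The header hunks above replace "must be called with AudioFlinger::mLock held" comments with Clang thread-safety annotations (REQUIRES(audio_utils::AudioFlinger_Mutex), GUARDED_BY). A minimal standalone analogue, written with the raw Clang attributes rather than the audio_utils macro wrappers and compiled with -Wthread-safety, to show what the compiler then enforces; the class and member names below are hypothetical.

    #include <mutex>

    // Raw Clang thread-safety attributes; the AOSP code uses macro wrappers with the same meaning.
    #define CAPABILITY(x)        __attribute__((capability(x)))
    #define GUARDED_BY(x)        __attribute__((guarded_by(x)))
    #define REQUIRES(...)        __attribute__((requires_capability(__VA_ARGS__)))
    #define ACQUIRE(...)         __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)         __attribute__((release_capability(__VA_ARGS__)))
    #define RETURN_CAPABILITY(x) __attribute__((lock_returned(x)))

    class CAPABILITY("mutex") Mutex {
    public:
        void lock() ACQUIRE() { m.lock(); }
        void unlock() RELEASE() { m.unlock(); }
    private:
        std::mutex m;
    };

    class PanelLike {
    public:
        // Accessor style used by the patch: callers name the capability as mutex().
        Mutex& mutex() const RETURN_CAPABILITY(mMutex) { return mMutex; }

        void addPatch() {
            mMutex.lock();
            addPatch_l();        // OK: the analysis sees mMutex held here
            mMutex.unlock();
        }
        // Compiles only from contexts that already hold the lock, replacing the old
        // "must be called with ... held" comments with a compile-time check.
        void addPatch_l() REQUIRES(mMutex) { ++mCount; }

    private:
        mutable Mutex mMutex;
        int mCount GUARDED_BY(mMutex) = 0;   // accesses are checked against mMutex
    };
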
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 5f54e11..078ae12 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -20,6 +20,7 @@
 #include "TrackBase.h"
 
 #include <android/os/BnExternalVibrationController.h>
+#include <audio_utils/mutex.h>
 #include <audio_utils/LinearMap.h>
 #include <binder/AppOpsManager.h>
 
@@ -62,8 +63,8 @@
 
     wp<IAfThreadBase> mThread;
     std::atomic_bool mHasOpPlayAudio;
-    const AttributionSourceState mAttributionSource;
-    const int32_t mUsage; // on purpose not audio_usage_t because always checked in appOps as int32_t
+    const int32_t mUsage;  // on purpose not audio_usage_t because always checked in appOps as
+                           // int32_t
     const int mId; // for logging purposes only
     const uid_t mUid;
     const String16 mPackageName;
@@ -468,7 +469,8 @@
      */
     SourceMetadatas mTrackMetadatas;
     /** Protects mTrackMetadatas against concurrent access. */
-    mutable std::mutex mTrackMetadatasMutex;
+    audio_utils::mutex& trackMetadataMutex() const { return mTrackMetadataMutex; }
+    mutable audio_utils::mutex mTrackMetadataMutex;
 };  // end of OutputTrack
 
 // playback track, used by PatchPanel
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 021add4..8d3de38 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -20,6 +20,7 @@
 #include "TrackBase.h"
 
 #include <android/content/AttributionSourceState.h>
+#include <audio_utils/mutex.h>
 #include <datapath/AudioStreamIn.h> // struct Source
 
 namespace android {
@@ -214,15 +215,16 @@
     };
 
     sp<StreamInHalInterface> obtainStream(sp<IAfThreadBase>* thread);
+    audio_utils::mutex& readMutex() const { return mReadMutex; }
 
     PatchRecordAudioBufferProvider mPatchRecordAudioBufferProvider;
     std::unique_ptr<void, decltype(free)*> mSinkBuffer;  // frame size aligned continuous buffer
     std::unique_ptr<void, decltype(free)*> mStubBuffer;  // buffer used for AudioBufferProvider
     size_t mUnconsumedFrames = 0;
-    std::mutex mReadLock;
-    std::condition_variable mReadCV;
-    size_t mReadBytes = 0; // GUARDED_BY(mReadLock)
-    status_t mReadError = NO_ERROR; // GUARDED_BY(mReadLock)
+    mutable audio_utils::mutex mReadMutex;
+    audio_utils::condition_variable mReadCV;
+    size_t mReadBytes = 0; // GUARDED_BY(readMutex())
+    status_t mReadError = NO_ERROR; // GUARDED_BY(readMutex())
     int64_t mLastReadFrames = 0;  // accessed on RecordThread only
 };
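
A plausible reading of the converted members above: mReadBytes and mReadError are published under readMutex() and a waiter blocks on mReadCV until one of them changes. The sketch below shows that handshake with the standard-library types the audio_utils classes mirror (stand-in names, not the AOSP PatchRecord).

    #include <condition_variable>
    #include <cstddef>
    #include <mutex>

    struct ReadState {                       // stand-in for the guarded members
        std::mutex readMutex;
        std::condition_variable readCV;
        size_t readBytes = 0;                // guarded by readMutex
        int readError = 0;                   // guarded by readMutex
    };

    // Producer side: publish the result of an asynchronous read and wake the waiter.
    void onReadComplete(ReadState& s, size_t bytes, int error) {
        {
            std::lock_guard<std::mutex> lock(s.readMutex);
            s.readBytes = bytes;
            s.readError = error;
        }
        s.readCV.notify_one();
    }

    // Consumer side: block until data or an error is available, then consume it.
    size_t waitForRead(ReadState& s, int* error) {
        std::unique_lock<std::mutex> lock(s.readMutex);
        s.readCV.wait(lock, [&] { return s.readBytes != 0 || s.readError != 0; });
        *error = s.readError;
        const size_t bytes = s.readBytes;
        s.readBytes = 0;                     // consume
        return bytes;
    }
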
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 5ad4770..de6f694 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -685,9 +685,9 @@
         //      mWaitWorkCV.wait(...);
         //      // now thread is hung
         //  }
-        AutoMutex lock(mLock);
+        audio_utils::lock_guard lock(mutex());
         requestExit();
-        mWaitWorkCV.broadcast();
+        mWaitWorkCV.notify_all();
     }
     // When Thread::requestExitAndWait is made virtual and this method is renamed to
     // "virtual status_t requestExitAndWait()", replace by "return Thread::requestExitAndWait();"
@@ -697,7 +697,7 @@
 status_t ThreadBase::setParameters(const String8& keyValuePairs)
 {
     ALOGV("ThreadBase::setParameters() %s", keyValuePairs.c_str());
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     return sendSetParameterConfigEvent_l(keyValuePairs);
 }
@@ -716,30 +716,31 @@
     }
     mConfigEvents.add(event);
     ALOGV("sendConfigEvent_l() num events %zu event %d", mConfigEvents.size(), event->mType);
-    mWaitWorkCV.signal();
-    mLock.unlock();
+    mWaitWorkCV.notify_one();
+    mutex().unlock();
     {
-        Mutex::Autolock _l(event->mLock);
+        audio_utils::unique_lock _l(event->mutex());
         while (event->mWaitStatus) {
-            if (event->mCond.waitRelative(event->mLock, kConfigEventTimeoutNs) != NO_ERROR) {
+            if (event->mCondition.wait_for(_l, std::chrono::nanoseconds(kConfigEventTimeoutNs))
+                        == std::cv_status::timeout) {
                 event->mStatus = TIMED_OUT;
                 event->mWaitStatus = false;
             }
         }
         status = event->mStatus;
     }
-    mLock.lock();
+    mutex().lock();
     return status;
 }
 
 void ThreadBase::sendIoConfigEvent(audio_io_config_event_t event, pid_t pid,
                                                  audio_port_handle_t portId)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     sendIoConfigEvent_l(event, pid, portId);
 }
 
-// sendIoConfigEvent_l() must be called with ThreadBase::mLock held
+// sendIoConfigEvent_l() must be called with ThreadBase::mutex() held
 void ThreadBase::sendIoConfigEvent_l(audio_io_config_event_t event, pid_t pid,
                                                    audio_port_handle_t portId)
 {
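
The hunk above swaps Condition::waitRelative() for audio_utils::condition_variable::wait_for(), which reports timeouts through std::cv_status rather than a status_t. A standalone sketch of that wait loop using the standard-library types it mirrors (simplified event struct and a stand-in error code, not the AOSP ConfigEvent):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    struct EventLike {                        // simplified stand-in for ConfigEvent
        std::mutex mutex;
        std::condition_variable condition;
        bool waitStatus = true;               // cleared by the servicing thread
        int status = 0;                       // result once waitStatus is false
    };

    constexpr int kTimedOut = -1;             // stand-in for the framework's TIMED_OUT

    int waitForEvent(EventLike& event, std::chrono::nanoseconds timeout) {
        std::unique_lock<std::mutex> lock(event.mutex);
        while (event.waitStatus) {
            if (event.condition.wait_for(lock, timeout) == std::cv_status::timeout) {
                event.status = kTimedOut;     // give up after a full timeout period
                event.waitStatus = false;
            }
        }
        return event.status;
    }
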
@@ -759,11 +760,11 @@
 
 void ThreadBase::sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     sendPrioConfigEvent_l(pid, tid, prio, forApp);
 }
 
-// sendPrioConfigEvent_l() must be called with ThreadBase::mLock held
+// sendPrioConfigEvent_l() must be called with ThreadBase::mutex() held
 void ThreadBase::sendPrioConfigEvent_l(
         pid_t pid, pid_t tid, int32_t prio, bool forApp)
 {
@@ -771,7 +772,7 @@
     sendConfigEvent_l(configEvent);
 }
 
-// sendSetParameterConfigEvent_l() must be called with ThreadBase::mLock held
+// sendSetParameterConfigEvent_l() must be called with ThreadBase::mutex() held
 status_t ThreadBase::sendSetParameterConfigEvent_l(const String8& keyValuePair)
 {
     sp<ConfigEvent> configEvent;
@@ -794,7 +795,7 @@
                                                         const struct audio_patch *patch,
                                                         audio_patch_handle_t *handle)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     sp<ConfigEvent> configEvent = (ConfigEvent *)new CreateAudioPatchConfigEvent(*patch, *handle);
     status_t status = sendConfigEvent_l(configEvent);
     if (status == NO_ERROR) {
@@ -808,7 +809,7 @@
 status_t ThreadBase::sendReleaseAudioPatchConfigEvent(
                                                                 const audio_patch_handle_t handle)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     sp<ConfigEvent> configEvent = (ConfigEvent *)new ReleaseAudioPatchConfigEvent(handle);
     return sendConfigEvent_l(configEvent);
 }
@@ -820,7 +821,7 @@
         // The update out device operation is only for record thread.
         return INVALID_OPERATION;
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     sp<ConfigEvent> configEvent = (ConfigEvent *)new UpdateOutDevicesConfigEvent(outDevices);
     return sendConfigEvent_l(configEvent);
 }
@@ -835,7 +836,7 @@
 
 void ThreadBase::sendCheckOutputStageEffectsEvent()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     sendCheckOutputStageEffectsEvent_l();
 }
 
@@ -874,7 +875,7 @@
         } break;
         case CFG_EVENT_IO: {
             IoConfigEventData *data = (IoConfigEventData *)event->mData.get();
-            ioConfigChanged(data->mEvent, data->mPid, data->mPortId);
+            ioConfigChanged_l(data->mEvent, data->mPid, data->mPortId);
         } break;
         case CFG_EVENT_SET_PARAMETER: {
             SetParameterConfigEventData *data = (SetParameterConfigEventData *)event->mData.get();
@@ -885,22 +886,22 @@
             }
         } break;
         case CFG_EVENT_CREATE_AUDIO_PATCH: {
-            const DeviceTypeSet oldDevices = getDeviceTypes();
+            const DeviceTypeSet oldDevices = getDeviceTypes_l();
             CreateAudioPatchConfigEventData *data =
                                             (CreateAudioPatchConfigEventData *)event->mData.get();
             event->mStatus = createAudioPatch_l(&data->mPatch, &data->mHandle);
-            const DeviceTypeSet newDevices = getDeviceTypes();
+            const DeviceTypeSet newDevices = getDeviceTypes_l();
             configChanged = oldDevices != newDevices;
             mLocalLog.log("CFG_EVENT_CREATE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
                     dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
                     dumpDeviceTypes(newDevices).c_str(), toString(newDevices).c_str());
         } break;
         case CFG_EVENT_RELEASE_AUDIO_PATCH: {
-            const DeviceTypeSet oldDevices = getDeviceTypes();
+            const DeviceTypeSet oldDevices = getDeviceTypes_l();
             ReleaseAudioPatchConfigEventData *data =
                                             (ReleaseAudioPatchConfigEventData *)event->mData.get();
             event->mStatus = releaseAudioPatch_l(data->mHandle);
-            const DeviceTypeSet newDevices = getDeviceTypes();
+            const DeviceTypeSet newDevices = getDeviceTypes_l();
             configChanged = oldDevices != newDevices;
             mLocalLog.log("CFG_EVENT_RELEASE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
                     dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
@@ -930,10 +931,10 @@
             break;
         }
         {
-            Mutex::Autolock _l(event->mLock);
+            audio_utils::lock_guard _l(event->mutex());
             if (event->mWaitStatus) {
                 event->mWaitStatus = false;
-                event->mCond.signal();
+                event->mCondition.notify_one();
             }
         }
         ALOGV_IF(mConfigEvents.isEmpty(), "processConfigEvents_l() DONE thread %p", this);
@@ -1026,7 +1027,7 @@
     dprintf(fd, "\n%s thread %p, name %s, tid %d, type %d (%s):\n", isOutput() ? "Output" : "Input",
             this, mThreadName, getTid(), type(), threadTypeToString(type()));
 
-    const bool locked = afutils::dumpTryLock(mLock);
+    const bool locked = afutils::dumpTryLock(mutex());
     if (!locked) {
         dprintf(fd, "  Thread may be deadlocked\n");
     }
@@ -1037,7 +1038,7 @@
     dumpEffectChains_l(fd, args);
 
     if (locked) {
-        mLock.unlock();
+        mutex().unlock();
     }
 
     dprintf(fd, "  Local log:\n");
@@ -1088,9 +1089,9 @@
     }
     // Note: output device may be used by capture threads for effects such as AEC.
     dprintf(fd, "  Output devices: %s (%s)\n",
-            dumpDeviceTypes(outDeviceTypes()).c_str(), toString(outDeviceTypes()).c_str());
+            dumpDeviceTypes(outDeviceTypes_l()).c_str(), toString(outDeviceTypes_l()).c_str());
     dprintf(fd, "  Input device: %#x (%s)\n",
-            inDeviceType(), toString(inDeviceType()).c_str());
+            inDeviceType_l(), toString(inDeviceType_l()).c_str());
     dprintf(fd, "  Audio source: %d (%s)\n", mAudioSource, toString(mAudioSource).c_str());
 
     // Dump timestamp statistics for the Thread types that support it.
@@ -1101,7 +1102,8 @@
             || mType == OFFLOAD
             || mType == SPATIALIZER) {
         dprintf(fd, "  Timestamp stats: %s\n", mTimestampVerifier.toString().c_str());
-        dprintf(fd, "  Timestamp corrected: %s\n", isTimestampCorrectionEnabled() ? "yes" : "no");
+        dprintf(fd, "  Timestamp corrected: %s\n",
+                isTimestampCorrectionEnabled_l() ? "yes" : "no");
     }
 
     if (mLastIoBeginNs > 0) { // MMAP may not set this
@@ -1152,7 +1154,7 @@
 
 void ThreadBase::acquireWakeLock()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     acquireWakeLock_l();
 }
 
@@ -1206,7 +1208,7 @@
 
 void ThreadBase::releaseWakeLock()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     releaseWakeLock_l();
 }
 
@@ -1265,7 +1267,7 @@
 
 void ThreadBase::clearPowerManager()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     releaseWakeLock_l();
     mPowerManager.clear();
 }
@@ -1396,7 +1398,7 @@
 NO_THREAD_SAFETY_ANALYSIS  // manual locking
 {
     if (!threadLocked) {
-        mLock.lock();
+        mutex().lock();
     }
 
     if (mType != RECORD) {
@@ -1411,11 +1413,11 @@
     }
 
     if (!threadLocked) {
-        mLock.unlock();
+        mutex().unlock();
     }
 }
 
-// checkEffectCompatibility_l() must be called with ThreadBase::mLock held
+// checkEffectCompatibility_l() must be called with ThreadBase::mutex() held
 status_t RecordThread::checkEffectCompatibility_l(
         const effect_descriptor_t *desc, audio_session_t sessionId)
 {
@@ -1459,7 +1461,7 @@
     return NO_ERROR;
 }
 
-// checkEffectCompatibility_l() must be called with ThreadBase::mLock held
+// checkEffectCompatibility_l() must be called with ThreadBase::mutex() held
 status_t PlaybackThread::checkEffectCompatibility_l(
         const effect_descriptor_t *desc, audio_session_t sessionId)
 {
@@ -1614,7 +1616,7 @@
     return NO_ERROR;
 }
 
-// ThreadBase::createEffect_l() must be called with AudioFlinger::mLock held
+// ThreadBase::createEffect_l() must be called with AudioFlinger::mutex() held
 sp<IAfEffectHandle> ThreadBase::createEffect_l(
         const sp<Client>& client,
         const sp<IEffectClient>& effectClient,
@@ -1643,8 +1645,8 @@
 
     ALOGV("createEffect_l() thread %p effect %s on session %d", this, desc->name, sessionId);
 
-    { // scope for mLock
-        Mutex::Autolock _l(mLock);
+    { // scope for mutex()
+        audio_utils::lock_guard _l(mutex());
 
         lStatus = checkEffectCompatibility_l(desc, sessionId);
         if (probe || lStatus != NO_ERROR) {
@@ -1706,7 +1708,7 @@
 
 Exit:
     if (!probe && lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         if (effectCreated) {
             chain->removeEffect_l(effect);
         }
@@ -1726,7 +1728,7 @@
     bool remove = false;
     sp<IAfEffectModule> effect;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         sp<IAfEffectBase> effectBase = handle->effect().promote();
         if (effectBase == nullptr) {
             return;
@@ -1752,7 +1754,7 @@
 
 void ThreadBase::onEffectEnable(const sp<IAfEffectModule>& effect) {
     if (isOffloadOrMmap()) {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         broadcast_l();
     }
     if (!effect->isOffloadable()) {
@@ -1768,7 +1770,7 @@
 
 void ThreadBase::onEffectDisable() {
     if (isOffloadOrMmap()) {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         broadcast_l();
     }
 }
@@ -1776,7 +1778,7 @@
 sp<IAfEffectModule> ThreadBase::getEffect(audio_session_t sessionId,
         int effectId) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return getEffect_l(sessionId, effectId);
 }
 
@@ -1793,9 +1795,9 @@
     return chain != nullptr ? chain->getEffectIds() : std::vector<int>{};
 }
 
-// PlaybackThread::addEffect_l() must be called with AudioFlinger::mLock and
-// PlaybackThread::mLock held
-status_t ThreadBase::addEffect_l(const sp<IAfEffectModule>& effect)
+// PlaybackThread::addEffect_ll() must be called with AudioFlinger::mutex() and
+// ThreadBase::mutex() held
+status_t ThreadBase::addEffect_ll(const sp<IAfEffectModule>& effect)
 {
     // check for existing effect chain with the requested audio session
     audio_session_t sessionId = effect->sessionId();
@@ -1803,22 +1805,22 @@
     bool chainCreated = false;
 
     ALOGD_IF((mType == OFFLOAD) && !effect->isOffloadable(),
-             "addEffect_l() on offloaded thread %p: effect %s does not support offload flags %#x",
-                    this, effect->desc().name, effect->desc().flags);
+             "%s: on offloaded thread %p: effect %s does not support offload flags %#x",
+             __func__, this, effect->desc().name, effect->desc().flags);
 
     if (chain == 0) {
         // create a new chain for this session
-        ALOGV("addEffect_l() new effect chain for session %d", sessionId);
+        ALOGV("%s: new effect chain for session %d", __func__, sessionId);
         chain = IAfEffectChain::create(this, sessionId);
         addEffectChain_l(chain);
         chain->setStrategy(getStrategyForSession_l(sessionId));
         chainCreated = true;
     }
-    ALOGV("addEffect_l() %p chain %p effect %p", this, chain.get(), effect.get());
+    ALOGV("%s: %p chain %p effect %p", __func__, this, chain.get(), effect.get());
 
     if (chain->getEffectFromId_l(effect->id()) != 0) {
-        ALOGW("addEffect_l() %p effect %s already present in chain %p",
-                this, effect->desc().name, chain.get());
+        ALOGW("%s: %p effect %s already present in chain %p",
+                __func__, this, effect->desc().name, chain.get());
         return BAD_VALUE;
     }
 
@@ -1865,7 +1867,7 @@
 {
     effectChains = mEffectChains;
     for (size_t i = 0; i < mEffectChains.size(); i++) {
-        mEffectChains[i]->lock();
+        mEffectChains[i]->mutex().lock();
     }
 }
 
@@ -1874,13 +1876,13 @@
 NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::unlock()
 {
     for (size_t i = 0; i < effectChains.size(); i++) {
-        effectChains[i]->unlock();
+        effectChains[i]->mutex().unlock();
     }
 }
 
 sp<IAfEffectChain> ThreadBase::getEffectChain(audio_session_t sessionId) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return getEffectChain_l(sessionId);
 }
 
@@ -1898,7 +1900,7 @@
 
 void ThreadBase::setMode(audio_mode_t mode)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffectChains.size();
     for (size_t i = 0; i < size; i++) {
         mEffectChains[i]->setMode_l(mode);
@@ -1918,7 +1920,7 @@
 
 void ThreadBase::systemReady()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mSystemReady) {
         return;
     }
@@ -1977,7 +1979,7 @@
 }
 
 template <typename T>
-void ThreadBase::ActiveTracks<T>::updatePowerState(
+void ThreadBase::ActiveTracks<T>::updatePowerState_l(
         const sp<ThreadBase>& thread, bool force) {
     // Updates ActiveTracks client uids to the thread wakelock.
     if (mActiveTracksGeneration != mLastActiveTracksGeneration || force) {
@@ -2016,12 +2018,13 @@
     // If threadLoop is currently unlocked a signal of mWaitWorkCV will
     // be lost so we also flag to prevent it blocking on mWaitWorkCV
     mSignalPending = true;
-    mWaitWorkCV.broadcast();
+    mWaitWorkCV.notify_all();
 }
 
 // Call only from threadLoop() or when it is idle.
 // Do not call from high performance code as this may do binder rpc to the MediaMetrics service.
 void ThreadBase::sendStatistics(bool force)
+NO_THREAD_SAFETY_ANALYSIS
 {
     // Do not log if we have no stats.
     // We choose the timestamp verifier because it is the most likely item to be present.
@@ -2053,8 +2056,8 @@
     item->setInt64(MM_PREFIX "channelMask", (int64_t)mChannelMask);
     item->setCString(MM_PREFIX "encoding", toString(mFormat).c_str());
     item->setInt32(MM_PREFIX "frameCount", (int32_t)mFrameCount);
-    item->setCString(MM_PREFIX "outDevice", toString(outDeviceTypes()).c_str());
-    item->setCString(MM_PREFIX "inDevice", toString(inDeviceType()).c_str());
+    item->setCString(MM_PREFIX "outDevice", toString(outDeviceTypes_l()).c_str());
+    item->setCString(MM_PREFIX "inDevice", toString(inDeviceType_l()).c_str());
 
     // thread statistics
     if (mIoJitterMs.getN() > 0) {
@@ -2092,7 +2095,7 @@
     return AudioSystem::getStrategyForStream(stream);
 }
 
-// startMelComputation_l() must be called with AudioFlinger::mLock held
+// startMelComputation_l() must be called with AudioFlinger::mutex() held
 void ThreadBase::startMelComputation_l(
         const sp<audio_utils::MelProcessor>& /*processor*/)
 {
@@ -2100,7 +2103,7 @@
     ALOGW("%s: ThreadBase does not support CSD", __func__);
 }
 
-// stopMelComputation_l() must be called with AudioFlinger::mLock held
+// stopMelComputation_l() must be called with AudioFlinger::mutex() held
 void ThreadBase::stopMelComputation_l()
 {
     // Do nothing
@@ -2156,7 +2159,7 @@
     snprintf(mThreadName, kThreadNameLength, "AudioOut_%X", id);
     mNBLogWriter = afThreadCallback->newWriter_l(kLogSize, mThreadName);
 
-    // Assumes constructor is called by AudioFlinger with it's mLock held, but
+    // Assumes constructor is called by AudioFlinger with its mutex() held, but
     // it would be safer to explicitly pass initial masterVolume/masterMute as
     // parameter.
     //
@@ -2356,7 +2359,7 @@
     }
 }
 
-// PlaybackThread::createTrack_l() must be called with AudioFlinger::mLock held
+// PlaybackThread::createTrack_l() must be called with AudioFlinger::mutex() held
 sp<IAfTrack> PlaybackThread::createTrack_l(
         const sp<Client>& client,
         audio_stream_type_t streamType,
@@ -2378,7 +2381,8 @@
         audio_port_handle_t portId,
         const sp<media::IAudioTrackCallback>& callback,
         bool isSpatialized,
-        bool isBitPerfect)
+        bool isBitPerfect,
+        audio_output_flags_t *afTrackFlags)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2460,8 +2464,8 @@
         }
 
         // check compatibility with audio effects.
-        { // scope for mLock
-            Mutex::Autolock _l(mLock);
+        { // scope for mutex()
+            audio_utils::lock_guard _l(mutex());
             for (audio_session_t session : {
                     AUDIO_SESSION_DEVICE,
                     AUDIO_SESSION_OUTPUT_STAGE,
@@ -2667,8 +2671,8 @@
         goto Exit;
     }
 
-    { // scope for mLock
-        Mutex::Autolock _l(mLock);
+    { // scope for mutex()
+        audio_utils::lock_guard _l(mutex());
 
         // all tracks in same audio session must share the same routing strategy otherwise
         // conflicts will happen when tracks are moved from one output to another by audio policy
@@ -2696,6 +2700,7 @@
         if (mType == DIRECT) {
             trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
         }
+        *afTrackFlags = trackFlags;
 
         track = IAfTrack::create(this, client, streamType, attr, sampleRate, format,
                           channelMask, frameCount,
@@ -2712,7 +2717,7 @@
         }
         mTracks.add(track);
         {
-            Mutex::Autolock _atCbL(mAudioTrackCbLock);
+            audio_utils::lock_guard _atCbL(audioTrackCbMutex());
             if (callback.get() != nullptr) {
                 mAudioTrackCallbacks.emplace(track, callback);
             }
@@ -2764,10 +2769,12 @@
 
 uint32_t PlaybackThread::latency() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return latency_l();
 }
 uint32_t PlaybackThread::latency_l() const
+NO_THREAD_SAFETY_ANALYSIS
+// Fix later.
 {
     uint32_t latency;
     if (initCheck() == NO_ERROR && mOutput->stream->getLatency(&latency) == OK) {
@@ -2778,7 +2785,7 @@
 
 void PlaybackThread::setMasterVolume(float value)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // Don't apply master volume in SW if our HAL can do it for us.
     if (mOutput && mOutput->audioHwDev &&
         mOutput->audioHwDev->canSetMasterVolume()) {
@@ -2798,7 +2805,7 @@
     if (isDuplicating()) {
         return;
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // Don't apply master mute in SW if our HAL can do it for us.
     if (mOutput && mOutput->audioHwDev &&
         mOutput->audioHwDev->canSetMasterMute()) {
@@ -2810,21 +2817,21 @@
 
 void PlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mStreamTypes[stream].volume = value;
     broadcast_l();
 }
 
 void PlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mStreamTypes[stream].mute = muted;
     broadcast_l();
 }
 
 float PlaybackThread::streamVolume(audio_stream_type_t stream) const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return mStreamTypes[stream].volume;
 }
 
@@ -2833,9 +2840,9 @@
     mOutput->stream->setVolume(left, right);
 }
 
-// addTrack_l() must be called with ThreadBase::mLock held
+// addTrack_l() must be called with ThreadBase::mutex() held
 status_t PlaybackThread::addTrack_l(const sp<IAfTrack>& track)
-NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mutex()
 {
     status_t status = ALREADY_EXISTS;
 
@@ -2845,15 +2852,15 @@
         // effectively get the latency it requested.
         if (track->isExternalTrack()) {
             IAfTrackBase::track_state state = track->state();
-            mLock.unlock();
+            mutex().unlock();
             status = AudioSystem::startOutput(track->portId());
-            mLock.lock();
+            mutex().lock();
             // abort track was stopped/paused while we released the lock
             if (state != track->state()) {
                 if (status == NO_ERROR) {
-                    mLock.unlock();
+                    mutex().unlock();
                     AudioSystem::stopOutput(track->portId());
-                    mLock.lock();
+                    mutex().lock();
                 }
                 return INVALID_OPERATION;
             }
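
The pattern in the hunk above (drop the thread mutex around AudioSystem::startOutput(), then re-check the track state once the lock is re-taken) generalizes: anything observed before the unlock may have changed by the time the lock is re-acquired. A small std::mutex sketch of that re-validation, with hypothetical names standing in for the binder call:

    #include <mutex>

    enum class State { Active, Stopped };

    struct TrackLike {
        std::mutex mutex;
        State state = State::Active;          // protected by mutex
    };

    // Stand-ins for blocking calls into another service that must not be made
    // while holding our lock (they may call back into us on another thread).
    bool startOutputElsewhere() { return true; }
    void stopOutputElsewhere() {}

    bool startTrack(TrackLike& track) {
        std::unique_lock<std::mutex> lock(track.mutex);
        const State observed = track.state;   // snapshot before dropping the lock
        lock.unlock();
        const bool started = startOutputElsewhere();
        lock.lock();
        if (track.state != observed) {        // state changed while we were unlocked
            if (started) {
                lock.unlock();
                stopOutputElsewhere();        // undo the side effect, again unlocked
                lock.lock();
            }
            return false;
        }
        return started;
    }
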
@@ -2892,17 +2899,17 @@
                         || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
-            mLock.unlock();
+            mutex().unlock();
             const os::HapticScale intensity = afutils::onExternalVibrationStart(
                     track->getExternalVibration());
             std::optional<media::AudioVibratorInfo> vibratorInfo;
             {
                 // TODO(b/184194780): Use the vibrator information from the vibrator that will be
                 // used to play this track.
-                Mutex::Autolock _l(mAfThreadCallback->mutex());
+                audio_utils::lock_guard _l(mAfThreadCallback->mutex());
                 vibratorInfo = std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
             }
-            mLock.lock();
+            mutex().lock();
             track->setHapticIntensity(intensity);
             if (vibratorInfo) {
                 track->setHapticMaxAmplitude(vibratorInfo->maxAmplitude);
@@ -2968,7 +2975,7 @@
 
     mTracks.remove(track);
     {
-        Mutex::Autolock _atCbL(mAudioTrackCbLock);
+        audio_utils::lock_guard _atCbL(audioTrackCbMutex());
         mAudioTrackCallbacks.erase(track);
     }
     if (track->isFastTrack()) {
@@ -2987,7 +2994,7 @@
 
 String8 PlaybackThread::getParameters(const String8& keys)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     String8 out_s8;
     if (initCheck() == NO_ERROR && mOutput->stream->getParameters(keys, &out_s8) == OK) {
         return out_s8;
@@ -2996,14 +3003,14 @@
 }
 
 status_t DirectOutputThread::selectPresentation(int presentationId, int programId) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (!isStreamInitialized()) {
         return NO_INIT;
     }
     return mOutput->stream->selectPresentation(presentationId, programId);
 }
 
-void PlaybackThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
+void PlaybackThread::ioConfigChanged_l(audio_io_config_event_t event, pid_t pid,
                                                    audio_port_handle_t portId) {
     ALOGV("PlaybackThread::ioConfigChanged, thread %p, event %d", this, event);
     sp<AudioIoDescriptor> desc;
@@ -3025,7 +3032,7 @@
         desc = sp<AudioIoDescriptor>::make(mId);
         break;
     }
-    mAfThreadCallback->ioConfigChanged(event, desc, pid);
+    mAfThreadCallback->ioConfigChanged_l(event, desc, pid);
 }
 
 void PlaybackThread::onWriteReady()
@@ -3066,7 +3073,7 @@
             audio_utils::metadata::ByteString metaDataStr =
                     audio_utils::metadata::byteStringFromData(metadata);
             std::vector metadataVec(metaDataStr.begin(), metaDataStr.end());
-            Mutex::Autolock _l(mAudioTrackCbLock);
+            audio_utils::lock_guard _l(audioTrackCbMutex());
             for (const auto& callbackPair : mAudioTrackCallbacks) {
                 callbackPair.second->onCodecFormatChanged(metadataVec);
             }
@@ -3075,17 +3082,17 @@
 
 void PlaybackThread::resetWriteBlocked(uint32_t sequence)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // reject out of sequence requests
     if ((mWriteAckSequence & 1) && (sequence == mWriteAckSequence)) {
         mWriteAckSequence &= ~1;
-        mWaitWorkCV.signal();
+        mWaitWorkCV.notify_one();
     }
 }
 
 void PlaybackThread::resetDraining(uint32_t sequence)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // reject out of sequence requests
     if ((mDrainSequence & 1) && (sequence == mDrainSequence)) {
         // Register discontinuity when HW drain is completed because that can cause
@@ -3094,11 +3101,13 @@
         // elsewhere, e.g. in flush).
         mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
         mDrainSequence &= ~1;
-        mWaitWorkCV.signal();
+        mWaitWorkCV.notify_one();
     }
 }
 
 void PlaybackThread::readOutputParameters_l()
+NO_THREAD_SAFETY_ANALYSIS
+// 'moveEffectChain_ll' requires holding mutex 'AudioFlinger_Mutex' exclusively
 {
     // unfortunately we have no way of recovering from errors here, hence the LOG_ALWAYS_FATAL
     const audio_config_base_t audioConfig = mOutput->getAudioProperties();
@@ -3209,8 +3218,8 @@
     if (hasMixer()) {
         mNormalFrameCount = (mNormalFrameCount + 15) & ~15;
     }
-    ALOGI("HAL output buffer size %zu frames, normal sink buffer size %zu frames", mFrameCount,
-            mNormalFrameCount);
+    ALOGI("HAL output buffer size %zu frames, normal sink buffer size %zu frames",
+            (size_t)mFrameCount, mNormalFrameCount);
 
     // Check if we want to throttle the processing to no more than 2x normal rate
     mThreadThrottle = property_get_bool("af.thread.throttle", true /* default_value */);
@@ -3263,13 +3272,14 @@
 
     // force reconfiguration of effect chains and engines to take new buffer size and audio
     // parameters into account
-    // Note that mLock is not held when readOutputParameters_l() is called from the constructor
+    // Note that mutex() is not held when readOutputParameters_l() is called from the constructor
     // but in this case nothing is done below as no audio sessions have effect yet so it doesn't
     // matter.
-    // create a copy of mEffectChains as calling moveEffectChain_l() can reorder some effect chains
+    // create a copy of mEffectChains as calling moveEffectChain_ll()
+    // can reorder some effect chains
     Vector<sp<IAfEffectChain>> effectChains = mEffectChains;
     for (size_t i = 0; i < effectChains.size(); i ++) {
-        mAfThreadCallback->moveEffectChain_l(effectChains[i]->sessionId(),
+        mAfThreadCallback->moveEffectChain_ll(effectChains[i]->sessionId(),
             this/* srcThread */, this/* dstThread */);
     }
 
@@ -3327,7 +3337,7 @@
     if (halFrames == NULL || dspFrames == NULL) {
         return BAD_VALUE;
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (initCheck() != NO_ERROR) {
         return INVALID_OPERATION;
     }
@@ -3368,13 +3378,13 @@
 
 AudioStreamOut* PlaybackThread::getOutput() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return mOutput;
 }
 
 AudioStreamOut* PlaybackThread::clearOutput()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     AudioStreamOut *output = mOutput;
     mOutput = NULL;
     // FIXME FastMixer might also have a raw ptr to mOutputSink;
@@ -3385,7 +3395,7 @@
     return output;
 }
 
-// this method must always be called either with ThreadBase mLock held or inside the thread loop
+// this method must always be called either with ThreadBase mutex() held or inside the thread loop
 sp<StreamHalInterface> PlaybackThread::stream() const
 {
     if (mOutput == NULL) {
@@ -3405,7 +3415,7 @@
         return BAD_VALUE;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     for (size_t i = 0; i < mTracks.size(); ++i) {
         sp<IAfTrack> track = mTracks[i];
@@ -3446,7 +3456,7 @@
             ALOGD("ro.audio.silent is ignored since no output device is set");
             return;
         }
-        if (isSingleDeviceType(outDeviceTypes(), AUDIO_DEVICE_OUT_REMOTE_SUBMIX)) {
+        if (isSingleDeviceType(outDeviceTypes_l(), AUDIO_DEVICE_OUT_REMOTE_SUBMIX)) {
             ALOGD("ro.audio.silent will be ignored for threads on AUDIO_DEVICE_OUT_REMOTE_SUBMIX");
             return;
         }
@@ -3535,7 +3545,7 @@
     return bytesWritten;
 }
 
-// startMelComputation_l() must be called with AudioFlinger::mLock held
+// startMelComputation_l() must be called with AudioFlinger::mutex() held
 void PlaybackThread::startMelComputation_l(
         const sp<audio_utils::MelProcessor>& processor)
 {
@@ -3545,7 +3555,7 @@
     }
 }
 
-// stopMelComputation_l() must be called with AudioFlinger::mLock held
+// stopMelComputation_l() must be called with AudioFlinger::mutex() held
 void PlaybackThread::stopMelComputation_l()
 {
     auto outputSink = static_cast<AudioStreamOutSink*>(mOutputSink.get());
@@ -3573,7 +3583,7 @@
 void PlaybackThread::threadLoop_exit()
 {
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         for (size_t i = 0; i < mTracks.size(); i++) {
             sp<IAfTrack> track = mTracks[i];
             track->invalidate();
@@ -3616,7 +3626,7 @@
 
     // make sure standby delay is not too short when connected to an A2DP sink to avoid
     // truncating audio when going to standby.
-    if (!Intersection(outDeviceTypes(),  getAudioDeviceOutAllA2dpSet()).empty()) {
+    if (!Intersection(outDeviceTypes_l(),  getAudioDeviceOutAllA2dpSet()).empty()) {
         if (mStandbyDelayNs < kDefaultStandbyTimeInNsecs) {
             mStandbyDelayNs = kDefaultStandbyTimeInNsecs;
         }
@@ -3641,12 +3651,12 @@
 
 void PlaybackThread::invalidateTracks(audio_stream_type_t streamType)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     invalidateTracks_l(streamType);
 }
 
 void PlaybackThread::invalidateTracks(std::set<audio_port_handle_t>& portIds) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     invalidateTracks_l(portIds);
 }
 
@@ -3850,7 +3860,7 @@
 status_t PlaybackThread::attachAuxEffect(
         const sp<IAfTrack>& track, int EffectId)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return attachAuxEffect_l(track, EffectId);
 }
 
@@ -3951,11 +3961,11 @@
 
         // If the device is AUDIO_DEVICE_OUT_BUS, check for downstream latency.
         //
-        // Note: we access outDeviceTypes() outside of mLock.
-        if (isMsdDevice() && outDeviceTypes().count(AUDIO_DEVICE_OUT_BUS) != 0) {
+        // Note: we access outDeviceTypes_l() outside of mutex().
+        if (isMsdDevice() && outDeviceTypes_l().count(AUDIO_DEVICE_OUT_BUS) != 0) {
             // Here, we try for the AF lock, but do not block on it as the latency
             // is more informational.
-            if (mAfThreadCallback->mutex().tryLock() == NO_ERROR) {
+            if (mAfThreadCallback->mutex().try_lock()) {
                 std::vector<SoftwarePatch> swPatches;
                 double latencyMs = 0.; // not required; initialized for clang-tidy
                 status_t status = INVALID_OPERATION;
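
The tryLock() -> try_lock() change above keeps the same best-effort behavior: the latency query is informational, so the thread loop must not block on the AudioFlinger mutex. A minimal std::mutex sketch of that non-blocking probe (hypothetical names):

    #include <mutex>
    #include <optional>

    std::mutex gServiceMutex;
    double gDownstreamLatencyMs = 0.0;        // protected by gServiceMutex

    // Returns the latency only if the lock could be taken immediately; callers
    // fall back to their previous estimate instead of blocking.
    std::optional<double> tryGetDownstreamLatencyMs() {
        std::unique_lock<std::mutex> lock(gServiceMutex, std::try_to_lock);
        if (!lock.owns_lock()) {
            return std::nullopt;
        }
        return gDownstreamLatencyMs;
    }
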
@@ -3997,16 +4007,16 @@
         }
 
         MetadataUpdate metadataUpdate;
-        { // scope for mLock
+        { // scope for mutex()
 
-            Mutex::Autolock _l(mLock);
+            audio_utils::unique_lock _l(mutex());
 
             processConfigEvents_l();
             if (mCheckOutputStageEffects.load()) {
                 continue;
             }
 
-            // See comment at declaration of logString for why this is done under mLock
+            // See comment at declaration of logString for why this is done under mutex()
             if (logString != NULL) {
                 mNBLogWriter->logTimestamp();
                 mNBLogWriter->log(logString);
@@ -4031,8 +4041,9 @@
 
                 const int64_t waitNs = computeWaitTimeNs_l();
                 ALOGV("wait async completion (wait time: %lld)", (long long)waitNs);
-                status_t status = mWaitWorkCV.waitRelative(mLock, waitNs);
-                if (status == TIMED_OUT) {
+                std::cv_status cvstatus =
+                        mWaitWorkCV.wait_for(_l, std::chrono::nanoseconds(waitNs));
+                if (cvstatus == std::cv_status::timeout) {
                     mSignalPending = true; // if timeout recheck everything
                 }
                 ALOGV("async completion/wake");
@@ -4074,7 +4085,7 @@
                     releaseWakeLock_l();
                     // wait until we have something to do...
                     ALOGV("%s going to sleep", myName.c_str());
-                    mWaitWorkCV.wait(mLock);
+                    mWaitWorkCV.wait(_l);
                     ALOGV("%s waking up", myName.c_str());
                     acquireWakeLock_l();
 
@@ -4096,7 +4107,7 @@
             // mMixerStatusIgnoringFastTracks is also updated internally
             mMixerStatus = prepareTracks_l(&tracksToRemove);
 
-            mActiveTracks.updatePowerState(this);
+            mActiveTracks.updatePowerState_l(this);
 
             metadataUpdate = updateMetadata_l();
 
@@ -4147,9 +4158,9 @@
                     && (mKernelPositionOnStandby
                             != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
                 mHalStarted = true;
-                mWaitHalStartCV.broadcast();
+                mWaitHalStartCV.notify_all();
             }
-        } // mLock scope ends
+        } // mutex() scope ends
 
         if (mBytesRemaining == 0) {
             mCurrentWriteLength = 0;
@@ -4380,7 +4391,7 @@
                                 const double processMs =
                                        (lastIoBeginNs - mLastIoEndNs) * 1e-6;
 
-                                Mutex::Autolock _l(mLock);
+                                audio_utils::lock_guard _l(mutex());
                                 mIoJitterMs.add(jitterMs);
                                 mProcessTimeMs.add(processMs);
 
@@ -4469,9 +4480,10 @@
                                 // notify of throttle end on debug log
                                 // but prevent spamming for bluetooth
                                 ALOGD_IF(!isSingleDeviceType(
-                                                 outDeviceTypes(), audio_is_a2dp_out_device) &&
+                                                 outDeviceTypes_l(), audio_is_a2dp_out_device) &&
                                          !isSingleDeviceType(
-                                                 outDeviceTypes(), audio_is_hearing_aid_out_device),
+                                                 outDeviceTypes_l(),
+                                                 audio_is_hearing_aid_out_device),
                                         "mixer(%p) throttle end: throttle time(%u)", this, diff);
                                 mThreadThrottleEndMs = mThreadThrottleTimeMs;
                             }
@@ -4481,7 +4493,7 @@
 
             } else {
                 ATRACE_BEGIN("sleep");
-                Mutex::Autolock _l(mLock);
+                audio_utils::unique_lock _l(mutex());
                 // suspended requires accurate metering of sleep time.
                 if (isSuspended()) {
                     // advance by expected sleepTime
@@ -4506,7 +4518,7 @@
                     mSleepTimeUs = deltaNs / 1000;
                 }
                 if (!mSignalPending && mConfigEvents.isEmpty() && !exitPending()) {
-                    mWaitWorkCV.waitRelative(mLock, microseconds((nsecs_t)mSleepTimeUs));
+                    mWaitWorkCV.wait_for(_l, std::chrono::microseconds(mSleepTimeUs));
                 }
                 ATRACE_END();
             }
@@ -4567,7 +4579,7 @@
                 timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
                 mSampleRate);
 
-        if (isTimestampCorrectionEnabled()) {
+        if (isTimestampCorrectionEnabled_l()) {
             ALOGVV("TS_BEFORE: %d %lld %lld", id(),
                     (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
                     (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
@@ -4679,9 +4691,9 @@
 #endif
 }
 
-// removeTracks_l() must be called with ThreadBase::mLock held
+// removeTracks_l() must be called with ThreadBase::mutex() held
 void PlaybackThread::removeTracks_l(const Vector<sp<IAfTrack>>& tracksToRemove)
-NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mutex()
 {
     for (const auto& track : tracksToRemove) {
         mActiveTracks.remove(track);
@@ -4707,11 +4719,11 @@
         if (mHapticChannelCount > 0 &&
                 ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
                         || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
-            mLock.unlock();
+            mutex().unlock();
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             afutils::onExternalVibrationStop(track->getExternalVibration());
-            mLock.lock();
+            mutex().lock();
 
             // When the track is stop, set the haptic intensity as MUTE
             // for the HapticGenerator effect.
@@ -4909,13 +4921,13 @@
 
 void PlaybackThread::addPatchTrack(const sp<IAfPatchTrack>& track)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mTracks.add(track);
 }
 
 void PlaybackThread::deletePatchTrack(const sp<IAfPatchTrack>& track)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     destroyTrack_l(track);
 }
 
@@ -5165,7 +5177,7 @@
 void MixerThread::onFirstRef() {
     PlaybackThread::onFirstRef();
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mOutput != nullptr && mOutput->stream != nullptr) {
         status_t status = mOutput->stream->setLatencyModeCallback(this);
         if (status != INVALID_OPERATION) {
@@ -5280,7 +5292,7 @@
 
 bool PlaybackThread::waitingAsyncCallback()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return waitingAsyncCallback_l();
 }
 
@@ -5385,7 +5397,7 @@
     // TODO add standby time extension fct of effect tail
 }
 
-// prepareTracks_l() must be called with ThreadBase::mLock held
+// prepareTracks_l() must be called with ThreadBase::mutex() held
 PlaybackThread::mixer_state MixerThread::prepareTracks_l(
         Vector<sp<IAfTrack>>* tracksToRemove)
 {
@@ -6181,7 +6193,7 @@
     return mixerStatus;
 }
 
-// trackCountForUid_l() must be called with ThreadBase::mLock held
+// trackCountForUid_l() must be called with ThreadBase::mutex() held
 uint32_t PlaybackThread::trackCountForUid_l(uid_t uid) const
 {
     uint32_t trackCount = 0;
@@ -6224,7 +6236,7 @@
     mPreviousNs = 0;
 }
 
-// isTrackAllowed_l() must be called with ThreadBase::mLock held
+// isTrackAllowed_l() must be called with ThreadBase::mutex() held
 bool MixerThread::isTrackAllowed_l(
         audio_channel_mask_t channelMask, audio_format_t format,
         audio_session_t sessionId, uid_t uid) const
@@ -6244,7 +6256,7 @@
     return true;
 }
 
-// checkForNewParameter_l() must be called with ThreadBase::mLock held
+// checkForNewParameter_l() must be called with ThreadBase::mutex() held
 bool MixerThread::checkForNewParameter_l(const String8& keyValuePair,
                                                        status_t& status)
 {
@@ -6462,14 +6474,14 @@
     if (modes == nullptr) {
         return BAD_VALUE;
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     *modes = mSupportedLatencyModes;
     return NO_ERROR;
 }
 
 void MixerThread::onRecommendedLatencyModeChanged(
         std::vector<audio_latency_mode_t> modes) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (modes != mSupportedLatencyModes) {
         ALOGD("%s: thread(%d) supported latency modes: %s",
             __func__, mId, toString(modes).c_str());
@@ -6520,7 +6532,7 @@
 
 void DirectOutputThread::setMasterBalance(float balance)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mMasterBalance != balance) {
         mMasterBalance.store(balance);
         mBalance.computeStereoBalance(balance, &mMasterBalanceLeft, &mMasterBalanceRight);
@@ -6900,7 +6912,7 @@
 void DirectOutputThread::threadLoop_exit()
 {
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         for (size_t i = 0; i < mTracks.size(); i++) {
             if (mTracks[i]->isFlushPending()) {
                 mTracks[i]->flushAck();
@@ -6931,7 +6943,7 @@
     return !mStandby && !(trackPaused || (mHwPaused && !trackStopped));
 }
 
-// checkForNewParameter_l() must be called with ThreadBase::mLock held
+// checkForNewParameter_l() must be called with ThreadBase::mutex() held
 bool DirectOutputThread::checkForNewParameter_l(const String8& keyValuePair,
                                                               status_t& status)
 {
@@ -7066,12 +7078,12 @@
         bool asyncError;
 
         {
-            Mutex::Autolock _l(mLock);
+            audio_utils::unique_lock _l(mutex());
             while (!((mWriteAckSequence & 1) ||
                      (mDrainSequence & 1) ||
                      mAsyncError ||
                      exitPending())) {
-                mWaitWorkCV.wait(mLock);
+                mWaitWorkCV.wait(_l);
             }
 
             if (exitPending()) {
@@ -7107,50 +7119,50 @@
 void AsyncCallbackThread::exit()
 {
     ALOGV("AsyncCallbackThread::exit");
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     requestExit();
-    mWaitWorkCV.broadcast();
+    mWaitWorkCV.notify_all();
 }
 
 void AsyncCallbackThread::setWriteBlocked(uint32_t sequence)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // bit 0 is cleared
     mWriteAckSequence = sequence << 1;
 }
 
 void AsyncCallbackThread::resetWriteBlocked()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // ignore unexpected callbacks
     if (mWriteAckSequence & 2) {
         mWriteAckSequence |= 1;
-        mWaitWorkCV.signal();
+        mWaitWorkCV.notify_one();
     }
 }
 
 void AsyncCallbackThread::setDraining(uint32_t sequence)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // bit 0 is cleared
     mDrainSequence = sequence << 1;
 }
 
 void AsyncCallbackThread::resetDraining()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // ignore unexpected callbacks
     if (mDrainSequence & 2) {
         mDrainSequence |= 1;
-        mWaitWorkCV.signal();
+        mWaitWorkCV.notify_one();
     }
 }
 
 void AsyncCallbackThread::setAsyncError()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mAsyncError = true;
-    mWaitWorkCV.signal();
+    mWaitWorkCV.notify_one();
 }
 
 
@@ -7179,6 +7191,7 @@
 {
     if (mFlushPending || mHwPaused) {
         // If a flush is pending or track was paused, just discard buffered data
+        audio_utils::lock_guard l(mutex());
         flushHw_l();
     } else {
         mMixerStatus = MIXER_DRAIN_ALL;
@@ -7446,7 +7459,7 @@
 
 bool OffloadThread::waitingAsyncCallback()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return waitingAsyncCallback_l();
 }
 
@@ -7473,14 +7486,14 @@
 
 void OffloadThread::invalidateTracks(audio_stream_type_t streamType)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (PlaybackThread::invalidateTracks_l(streamType)) {
         mFlushPending = true;
     }
 }
 
 void OffloadThread::invalidateTracks(std::set<audio_port_handle_t>& portIds) {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (PlaybackThread::invalidateTracks_l(portIds)) {
         mFlushPending = true;
     }
@@ -7622,7 +7635,7 @@
 
 void DuplicatingThread::addOutputTrack(IAfPlaybackThread* thread)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // The downstream MixerThread consumes thread->frameCount() amount of frames per mix pass.
     // Adjust for thread->sampleRate() to determine minimum buffer frame count.
     // Then triple buffer because Threads do not run synchronously and may not be clock locked.
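    The sizing comment above boils down to: scale the downstream thread's per-pass frame count by the ratio of source to sink sample rate (rounding up), then triple-buffer it. A minimal sketch of that arithmetic, with hypothetical names and not the actual OutputTrack construction code:

        #include <cstddef>
        #include <cstdint>

        // Minimum frames the duplicating (source) side must buffer for one downstream
        // mix pass, triple-buffered because the threads are not clock locked.
        static size_t minOutputTrackFrames(uint32_t sourceRate, uint32_t sinkRate,
                                           size_t sinkFrameCount) {
            const size_t framesPerPass =
                    (sinkFrameCount * sourceRate + sinkRate - 1) / sinkRate;  // round up
            return 3 * framesPerPass;
        }
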
@@ -7659,7 +7672,7 @@
 
 void DuplicatingThread::removeOutputTrack(IAfPlaybackThread* thread)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mOutputTracks.size(); i++) {
         if (mOutputTracks[i]->thread() == thread) {
             mOutputTracks[i]->destroy();
@@ -7674,7 +7687,7 @@
     ALOGV("removeOutputTrack(): unknown thread: %p", thread);
 }
 
-// caller must hold mLock
+// caller must hold mutex()
 void DuplicatingThread::updateWaitTime_l()
 {
     mWaitTimeMs = UINT_MAX;
@@ -7808,18 +7821,20 @@
     if (mode != AUDIO_LATENCY_MODE_LOW && mode != AUDIO_LATENCY_MODE_FREE) {
         return BAD_VALUE;
     }
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mRequestedLatencyMode = mode;
     return NO_ERROR;
 }
 
 void SpatializerThread::checkOutputStageEffects()
+NO_THREAD_SAFETY_ANALYSIS
+//  'createEffect_l' requires holding mutex 'AudioFlinger_Mutex' exclusively
 {
     bool hasVirtualizer = false;
     bool hasDownMixer = false;
     sp<IAfEffectHandle> finalDownMixer;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         sp<IAfEffectChain> chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_STAGE);
         if (chain != 0) {
             hasVirtualizer = chain->getEffectFromType_l(FX_IID_SPATIALIZER) != nullptr;
@@ -7860,7 +7875,7 @@
     }
 
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         mFinalDownMixer = finalDownMixer;
     }
 }
@@ -8066,13 +8081,13 @@
 void RecordThread::preExit()
 {
     ALOGV("  preExit()");
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mTracks.size(); i++) {
         sp<IAfRecordTrack> track = mTracks[i];
         track->invalidate();
     }
     mActiveTracks.clear();
-    mStartStopCond.broadcast();
+    mStartStopCV.notify_all();
 }
 
 bool RecordThread::threadLoop()
@@ -8084,7 +8099,7 @@
 reacquire_wakelock:
     sp<IAfRecordTrack> activeTrack;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         acquireWakeLock_l();
     }
 
@@ -8108,13 +8123,13 @@
 
         bool silenceFastCapture = false;
 
-        { // scope for mLock
-            Mutex::Autolock _l(mLock);
+        { // scope for mutex()
+            audio_utils::unique_lock _l(mutex());
 
             processConfigEvents_l();
 
             // check exitPending here because checkForNewParameters_l() and
-            // checkForNewParameters_l() can temporarily release mLock
+            // checkForNewParameters_l() can temporarily release mutex()
             if (exitPending()) {
                 break;
             }
@@ -8122,7 +8137,7 @@
             // sleep with mutex unlocked
             if (sleepUs > 0) {
                 ATRACE_BEGIN("sleepC");
-                mWaitWorkCV.waitRelative(mLock, microseconds((nsecs_t)sleepUs));
+                (void)mWaitWorkCV.wait_for(_l, std::chrono::microseconds(sleepUs));
                 ATRACE_END();
                 sleepUs = 0;
                 continue;
@@ -8136,7 +8151,7 @@
                 releaseWakeLock_l();
                 ALOGV("RecordThread: loop stopping");
                 // go to sleep
-                mWaitWorkCV.wait(mLock);
+                mWaitWorkCV.wait(_l);
                 ALOGV("RecordThread: loop starting");
                 goto reacquire_wakelock;
             }
@@ -8235,7 +8250,7 @@
 
             }
 
-            mActiveTracks.updatePowerState(this);
+            mActiveTracks.updatePowerState_l(this);
 
             updateMetadata_l();
 
@@ -8243,7 +8258,7 @@
                 standbyIfNotAlreadyInStandby();
             }
             if (doBroadcast) {
-                mStartStopCond.broadcast();
+                mStartStopCV.notify_all();
             }
 
             // sleep if there are no active tracks to process
@@ -8413,7 +8428,12 @@
                 mTimestampVerifier.add(position, time, mSampleRate);
 
                 // Correct timestamps
-                if (isTimestampCorrectionEnabled()) {
+                bool timestampCorrectionEnabled = false;
+                {
+                    audio_utils::lock_guard l(mutex());
+                    timestampCorrectionEnabled = isTimestampCorrectionEnabled_l();
+                }
+                if (timestampCorrectionEnabled) {
                     ALOGVV("TS_BEFORE: %d %lld %lld",
                             id(), (long long)time, (long long)position);
                     auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
@@ -8612,7 +8632,7 @@
                     {0, 0} /* lastTimestamp */, mSampleRate);
             const double processMs = (lastIoBeginNs - mLastIoEndNs) * 1e-6;
 
-            Mutex::Autolock _l(mLock);
+            audio_utils::lock_guard _l(mutex());
             mIoJitterMs.add(jitterMs);
             mProcessTimeMs.add(processMs);
         }
@@ -8625,13 +8645,13 @@
     standbyIfNotAlreadyInStandby();
 
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         for (size_t i = 0; i < mTracks.size(); i++) {
             sp<IAfRecordTrack> track = mTracks[i];
             track->invalidate();
         }
         mActiveTracks.clear();
-        mStartStopCond.broadcast();
+        mStartStopCV.notify_all();
     }
 
     releaseWakeLock();
@@ -8690,7 +8710,7 @@
     }
 }
 
-// RecordThread::createRecordTrack_l() must be called with AudioFlinger::mLock held
+// RecordThread::createRecordTrack_l() must be called with AudioFlinger::mutex() held
 sp<IAfRecordTrack> RecordThread::createRecordTrack_l(
         const sp<Client>& client,
         const audio_attributes_t& attr,
@@ -8782,7 +8802,7 @@
             mFastTrackAvail
         ) {
           // check compatibility with audio effects.
-          Mutex::Autolock _l(mLock);
+          audio_utils::lock_guard _l(mutex());
           // Do not accept FAST flag if the session has software effects
           sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
           if (chain != 0) {
@@ -8846,8 +8866,8 @@
     *pFrameCount = frameCount;
     *pNotificationFrameCount = notificationFrameCount;
 
-    { // scope for mLock
-        Mutex::Autolock _l(mLock);
+    { // scope for mutex()
+        audio_utils::lock_guard _l(mutex());
         int32_t startFrames = -1;
         if (!mSharedAudioPackageName.empty()
                 && mSharedAudioPackageName == attributionSource.packageName
@@ -8908,7 +8928,7 @@
 
     {
         // This section is a rendezvous between binder thread executing start() and RecordThread
-        AutoMutex lock(mLock);
+        audio_utils::lock_guard lock(mutex());
         if (recordTrack->isInvalid()) {
             recordTrack->clearSyncStartEvent();
             ALOGW("%s track %d: invalidated before startInput", __func__, recordTrack->portId());
@@ -8932,9 +8952,9 @@
         recordTrack->setState(IAfTrackBase::STARTING_1);
         mActiveTracks.add(recordTrack);
         if (recordTrack->isExternalTrack()) {
-            mLock.unlock();
+            mutex().unlock();
             status = AudioSystem::startInput(recordTrack->portId());
-            mLock.lock();
+            mutex().lock();
             if (recordTrack->isInvalid()) {
                 recordTrack->clearSyncStartEvent();
                 if (status == NO_ERROR && recordTrack->state() == IAfTrackBase::STARTING_1) {
@@ -8980,7 +9000,7 @@
         }
         recordTrack->setState(IAfTrackBase::STARTING_2);
         // signal thread to start
-        mWaitWorkCV.broadcast();
+        mWaitWorkCV.notify_all();
         return status;
     }
 }
@@ -9001,7 +9021,7 @@
 
 bool RecordThread::stop(IAfRecordTrack* recordTrack) {
     ALOGV("RecordThread::stop");
-    AutoMutex _l(mLock);
+    audio_utils::unique_lock _l(mutex());
     // if we're invalid, we can't be on the ActiveTracks.
     if (mActiveTracks.indexOf(recordTrack) < 0 || recordTrack->state() == IAfTrackBase::PAUSING) {
         return false;
@@ -9012,8 +9032,8 @@
     // NOTE: Waiting here is important to keep stop synchronous.
     // This is needed for proper patchRecord peer release.
     while (recordTrack->state() == IAfTrackBase::PAUSING && !recordTrack->isInvalid()) {
-        mWaitWorkCV.broadcast(); // signal thread to stop
-        mStartStopCond.wait(mLock);
+        mWaitWorkCV.notify_all(); // signal thread to stop
+        mStartStopCV.wait(_l);
     }
 
     if (recordTrack->state() == IAfTrackBase::PAUSED) { // successful stop
@@ -9042,7 +9062,7 @@
     audio_session_t eventSession = event->triggerSession();
     status_t ret = NAME_NOT_FOUND;
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     for (size_t i = 0; i < mTracks.size(); i++) {
         sp<IAfRecordTrack> track = mTracks[i];
@@ -9061,7 +9081,7 @@
         std::vector<media::MicrophoneInfoFw>* activeMicrophones) const
 {
     ALOGV("RecordThread::getActiveMicrophones");
-    AutoMutex _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (!isStreamInitialized()) {
         return NO_INIT;
     }
@@ -9073,7 +9093,7 @@
             audio_microphone_direction_t direction)
 {
     ALOGV("setPreferredMicrophoneDirection(%d)", direction);
-    AutoMutex _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (!isStreamInitialized()) {
         return NO_INIT;
     }
@@ -9083,7 +9103,7 @@
 status_t RecordThread::setPreferredMicrophoneFieldDimension(float zoom)
 {
     ALOGV("setPreferredMicrophoneFieldDimension(%f)", zoom);
-    AutoMutex _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (!isStreamInitialized()) {
         return NO_INIT;
     }
@@ -9093,7 +9113,7 @@
 status_t RecordThread::shareAudioHistory(
         const std::string& sharedAudioPackageName, audio_session_t sharedSessionId,
         int64_t sharedAudioStartMs) {
-    AutoMutex _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     return shareAudioHistory_l(sharedAudioPackageName, sharedSessionId, sharedAudioStartMs);
 }
 
@@ -9159,7 +9179,7 @@
     return change;
 }
 
-// destroyTrack_l() must be called with ThreadBase::mLock held
+// destroyTrack_l() must be called with ThreadBase::mutex() held
 void RecordThread::destroyTrack_l(const sp<IAfRecordTrack>& track)
 {
     track->terminate();
@@ -9259,7 +9279,7 @@
 
 void RecordThread::setRecordSilenced(audio_port_handle_t portId, bool silenced)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mTracks.size() ; i++) {
         sp<IAfRecordTrack> track = mTracks[i];
         if (track != 0 && track->portId() == portId) {
@@ -9384,7 +9404,7 @@
 
 void RecordThread::checkBtNrec()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     checkBtNrec_l();
 }
 
@@ -9392,7 +9412,7 @@
 {
     // disable AEC and NS if the device is a BT SCO headset supporting those
     // pre processings
-    bool suspend = audio_is_bluetooth_sco_device(inDeviceType()) &&
+    bool suspend = audio_is_bluetooth_sco_device(inDeviceType_l()) &&
                         mAfThreadCallback->btNrecIsOff();
     if (mBtNrecSuspended.exchange(suspend) != suspend) {
         for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -9493,7 +9513,7 @@
 
 String8 RecordThread::getParameters(const String8& keys)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (initCheck() == NO_ERROR) {
         String8 out_s8;
         if (mInput->stream->getParameters(keys, &out_s8) == OK) {
@@ -9503,7 +9523,7 @@
     return {};
 }
 
-void RecordThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
+void RecordThread::ioConfigChanged_l(audio_io_config_event_t event, pid_t pid,
                                                  audio_port_handle_t portId) {
     sp<AudioIoDescriptor> desc;
     switch (event) {
@@ -9521,7 +9541,7 @@
         desc = sp<AudioIoDescriptor>::make(mId);
         break;
     }
-    mAfThreadCallback->ioConfigChanged(event, desc, pid);
+    mAfThreadCallback->ioConfigChanged_l(event, desc, pid);
 }
 
 void RecordThread::readInputParameters_l()
@@ -9569,7 +9589,7 @@
 
 uint32_t RecordThread::getInputFramesLost() const
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     uint32_t result;
     if (initCheck() == NO_ERROR && mInput->stream->getInputFramesLost(&result) == OK) {
         return result;
@@ -9580,7 +9600,7 @@
 KeyedVector<audio_session_t, bool> RecordThread::sessionIds() const
 {
     KeyedVector<audio_session_t, bool> ids;
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (size_t j = 0; j < mTracks.size(); ++j) {
         sp<IAfRecordTrack> track = mTracks[j];
         audio_session_t sessionId = track->sessionId();
@@ -9593,14 +9613,14 @@
 
 AudioStreamIn* RecordThread::clearInput()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     AudioStreamIn *input = mInput;
     mInput = NULL;
     mInputSource.clear();
     return input;
 }
 
-// this method must always be called either with ThreadBase mLock held or inside the thread loop
+// this method must always be called either with ThreadBase mutex() held or inside the thread loop
 sp<StreamHalInterface> RecordThread::stream() const
 {
     if (mInput == NULL) {
@@ -9718,7 +9738,7 @@
 
 void RecordThread::updateOutDevices(const DeviceDescriptorBaseVector& outDevices)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mOutDevices = outDevices;
     mOutDeviceTypeAddrs = deviceTypeAddrsFromDescriptors(mOutDevices);
     for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -9855,7 +9875,7 @@
 
 void RecordThread::addPatchTrack(const sp<IAfPatchRecord>& record)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     mTracks.add(record);
     if (record->getSource()) {
         mSource = record->getSource();
@@ -9864,7 +9884,7 @@
 
 void RecordThread::deletePatchTrack(const sp<IAfPatchRecord>& record)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (mSource == record->getSource()) {
         mSource = mInput;
     }
@@ -9989,7 +10009,7 @@
 {
     ActiveTracks<IAfMmapTrack> activeTracks;
     {
-        Mutex::Autolock _l(mLock);
+        audio_utils::lock_guard _l(mutex());
         for (const sp<IAfMmapTrack>& t : mActiveTracks) {
             activeTracks.add(t);
         }
@@ -10133,24 +10153,24 @@
         {
             // Add the track record before starting input so that the silent status for the
             // client can be cached.
-            Mutex::Autolock _l(mLock);
+            audio_utils::lock_guard _l(mutex());
             setClientSilencedState_l(portId, false /*silenced*/);
         }
         ret = AudioSystem::startInput(portId);
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // abort if start is rejected by audio policy manager
     if (ret != NO_ERROR) {
         ALOGE("%s: error start rejected by AudioPolicyManager = %d", __FUNCTION__, ret);
         if (!mActiveTracks.isEmpty()) {
-            mLock.unlock();
+            mutex().unlock();
             if (isOutput()) {
                 AudioSystem::releaseOutput(portId);
             } else {
                 AudioSystem::releaseInput(portId);
             }
-            mLock.lock();
+            mutex().lock();
         } else {
             mHalStream->stop();
         }
@@ -10215,7 +10235,7 @@
         return NO_ERROR;
     }
 
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
 
     sp<IAfMmapTrack> track;
     for (const sp<IAfMmapTrack>& t : mActiveTracks) {
@@ -10231,7 +10251,7 @@
     mActiveTracks.remove(track);
     eraseClientSilencedState_l(track->portId());
 
-    mLock.unlock();
+    mutex().unlock();
     if (isOutput()) {
         AudioSystem::stopOutput(track->portId());
         AudioSystem::releaseOutput(track->portId());
@@ -10239,7 +10259,7 @@
         AudioSystem::stopInput(track->portId());
         AudioSystem::releaseInput(track->portId());
     }
-    mLock.lock();
+    mutex().lock();
 
     sp<IAfEffectChain> chain = getEffectChain_l(track->sessionId());
     if (chain != 0) {
@@ -10319,7 +10339,10 @@
 
 bool MmapThread::threadLoop()
 {
-    checkSilentMode_l();
+    {
+        audio_utils::unique_lock _l(mutex());
+        checkSilentMode_l();
+    }
 
     const String8 myName(String8::format("thread %p type %d TID %d", this, mType, gettid()));
 
@@ -10328,7 +10351,7 @@
         Vector<sp<IAfEffectChain>> effectChains;
 
         { // under Thread lock
-        Mutex::Autolock _l(mLock);
+        audio_utils::unique_lock _l(mutex());
 
         if (mSignalPending) {
             // A signal was raised while we were unlocked
@@ -10344,7 +10367,7 @@
 
                 // wait until we have something to do...
                 ALOGV("%s going to sleep", myName.c_str());
-                mWaitWorkCV.wait(mLock);
+                mWaitWorkCV.wait(_l);
                 ALOGV("%s waking up", myName.c_str());
 
                 checkSilentMode_l();
@@ -10359,7 +10382,7 @@
 
         checkInvalidTracks_l();
 
-        mActiveTracks.updatePowerState(this);
+        mActiveTracks.updatePowerState_l(this);
 
         updateMetadata_l();
 
@@ -10387,7 +10410,7 @@
     return false;
 }
 
-// checkForNewParameter_l() must be called with ThreadBase::mLock held
+// checkForNewParameter_l() must be called with ThreadBase::mutex() held
 bool MmapThread::checkForNewParameter_l(const String8& keyValuePair,
                                                               status_t& status)
 {
@@ -10408,7 +10431,7 @@
 
 String8 MmapThread::getParameters(const String8& keys)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     String8 out_s8;
     if (initCheck() == NO_ERROR && mHalStream->getParameters(keys, &out_s8) == OK) {
         return out_s8;
@@ -10416,7 +10439,7 @@
     return {};
 }
 
-void MmapThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
+void MmapThread::ioConfigChanged_l(audio_io_config_event_t event, pid_t pid,
                                                audio_port_handle_t portId __unused) {
     sp<AudioIoDescriptor> desc;
     bool isInput = false;
@@ -10438,12 +10461,12 @@
         desc = sp<AudioIoDescriptor>::make(mId);
         break;
     }
-    mAfThreadCallback->ioConfigChanged(event, desc, pid);
+    mAfThreadCallback->ioConfigChanged_l(event, desc, pid);
 }
 
 status_t MmapThread::createAudioPatch_l(const struct audio_patch* patch,
                                                           audio_patch_handle_t *handle)
-NO_THREAD_SAFETY_ANALYSIS  // elease and re-acquire mLock
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mutex()
 {
     status_t status = NO_ERROR;
 
@@ -10519,9 +10542,9 @@
         }
         sp<MmapStreamCallback> callback = mCallback.promote();
         if (mDeviceId != deviceId && callback != 0) {
-            mLock.unlock();
+            mutex().unlock();
             callback->onRoutingChanged(deviceId);
-            mLock.lock();
+            mutex().lock();
         }
         mPatch = *patch;
         mDeviceId = deviceId;
@@ -10672,7 +10695,7 @@
 }
 
 void MmapThread::checkInvalidTracks_l()
-NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mutex()
 {
     sp<MmapStreamCallback> callback;
     for (const sp<IAfMmapTrack>& track : mActiveTracks) {
@@ -10686,9 +10709,9 @@
         }
     }
     if (callback != 0) {
-        mLock.unlock();
+        mutex().unlock();
         callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
-        mLock.lock();
+        mutex().lock();
     }
 }
 
@@ -10734,14 +10757,24 @@
         AudioHwDevice *hwDev,  AudioStreamOut *output, bool systemReady)
     : MmapThread(afThreadCallback, id, hwDev, output->stream, systemReady, true /* isOut */),
       mStreamType(AUDIO_STREAM_MUSIC),
-      mStreamVolume(1.0),
-      mStreamMute(false),
       mOutput(output)
 {
     snprintf(mThreadName, kThreadNameLength, "AudioMmapOut_%X", id);
     mChannelCount = audio_channel_count_from_out_mask(mChannelMask);
     mMasterVolume = afThreadCallback->masterVolume_l();
     mMasterMute = afThreadCallback->masterMute_l();
+
+    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+        const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
+        mStreamTypes[stream].volume = 0.0f;
+        mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+    }
+    // Audio patch and call assistant volume are always max
+    mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
+    mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
+    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
+    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
+
     if (mAudioHwDev) {
         if (mAudioHwDev->canSetMasterVolume()) {
             mMasterVolume = 1.0;
@@ -10766,7 +10799,7 @@
 
 AudioStreamOut* MmapPlaybackThread::clearOutput()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     AudioStreamOut *output = mOutput;
     mOutput = NULL;
     return output;
@@ -10774,7 +10807,7 @@
 
 void MmapPlaybackThread::setMasterVolume(float value)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // Don't apply master volume in SW if our HAL can do it for us.
     if (mAudioHwDev &&
             mAudioHwDev->canSetMasterVolume()) {
@@ -10786,7 +10819,7 @@
 
 void MmapPlaybackThread::setMasterMute(bool muted)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     // Don't apply master mute in SW if our HAL can do it for us.
     if (mAudioHwDev && mAudioHwDev->canSetMasterMute()) {
         mMasterMute = false;
@@ -10797,34 +10830,31 @@
 
 void MmapPlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
+    mStreamTypes[stream].volume = value;
     if (stream == mStreamType) {
-        mStreamVolume = value;
         broadcast_l();
     }
 }
 
 float MmapPlaybackThread::streamVolume(audio_stream_type_t stream) const
 {
-    Mutex::Autolock _l(mLock);
-    if (stream == mStreamType) {
-        return mStreamVolume;
-    }
-    return 0.0f;
+    audio_utils::lock_guard _l(mutex());
+    return mStreamTypes[stream].volume;
 }
 
 void MmapPlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
+    mStreamTypes[stream].mute = muted;
     if (stream == mStreamType) {
-        mStreamMute= muted;
         broadcast_l();
     }
 }
 
 void MmapPlaybackThread::invalidateTracks(audio_stream_type_t streamType)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     if (streamType == mStreamType) {
         for (const sp<IAfMmapTrack>& track : mActiveTracks) {
             track->invalidate();
@@ -10835,7 +10865,7 @@
 
 void MmapPlaybackThread::invalidateTracks(std::set<audio_port_handle_t>& portIds)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     bool trackMatch = false;
     for (const sp<IAfMmapTrack>& track : mActiveTracks) {
         if (portIds.find(track->portId()) != portIds.end()) {
@@ -10857,14 +10887,13 @@
 {
     float volume;
 
-    if (mMasterMute || mStreamMute) {
+    if (mMasterMute || streamMuted_l()) {
         volume = 0;
     } else {
-        volume = mMasterVolume * mStreamVolume;
+        volume = mMasterVolume * streamVolume_l();
     }
 
     if (volume != mHalVolFloat) {
-
         // Convert volumes from float to 8.24
         uint32_t vol = (uint32_t)(volume * (1 << 24));
 
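    The conversion above packs the float gain into unsigned Q8.24 fixed point before it is handed to the HAL. As a worked example (an illustrative helper, not part of this patch):

        #include <cstdint>

        // Linear float gain -> unsigned Q8.24, mirroring `volume * (1 << 24)` above.
        static uint32_t toQ8_24(float gain) {
            return static_cast<uint32_t>(gain * (1 << 24));
        }
        // toQ8_24(1.0f) == 0x01000000, toQ8_24(0.5f) == 0x00800000, toQ8_24(0.0f) == 0.
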
@@ -10884,9 +10913,9 @@
             if (callback != 0) {
                 mHalVolFloat = volume; // SW volume control worked, so update value.
                 mNoCallbackWarningCount = 0;
-                mLock.unlock();
+                mutex().unlock();
                 callback->onVolumeChanged(volume);
-                mLock.lock();
+                mutex().lock();
             } else {
                 if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
                     ALOGW("Could not set MMAP stream volume: no volume callback!");
@@ -10898,8 +10927,8 @@
             track->setMetadataHasChanged();
             track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
                 /*muteState=*/{mMasterMute,
-                               mStreamVolume == 0.f,
-                               mStreamMute,
+                               streamVolume_l() == 0.f,
+                               streamMuted_l(),
                                // TODO(b/241533526): adjust logic to include mute from AppOps
                                false /*muteFromPlaybackRestricted*/,
                                false /*muteFromClientVolume*/,
@@ -10983,7 +11012,7 @@
     return NO_ERROR;
 }
 
-// startMelComputation_l() must be called with AudioFlinger::mLock held
+// startMelComputation_l() must be called with AudioFlinger::mutex() held
 void MmapPlaybackThread::startMelComputation_l(
         const sp<audio_utils::MelProcessor>& processor)
 {
@@ -10997,7 +11026,7 @@
     // assigned constant for each thread
 }
 
-// stopMelComputation_l() must be called with AudioFlinger::mLock held
+// stopMelComputation_l() must be called with AudioFlinger::mutex() held
 void MmapPlaybackThread::stopMelComputation_l()
 {
     ALOGV("%s: pausing mel processor for thread %d", __func__, id());
@@ -11012,7 +11041,7 @@
     MmapThread::dumpInternals_l(fd, args);
 
     dprintf(fd, "  Stream type: %d Stream volume: %f HAL volume: %f Stream mute %d\n",
-            mStreamType, mStreamVolume, mHalVolFloat, mStreamMute);
+            mStreamType, streamVolume_l(), mHalVolFloat, streamMuted_l());
     dprintf(fd, "  Master volume: %f Master mute %d\n", mMasterVolume, mMasterMute);
 }
 
@@ -11046,7 +11075,7 @@
 
 AudioStreamIn* MmapCaptureThread::clearInput()
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     AudioStreamIn *input = mInput;
     mInput = NULL;
     return input;
@@ -11104,7 +11133,7 @@
 
 void MmapCaptureThread::setRecordSilenced(audio_port_handle_t portId, bool silenced)
 {
-    Mutex::Autolock _l(mLock);
+    audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mActiveTracks.size() ; i++) {
         if (mActiveTracks[i]->portId() == portId) {
             mActiveTracks[i]->setSilenced_l(silenced);
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index c6c585b..b6fd0b6 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -51,7 +51,7 @@
     ~ThreadBase() override;
 
     status_t readyToRun() final;
-    void clearPowerManager() final;
+    void clearPowerManager() final EXCLUDES_ThreadBase_Mutex;
 
     // base for record and playback
     enum {
@@ -68,29 +68,27 @@
 
     class ConfigEventData: public RefBase {
     public:
-        virtual ~ConfigEventData() {}
-
         virtual  void dump(char *buffer, size_t size) = 0;
     protected:
-        ConfigEventData() {}
+        ConfigEventData() = default;
     };
 
     // Config event sequence by client if status needed (e.g binder thread calling setParameters()):
     //  1. create SetParameterConfigEvent. This sets mWaitStatus in config event
-    //  2. Lock mLock
+    //  2. Lock mutex()
     //  3. Call sendConfigEvent_l(): Append to mConfigEvents and mWaitWorkCV.signal
     //  4. sendConfigEvent_l() reads status from event->mStatus;
     //  5. sendConfigEvent_l() returns status
     //  6. Unlock
     //
     // Parameter sequence by server: threadLoop calling processConfigEvents_l():
-    // 1. Lock mLock
+    // 1. Lock mutex()
     // 2. If there is an entry in mConfigEvents proceed ...
     // 3. Read first entry in mConfigEvents
     // 4. Remove first entry from mConfigEvents
     // 5. Process
     // 6. Set event->mStatus
-    // 7. event->mCond.signal
+    // 7. event->mCondition.notify_one()
     // 8. Unlock
 
     class ConfigEvent: public RefBase {
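    The numbered client/server sequence in the comment block above is a condition-variable handshake: the sender appends an event under the thread mutex, wakes the thread loop, then sleeps on the event's own condition until the loop has filled in the status. A self-contained sketch of that handshake using plain std::mutex/std::condition_variable (hypothetical names, simplified from the real sendConfigEvent_l()/processConfigEvents_l() pair):

        #include <condition_variable>
        #include <deque>
        #include <memory>
        #include <mutex>

        struct Event {
            std::mutex mutex;                   // protects status/done (the event's own lock)
            std::condition_variable condition;  // "7. event->mCondition.notify_one()"
            int status = 0;
            bool done = false;
        };

        class WorkerThread {
        public:
            // Client side: queue the event, wake the loop, wait for the status.
            int sendEvent(const std::shared_ptr<Event>& event) {
                {
                    std::lock_guard<std::mutex> tl(mMutex);  // 2. lock the thread mutex
                    mEvents.push_back(event);                // 3. append and wake the loop
                    mWaitWorkCV.notify_one();
                }                                            // 6. unlock
                std::unique_lock<std::mutex> el(event->mutex);
                event->condition.wait(el, [&] { return event->done; });  // 4./5. wait for status
                return event->status;
            }

            // Server side: normally run from the thread loop.
            void processEvents() {
                std::unique_lock<std::mutex> tl(mMutex);     // 1. lock the thread mutex
                while (!mEvents.empty()) {                   // 2./3. first entry, if any
                    std::shared_ptr<Event> event = mEvents.front();
                    mEvents.pop_front();                     // 4. remove it
                    tl.unlock();                             // process without the thread lock
                    {
                        std::lock_guard<std::mutex> el(event->mutex);
                        event->status = 0;                   // 5./6. process, set status
                        event->done = true;
                    }
                    event->condition.notify_one();           // 7. wake the sender
                    tl.lock();                               // 8. re-lock before the next entry
                }
            }

        private:
            std::mutex mMutex;
            std::condition_variable mWaitWorkCV;
            std::deque<std::shared_ptr<Event>> mEvents;
        };
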
@@ -103,13 +101,22 @@
             }
         }
 
+        audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::ConfigEvent_Mutex) {
+            return mMutex;
+        }
         const int mType; // event type e.g. CFG_EVENT_IO
-        Mutex mLock;     // mutex associated with mCond
-        Condition mCond; // condition for status return
+        mutable audio_utils::mutex mMutex; // mutex associated with mCondition
+        audio_utils::condition_variable mCondition; // condition for status return
+
+        // NO_THREAD_SAFETY_ANALYSIS Can we add GUARDED_BY?
         status_t mStatus; // status communicated to sender
-        bool mWaitStatus; // true if sender is waiting for status
-        bool mRequiresSystemReady; // true if must wait for system ready to enter event queue
-        sp<ConfigEventData> mData;     // event specific parameter data
+
+        bool mWaitStatus GUARDED_BY(mutex()); // true if sender is waiting for status
+        // true if must wait for system ready to enter event queue
+        bool mRequiresSystemReady GUARDED_BY(mutex());
+
+        // NO_THREAD_SAFETY_ANALYSIS Can we add GUARDED_BY?
+        sp<ConfigEventData> mData; // event specific parameter data
 
     protected:
         explicit ConfigEvent(int type, bool requiresSystemReady = false) :
@@ -196,7 +203,7 @@
         }
 
         const struct audio_patch mPatch;
-        audio_patch_handle_t mHandle;
+        audio_patch_handle_t mHandle;  // cannot be const
     };
 
     class CreateAudioPatchConfigEvent : public ConfigEvent {
@@ -218,7 +225,7 @@
             snprintf(buffer, size, "- Patch handle: %u\n", mHandle);
         }
 
-        audio_patch_handle_t mHandle;
+        const audio_patch_handle_t mHandle;
     };
 
     class ReleaseAudioPatchConfigEvent : public ConfigEvent {
@@ -239,7 +246,7 @@
             snprintf(buffer, size, "- Devices: %s", android::toString(mOutDevices).c_str());
         }
 
-        DeviceDescriptorBaseVector mOutDevices;
+        const DeviceDescriptorBaseVector mOutDevices;
     };
 
     class UpdateOutDevicesConfigEvent : public ConfigEvent {
@@ -259,7 +266,7 @@
             snprintf(buffer, size, "- mMaxSharedAudioHistoryMs: %d", mMaxSharedAudioHistoryMs);
         }
 
-        int32_t mMaxSharedAudioHistoryMs;
+        const int32_t mMaxSharedAudioHistoryMs;
     };
 
     class ResizeBufferConfigEvent : public ConfigEvent {
@@ -288,15 +295,14 @@
     class PMDeathRecipient : public IBinder::DeathRecipient {
     public:
         explicit    PMDeathRecipient(const wp<ThreadBase>& thread) : mThread(thread) {}
-        virtual     ~PMDeathRecipient() {}
 
         // IBinder::DeathRecipient
-        virtual     void        binderDied(const wp<IBinder>& who);
+        void binderDied(const wp<IBinder>& who) final;
 
     private:
         DISALLOW_COPY_AND_ASSIGN(PMDeathRecipient);
 
-        wp<ThreadBase> mThread;
+        const wp<ThreadBase> mThread;
     };
 
     type_t type() const final { return mType; }
@@ -310,8 +316,9 @@
     uint32_t channelCount() const final { return mChannelCount; }
     audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
     uint32_t hapticChannelCount() const override { return 0; }
-    uint32_t latency_l() const override { return 0; }
-    void setVolumeForOutput_l(float /* left */, float /* right */) const override {}
+    uint32_t latency_l() const override { return 0; }  // NO_THREAD_SAFETY_ANALYSIS
+    void setVolumeForOutput_l(float /* left */, float /* right */) const override
+            REQUIRES(mutex()) {}
 
                 // Return's the HAL's frame count i.e. fast mixer buffer size.
     size_t frameCountHAL() const final { return mFrameCount; }
@@ -319,44 +326,49 @@
 
     // Should be "virtual status_t requestExitAndWait()" and override same
     // method in Thread, but Thread::requestExitAndWait() is not yet virtual.
-    void exit() final;
-    status_t setParameters(const String8& keyValuePairs) final;
+    void exit() final EXCLUDES_ThreadBase_Mutex;
+    status_t setParameters(const String8& keyValuePairs) final EXCLUDES_ThreadBase_Mutex;
 
-                // sendConfigEvent_l() must be called with ThreadBase::mLock held
+                // sendConfigEvent_l() must be called with ThreadBase::mutex() held
                 // Can temporarily release the lock if waiting for a reply from
                 // processConfigEvents_l().
-    status_t sendConfigEvent_l(sp<ConfigEvent>& event);
+    status_t sendConfigEvent_l(sp<ConfigEvent>& event) REQUIRES(mutex());
     void sendIoConfigEvent(audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final;
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final EXCLUDES_ThreadBase_Mutex;
     void sendIoConfigEvent_l(audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final;
-    void sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp) final;
-    void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio, bool forApp) final;
-    status_t sendSetParameterConfigEvent_l(const String8& keyValuePair) final;
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final REQUIRES(mutex());
+    void sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp) final
+            EXCLUDES_ThreadBase_Mutex;
+    void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio, bool forApp) final
+            REQUIRES(mutex());
+    status_t sendSetParameterConfigEvent_l(const String8& keyValuePair) final REQUIRES(mutex());
     status_t sendCreateAudioPatchConfigEvent(const struct audio_patch* patch,
-            audio_patch_handle_t* handle) final;
-    status_t sendReleaseAudioPatchConfigEvent(audio_patch_handle_t handle) final;
+            audio_patch_handle_t* handle) final EXCLUDES_ThreadBase_Mutex;
+    status_t sendReleaseAudioPatchConfigEvent(audio_patch_handle_t handle) final
+            EXCLUDES_ThreadBase_Mutex;
     status_t sendUpdateOutDeviceConfigEvent(
-            const DeviceDescriptorBaseVector& outDevices) final;
-    void sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs) final;
-    void sendCheckOutputStageEffectsEvent() final;
-    void sendCheckOutputStageEffectsEvent_l() final;
-    void sendHalLatencyModesChangedEvent_l() final;
+            const DeviceDescriptorBaseVector& outDevices) final EXCLUDES_ThreadBase_Mutex;
+    void sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs) final REQUIRES(mutex());
+    void sendCheckOutputStageEffectsEvent() final EXCLUDES_ThreadBase_Mutex;
+    void sendCheckOutputStageEffectsEvent_l() final REQUIRES(mutex());
+    void sendHalLatencyModesChangedEvent_l() final REQUIRES(mutex());
 
-    void processConfigEvents_l() final;
+    void processConfigEvents_l() final REQUIRES(mutex());
     void setCheckOutputStageEffects() override {}
     void updateOutDevices(const DeviceDescriptorBaseVector& outDevices) override;
     void toAudioPortConfig(struct audio_port_config* config) override;
-    void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) override;
+    void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) override REQUIRES(mutex());
 
     // see note at declaration of mStandby, mOutDevice and mInDevice
     bool inStandby() const override { return mStandby; }
-    const DeviceTypeSet outDeviceTypes() const final {
+    const DeviceTypeSet outDeviceTypes_l() const final REQUIRES(mutex()) {
         return getAudioDeviceTypes(mOutDeviceTypeAddrs);
     }
-    audio_devices_t inDeviceType() const final { return mInDeviceTypeAddr.mType; }
-    DeviceTypeSet getDeviceTypes() const final {
-        return isOutput() ? outDeviceTypes() : DeviceTypeSet({inDeviceType()});
+    audio_devices_t inDeviceType_l() const final REQUIRES(mutex()) {
+        return mInDeviceTypeAddr.mType;
+    }
+    DeviceTypeSet getDeviceTypes_l() const final REQUIRES(mutex()) {
+        return isOutput() ? outDeviceTypes_l() : DeviceTypeSet({inDeviceType_l()});
     }
 
     const AudioDeviceTypeAddrVector& outDeviceTypeAddrs() const final {
@@ -389,7 +401,8 @@
                                     status_t *status /*non-NULL*/,
                                     bool pinned,
                                     bool probe,
-                                    bool notifyFramesProcessed) final;
+                                    bool notifyFramesProcessed) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
                 // return values for hasAudioSession (bit field)
                 enum effect_state {
@@ -415,33 +428,38 @@
                 // ThreadBase mutex before processing the mixer and effects. This guarantees the
                 // integrity of the chains during the process.
                 // Also sets the parameter 'effectChains' to current value of mEffectChains.
-    void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) final;
+    void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) final REQUIRES(mutex());
                 // unlock effect chains after process
     void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains) final;
                 // get a copy of mEffectChains vector
-    Vector<sp<IAfEffectChain>> getEffectChains_l() const final { return mEffectChains; };
+    Vector<sp<IAfEffectChain>> getEffectChains_l() const final REQUIRES(mutex()) {
+        return mEffectChains;
+    }
                 // set audio mode to all effect chains
     void setMode(audio_mode_t mode) final;
                 // get effect module with corresponding ID on specified audio session
     sp<IAfEffectModule> getEffect(audio_session_t sessionId, int effectId) const final;
-    sp<IAfEffectModule> getEffect_l(audio_session_t sessionId, int effectId) const final;
+    sp<IAfEffectModule> getEffect_l(audio_session_t sessionId, int effectId) const final
+            REQUIRES(mutex());
                 // add and effect module. Also creates the effect chain is none exists for
                 // the effects audio session. Only called in a context of moving an effect
                 // from one thread to another
-    status_t addEffect_l(const sp<IAfEffectModule>& effect) final;
+    status_t addEffect_ll(const sp<IAfEffectModule>& effect) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex, mutex());
                 // remove and effect module. Also removes the effect chain is this was the last
                 // effect
-    void removeEffect_l(const sp<IAfEffectModule>& effect, bool release = false) final;
+    void removeEffect_l(const sp<IAfEffectModule>& effect, bool release = false) final
+            REQUIRES(mutex());
                 // disconnect an effect handle from module and destroy module if last handle
     void disconnectEffectHandle(IAfEffectHandle* handle, bool unpinIfLast) final;
                 // detach all tracks connected to an auxiliary effect
-    void detachAuxEffect_l(int /* effectId */) override {}
+    void detachAuxEffect_l(int /* effectId */) override REQUIRES(mutex()) {}
     // TODO(b/291317898) - remove hasAudioSession_l below.
-    uint32_t hasAudioSession_l(audio_session_t sessionId) const override = 0;
-    uint32_t hasAudioSession(audio_session_t sessionId) const final {
-                    Mutex::Autolock _l(mLock);
-                    return hasAudioSession_l(sessionId);
-                }
+    uint32_t hasAudioSession_l(audio_session_t sessionId) const override REQUIRES(mutex()) = 0;
+    uint32_t hasAudioSession(audio_session_t sessionId) const final EXCLUDES_ThreadBase_Mutex {
+        std::lock_guard _l(mutex());
+        return hasAudioSession_l(sessionId);
+    }
 
                 template <typename T>
                 uint32_t hasAudioSession_l(audio_session_t sessionId, const T& tracks) const {
@@ -473,9 +491,9 @@
                 // the value returned by default implementation is not important as the
                 // strategy is only meaningful for PlaybackThread which implements this method
     product_strategy_t getStrategyForSession_l(
-            audio_session_t /* sessionId */) const override {
-                    return static_cast<product_strategy_t>(0);
-                }
+            audio_session_t /* sessionId */) const override REQUIRES(mutex()) {
+        return static_cast<product_strategy_t>(0);
+    }
 
                 // check if some effects must be suspended/restored when an effect is enabled
                 // or disabled
@@ -494,38 +512,40 @@
 
     sp<IMemory> pipeMemory() const override { return nullptr; }
 
-    void systemReady() final;
+    void systemReady() final EXCLUDES_ThreadBase_Mutex;
 
-    void broadcast_l() final;
+    void broadcast_l() final REQUIRES(mutex());
 
-    bool isTimestampCorrectionEnabled() const override { return false; }
+    bool isTimestampCorrectionEnabled_l() const override REQUIRES(mutex()) { return false; }
 
     bool isMsdDevice() const final { return mIsMsdDevice; }
 
     void dump(int fd, const Vector<String16>& args) override;
 
                 // deliver stats to mediametrics.
-    void sendStatistics(bool force) final;
+    void sendStatistics(bool force) final EXCLUDES_ThreadBase_Mutex;
 
-    Mutex& mutex() const final {
-        return mLock;
+    audio_utils::mutex& mutex() const final RETURN_CAPABILITY(audio_utils::ThreadBase_Mutex) {
+        return mMutex;
     }
-    mutable     Mutex                   mLock;
+    mutable audio_utils::mutex mMutex;
 
-    void onEffectEnable(const sp<IAfEffectModule>& effect) final;
-    void onEffectDisable() final;
+    void onEffectEnable(const sp<IAfEffectModule>& effect) final EXCLUDES_ThreadBase_Mutex;
+    void onEffectDisable() final EXCLUDES_ThreadBase_Mutex;
 
-                // invalidateTracksForAudioSession_l must be called with holding mLock.
-    void invalidateTracksForAudioSession_l(audio_session_t /* sessionId */) const override {}
+                // invalidateTracksForAudioSession_l must be called with holding mutex().
+    void invalidateTracksForAudioSession_l(audio_session_t /* sessionId */) const override
+            REQUIRES(mutex()) {}
                 // Invalidate all the tracks with the given audio session.
-    void invalidateTracksForAudioSession(audio_session_t sessionId) const final {
-                    Mutex::Autolock _l(mLock);
+    void invalidateTracksForAudioSession(audio_session_t sessionId) const final
+            EXCLUDES_ThreadBase_Mutex {
+                    std::lock_guard _l(mutex());
                     invalidateTracksForAudioSession_l(sessionId);
                 }
 
                 template <typename T>
-                void invalidateTracksForAudioSession_l(audio_session_t sessionId,
-                                                       const T& tracks) const {
+    void invalidateTracksForAudioSession_l(audio_session_t sessionId,
+            const T& tracks) const REQUIRES(mutex()) {
                     for (size_t i = 0; i < tracks.size(); ++i) {
                         const sp<IAfTrackBase>& track = tracks[i];
                         if (sessionId == track->sessionId()) {
@@ -534,8 +554,10 @@
                     }
                 }
 
-    void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override;
-    void stopMelComputation_l() override;
+    void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
+    void stopMelComputation_l() override
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
 protected:
 
@@ -548,41 +570,41 @@
                     effect_uuid_t mType;    // effect type UUID
                 };
 
-                void        acquireWakeLock();
-                virtual void acquireWakeLock_l();
-                void        releaseWakeLock();
-                void        releaseWakeLock_l();
-                void        updateWakeLockUids_l(const SortedVector<uid_t> &uids);
-                void        getPowerManager_l();
+    void acquireWakeLock() EXCLUDES_ThreadBase_Mutex;
+    virtual void acquireWakeLock_l() REQUIRES(mutex());
+    void releaseWakeLock() EXCLUDES_ThreadBase_Mutex;
+    void releaseWakeLock_l() REQUIRES(mutex());
+    void updateWakeLockUids_l(const SortedVector<uid_t> &uids) REQUIRES(mutex());
+    void getPowerManager_l() REQUIRES(mutex());
                 // suspend or restore effects of the specified type (or all if type is NULL)
                 // on a given session. The number of suspend requests is counted and restore
                 // occurs when all suspend requests are cancelled.
-                void setEffectSuspended_l(const effect_uuid_t *type,
+    void setEffectSuspended_l(const effect_uuid_t *type,
                                           bool suspend,
-                                          audio_session_t sessionId) final;
+            audio_session_t sessionId) final REQUIRES(mutex());
                 // updated mSuspendedSessions when an effect is suspended or restored
-                void        updateSuspendedSessions_l(const effect_uuid_t *type,
+    void updateSuspendedSessions_l(const effect_uuid_t *type,
                                                       bool suspend,
-                                                      audio_session_t sessionId);
+            audio_session_t sessionId) REQUIRES(mutex());
                 // check if some effects must be suspended when an effect chain is added
-                void checkSuspendOnAddEffectChain_l(const sp<IAfEffectChain>& chain);
+    void checkSuspendOnAddEffectChain_l(const sp<IAfEffectChain>& chain) REQUIRES(mutex());
 
                 // sends the metadata of the active tracks to the HAL
                 struct MetadataUpdate {
                     std::vector<playback_track_metadata_v7_t> playbackMetadataUpdate;
                     std::vector<record_track_metadata_v7_t>   recordMetadataUpdate;
                 };
-    virtual     MetadataUpdate           updateMetadata_l() = 0;
+    virtual MetadataUpdate updateMetadata_l() REQUIRES(mutex()) = 0;
 
                 String16 getWakeLockTag();
 
-    virtual     void        preExit() { }
-    virtual     void        setMasterMono_l(bool mono __unused) { }
+    virtual void preExit() EXCLUDES_ThreadBase_Mutex {}
+    virtual void setMasterMono_l(bool mono __unused) REQUIRES(mutex()) {}
     virtual     bool        requireMonoBlend() { return false; }
 
                             // called within the threadLoop to obtain timestamp from the HAL.
-    virtual     status_t    threadloop_getHalTimestamp_l(
-                                    ExtendedTimestamp *timestamp __unused) const {
+    virtual status_t threadloop_getHalTimestamp_l(
+            ExtendedTimestamp *timestamp __unused) const REQUIRES(mutex()) {
                                 return INVALID_OPERATION;
                             }
 public:
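    setEffectSuspended_l()/updateSuspendedSessions_l() above describe a counted suspend: every suspend request bumps a per-session, per-type count, and effects are only restored once the last outstanding request is cancelled. A reduced sketch of just that counting rule (hypothetical names, not the actual mSuspendedSessions bookkeeping):

        #include <map>
        #include <string>
        #include <utility>

        // Count suspend requests per (audio session, effect type); "" means all effects.
        class SuspendCounter {
        public:
            // Returns true when this is the first request, i.e. effects should be suspended now.
            bool suspend(int sessionId, const std::string& type) {
                return ++mCounts[{sessionId, type}] == 1;
            }

            // Returns true when the last outstanding request is cancelled, i.e. restore now.
            bool restore(int sessionId, const std::string& type) {
                auto it = mCounts.find({sessionId, type});
                if (it == mCounts.end()) return false;  // nothing was suspended
                if (--it->second == 0) {
                    mCounts.erase(it);
                    return true;
                }
                return false;
            }

        private:
            std::map<std::pair<int, std::string>, int> mCounts;
        };
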
@@ -590,16 +612,17 @@
                 product_strategy_t getStrategyForStream(audio_stream_type_t stream) const;
 protected:
 
-    virtual     void        onHalLatencyModesChanged_l() {}
+    virtual void onHalLatencyModesChanged_l() REQUIRES(mutex()) {}
 
-    virtual     void        dumpInternals_l(int fd __unused, const Vector<String16>& args __unused)
-                            { }
-    virtual     void        dumpTracks_l(int fd __unused, const Vector<String16>& args __unused) { }
+    virtual void dumpInternals_l(int fd __unused, const Vector<String16>& args __unused)
+            REQUIRES(mutex()) {}
+    virtual void dumpTracks_l(int fd __unused, const Vector<String16>& args __unused)
+            REQUIRES(mutex()) {}
 
                 const type_t            mType;
 
                 // Used by parameters, config events, addTrack_l, exit
-                Condition               mWaitWorkCV;
+                audio_utils::condition_variable mWaitWorkCV;
 
                 const sp<IAfThreadCallback>  mAfThreadCallback;
                 ThreadMetrics           mThreadMetrics;
@@ -619,8 +642,10 @@
                                                            // HAL format if Fastmixer is used.
                 audio_format_t          mHALFormat;
                 size_t                  mBufferSize;       // HAL buffer size for read() or write()
-                AudioDeviceTypeAddrVector mOutDeviceTypeAddrs; // output device types and addresses
-                AudioDeviceTypeAddr       mInDeviceTypeAddr;   // input device type and address
+
+    // output device types and addresses
+    AudioDeviceTypeAddrVector mOutDeviceTypeAddrs GUARDED_BY(mutex());
+    AudioDeviceTypeAddr mInDeviceTypeAddr GUARDED_BY(mutex());   // input device type and address
                 Vector< sp<ConfigEvent> >     mConfigEvents;
                 Vector< sp<ConfigEvent> >     mPendingConfigEvents; // events awaiting system ready
 
@@ -759,7 +784,8 @@
                     // ThreadBase thread.
                     void            clear();
                     // periodically called in the threadLoop() to update power state uids.
-                    void updatePowerState(const sp<ThreadBase>& thread, bool force = false);
+                    void updatePowerState_l(const sp<ThreadBase>& thread, bool force = false)
+                            REQUIRES(audio_utils::ThreadBase_Mutex);
 
                     /** @return true if one or move active tracks was added or removed since the
                      *          last time this function was called or the vector was created.
@@ -792,11 +818,11 @@
                     bool                mHasChanged = false;
                 };
 
-                SimpleLog mLocalLog;
+                SimpleLog mLocalLog;  // locked internally
 
 private:
-                void dumpBase_l(int fd, const Vector<String16>& args);
-                void dumpEffectChains_l(int fd, const Vector<String16>& args);
+    void dumpBase_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
+    void dumpEffectChains_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
 };
 
 // --- PlaybackThread ---
@@ -829,15 +855,15 @@
     ~PlaybackThread() override;
 
     // Thread virtuals
-    bool threadLoop() final;
+    bool threadLoop() final EXCLUDES_ThreadBase_Mutex;
 
     // RefBase
     void onFirstRef() override;
 
     status_t checkEffectCompatibility_l(
-            const effect_descriptor_t* desc, audio_session_t sessionId) final;
+            const effect_descriptor_t* desc, audio_session_t sessionId) final REQUIRES(mutex());
 
-    void addOutputTrack_l(const sp<IAfTrack>& track) final {
+    void addOutputTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex()) {
         mTracks.add(track);
     }
 
@@ -870,9 +896,9 @@
 protected:
 
     virtual     bool        waitingAsyncCallback();
-    virtual     bool        waitingAsyncCallback_l();
-    virtual     bool        shouldStandby_l();
-    virtual     void        onAddNewTrack_l();
+    virtual bool waitingAsyncCallback_l() REQUIRES(mutex());
+    virtual bool shouldStandby_l() REQUIRES(mutex());
+    virtual void onAddNewTrack_l() REQUIRES(mutex());
 public:  // AsyncCallbackThread
                 void        onAsyncError(); // error reported by AsyncCallbackThread
 protected:
@@ -881,20 +907,20 @@
             const std::basic_string<uint8_t>& metadataBs) final;
 
     // ThreadBase virtuals
-    virtual     void        preExit();
+    void preExit() final EXCLUDES_ThreadBase_Mutex;
 
     virtual     bool        keepWakeLock() const { return true; }
-    virtual     void        acquireWakeLock_l() {
+    virtual void acquireWakeLock_l() REQUIRES(mutex()) {
                                 ThreadBase::acquireWakeLock_l();
-                                mActiveTracks.updatePowerState(this, true /* force */);
+        mActiveTracks.updatePowerState_l(this, true /* force */);
                             }
 
     virtual     void        checkOutputStageEffects() {}
     virtual     void        setHalLatencyMode_l() {}
 
 
-    void dumpInternals_l(int fd, const Vector<String16>& args) override;
-    void dumpTracks_l(int fd, const Vector<String16>& args) final;
+    void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
+    void dumpTracks_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
 
 public:
 
@@ -903,15 +929,15 @@
                 // return estimated latency in milliseconds, as reported by HAL
     uint32_t latency() const final;
                 // same, but lock must already be held
-    uint32_t latency_l() const final;
+    uint32_t latency_l() const final /* REQUIRES(mutex()) */;  // NO_THREAD_SAFETY_ANALYSIS
 
                 // VolumeInterface
     void setMasterVolume(float value) final;
-    void setMasterBalance(float balance) override;
+    void setMasterBalance(float balance) override EXCLUDES_ThreadBase_Mutex;
     void setMasterMute(bool muted) final;
-    void setStreamVolume(audio_stream_type_t stream, float value) final;
-    void setStreamMute(audio_stream_type_t stream, bool muted) final;
-    float streamVolume(audio_stream_type_t stream) const final;
+    void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
+    void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
+    float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
     void setVolumeForOutput_l(float left, float right) const final;
 
     sp<IAfTrack> createTrack_l(
@@ -935,13 +961,15 @@
                                 audio_port_handle_t portId,
                                 const sp<media::IAudioTrackCallback>& callback,
                                 bool isSpatialized,
-                                bool isBitPerfect) final;
+                                bool isBitPerfect,
+                                audio_output_flags_t* afTrackFlags) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
     bool isTrackActive(const sp<IAfTrack>& track) const final {
         return mActiveTracks.indexOf(track) >= 0;
     }
 
-    AudioStreamOut* getOutput_l() const final { return mOutput; }
+    AudioStreamOut* getOutput_l() const final REQUIRES(mutex()) { return mOutput; }
     AudioStreamOut* getOutput() const final;
     AudioStreamOut* clearOutput() final;
     sp<StreamHalInterface> stream() const final;
@@ -959,38 +987,48 @@
     bool isSuspended() const final
                                 { return android_atomic_acquire_load(&mSuspended) > 0; }
 
-    String8 getParameters(const String8& keys);
-    void ioConfigChanged(audio_io_config_event_t event, pid_t pid = 0,
+    String8 getParameters(const String8& keys) EXCLUDES_ThreadBase_Mutex;
+
+    // Hold either the AudioFlinger::mutex or the ThreadBase::mutex
+    void ioConfigChanged_l(audio_io_config_event_t event, pid_t pid = 0,
             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final;
-    status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames) const final;
+    status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames) const final
+            EXCLUDES_ThreadBase_Mutex;
                 // Consider also removing and passing an explicit mMainBuffer initialization
                 // parameter to AF::IAfTrack::Track().
     float* sinkBuffer() const final {
                     return reinterpret_cast<float *>(mSinkBuffer); };
 
-    void detachAuxEffect_l(int effectId) final;
+    void detachAuxEffect_l(int effectId) final REQUIRES(mutex());
 
-    status_t attachAuxEffect(const sp<IAfTrack>& track, int EffectId) final;
-    status_t attachAuxEffect_l(const sp<IAfTrack>& track, int EffectId) final;
+    status_t attachAuxEffect(const sp<IAfTrack>& track, int EffectId) final
+            EXCLUDES_ThreadBase_Mutex;
+    status_t attachAuxEffect_l(const sp<IAfTrack>& track, int EffectId) final REQUIRES(mutex());
 
-    status_t addEffectChain_l(const sp<IAfEffectChain>& chain) final;
-    size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) final;
-    uint32_t hasAudioSession_l(audio_session_t sessionId) const final {
+    status_t addEffectChain_l(const sp<IAfEffectChain>& chain) final REQUIRES(mutex());
+    size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) final REQUIRES(mutex());
+    uint32_t hasAudioSession_l(audio_session_t sessionId) const final REQUIRES(mutex()) {
                             return ThreadBase::hasAudioSession_l(sessionId, mTracks);
                         }
-    product_strategy_t getStrategyForSession_l(audio_session_t sessionId) const final;
+    product_strategy_t getStrategyForSession_l(audio_session_t sessionId) const final
+            REQUIRES(mutex());
 
 
-    status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) final;
+    status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) final
+            EXCLUDES_ThreadBase_Mutex;
+    // could be static.
     bool isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const final;
 
-                // called with AudioFlinger lock held
-    bool invalidateTracks_l(audio_stream_type_t streamType) final;
-    bool invalidateTracks_l(std::set<audio_port_handle_t>& portIds) final;
+    // Does this require the AudioFlinger mutex as well?
+    bool invalidateTracks_l(audio_stream_type_t streamType) final
+            REQUIRES(mutex());
+    bool invalidateTracks_l(std::set<audio_port_handle_t>& portIds) final
+            REQUIRES(mutex());
     void invalidateTracks(audio_stream_type_t streamType) override;
                 // Invalidate tracks by a set of port ids. The port id will be removed from
                 // the given set if the corresponding track is found and invalidated.
-    void invalidateTracks(std::set<audio_port_handle_t>& portIds) override;
+    void invalidateTracks(std::set<audio_port_handle_t>& portIds) override
+            EXCLUDES_ThreadBase_Mutex;
 
     size_t frameCount() const final { return mNormalFrameCount; }
 
@@ -998,30 +1036,32 @@
                     return mMixerChannelMask;
                 }
 
-    status_t getTimestamp_l(AudioTimestamp& timestamp) final;
+    status_t getTimestamp_l(AudioTimestamp& timestamp) final REQUIRES(mutex());
 
-    void addPatchTrack(const sp<IAfPatchTrack>& track) final;
-    void deletePatchTrack(const sp<IAfPatchTrack>& track) final;
+    void addPatchTrack(const sp<IAfPatchTrack>& track) final EXCLUDES_ThreadBase_Mutex;
+    void deletePatchTrack(const sp<IAfPatchTrack>& track) final EXCLUDES_ThreadBase_Mutex;
 
+    // NO_THREAD_SAFETY_ANALYSIS - fix this to use atomics.
     void toAudioPortConfig(struct audio_port_config* config) final;
 
                 // Return the asynchronous signal wait time.
-    int64_t computeWaitTimeNs_l() const override { return INT64_MAX; }
+    int64_t computeWaitTimeNs_l() const override REQUIRES(mutex()) { return INT64_MAX; }
                 // returns true if the track is allowed to be added to the thread.
     bool isTrackAllowed_l(
                                     audio_channel_mask_t channelMask __unused,
                                     audio_format_t format __unused,
                                     audio_session_t sessionId __unused,
-                                    uid_t uid) const override {
+            uid_t uid) const override REQUIRES(mutex()) {
                                 return trackCountForUid_l(uid) < PlaybackThread::kMaxTracksPerUid
                                        && mTracks.size() < PlaybackThread::kMaxTracks;
                             }
 
-    bool isTimestampCorrectionEnabled() const final {
-                                return audio_is_output_devices(mTimestampCorrectedDevice)
-                                        && outDeviceTypes().count(mTimestampCorrectedDevice) != 0;
+    bool isTimestampCorrectionEnabled_l() const final REQUIRES(mutex()) {
+        return audio_is_output_devices(mTimestampCorrectedDevice)
+                && outDeviceTypes_l().count(mTimestampCorrectedDevice) != 0;
                             }
 
+    // NO_THREAD_SAFETY_ANALYSIS - fix this to be atomic.
     bool isStreamInitialized() const final {
                                 return !(mOutput == nullptr || mOutput->stream == nullptr);
                             }
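
The NO_THREAD_SAFETY_ANALYSIS comments above mark readers such as isStreamInitialized() that still inspect mOutput without the thread lock. A hedged sketch of the direction those "fix this to be atomic" TODOs point at: publish the pointer through an atomic so the lock-free reader needs neither a REQUIRES annotation nor an analysis suppression. The types are illustrative stand-ins, not the real AudioStreamOut plumbing.

    #include <atomic>

    struct StreamHandle {};  // stands in for AudioStreamOut / its HAL stream

    class SketchOutputThread {
    public:
        // Lock-free reader: no lock, no REQUIRES, no analysis suppression needed.
        bool isStreamInitialized() const {
            return mOutput.load(std::memory_order_acquire) != nullptr;
        }
        // Writer publishes the fully constructed stream with release semantics.
        void setOutput(StreamHandle* output) {
            mOutput.store(output, std::memory_order_release);
        }
    private:
        std::atomic<StreamHandle*> mOutput{nullptr};
    };
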
@@ -1038,12 +1078,12 @@
                     return (mHapticChannelMask & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE;
                 }
 
-    void setDownStreamPatch(const struct audio_patch* patch) final {
-                    Mutex::Autolock _l(mLock);
+    void setDownStreamPatch(const struct audio_patch* patch) final EXCLUDES_ThreadBase_Mutex {
+                    std::lock_guard _l(mutex());
                     mDownStreamPatch = *patch;
                 }
 
-    IAfTrack* getTrackById_l(audio_port_handle_t trackId) final;
+    IAfTrack* getTrackById_l(audio_port_handle_t trackId) final REQUIRES(mutex());
 
     bool hasMixer() const final {
                     return mType == MIXER || mType == DUPLICATING || mType == SPATIALIZER;
@@ -1061,23 +1101,25 @@
                     return INVALID_OPERATION;
                 }
 
-    void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override;
-    void stopMelComputation_l() override;
+    void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
+    void stopMelComputation_l() override
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
     void setStandby() final {
-                    Mutex::Autolock _l(mLock);
+                    std::lock_guard _l(mutex());
                     setStandby_l();
                 }
 
-    void setStandby_l() final {
+    void setStandby_l() final REQUIRES(mutex()) {
                     mStandby = true;
                     mHalStarted = false;
                     mKernelPositionOnStandby =
                         mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
                 }
 
-    bool waitForHalStart() final {
-                    Mutex::Autolock _l(mLock);
+    bool waitForHalStart() final EXCLUDES_ThreadBase_Mutex {
+                    audio_utils::unique_lock _l(mutex());
                     static const nsecs_t kWaitHalTimeoutNs = seconds(2);
                     nsecs_t endWaitTimetNs = systemTime() + kWaitHalTimeoutNs;
                     while (!mHalStarted) {
@@ -1086,7 +1128,7 @@
                             break;
                         }
                         nsecs_t waitTimeLeftNs = endWaitTimetNs - timeNs;
-                        mWaitHalStartCV.waitRelative(mLock, waitTimeLeftNs);
+                        mWaitHalStartCV.wait_for(_l, std::chrono::nanoseconds(waitTimeLeftNs));
                     }
                     return mHalStarted;
                 }
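
waitForHalStart() now blocks on audio_utils::condition_variable under audio_utils::unique_lock instead of Condition::waitRelative(). A self-contained sketch of the same deadline loop using the std:: equivalents (assumed here to mirror the audio_utils API): wait_until against a fixed deadline replaces the per-iteration remaining-time arithmetic, and the loop re-checks the predicate so spurious wakeups are harmless.

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    class HalStartWaiter {
    public:
        // Returns true if the HAL reported start within the two-second window.
        bool waitForHalStart() {
            std::unique_lock lock(mMutex);
            const auto deadline = std::chrono::steady_clock::now() + std::chrono::seconds(2);
            while (!mHalStarted) {
                if (mCV.wait_until(lock, deadline) == std::cv_status::timeout) {
                    break;
                }
            }
            return mHalStarted;
        }

        // Called by whichever thread observes the output render position moving.
        void onHalStarted() {
            {
                std::lock_guard guard(mMutex);
                mHalStarted = true;
            }
            mCV.notify_all();
        }

    private:
        std::mutex mMutex;
        std::condition_variable mCV;
        bool mHalStarted = false;
    };
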
@@ -1214,24 +1256,28 @@
     // No sleep in standby mode; waits on a condition
 
     // Code snippets that are temporarily lifted up out of threadLoop() until the merge
-    virtual void checkSilentMode_l() final;  // consider unification with MMapThread
+
+    // consider unification with MMapThread
+    virtual void checkSilentMode_l() final REQUIRES(mutex());
 
     // Non-trivial for DUPLICATING only
     virtual     void        saveOutputTracks() { }
     virtual     void        clearOutputTracks() { }
 
     // Cache various calculated values, at threadLoop() entry and after a parameter change
-    virtual     void        cacheParameters_l();
+    virtual void cacheParameters_l() REQUIRES(mutex());
                 void        setCheckOutputStageEffects() override {
                                 mCheckOutputStageEffects.store(true);
                             }
 
-    virtual     uint32_t    correctLatency_l(uint32_t latency) const;
+    virtual uint32_t correctLatency_l(uint32_t latency) const REQUIRES(mutex());
 
     virtual     status_t    createAudioPatch_l(const struct audio_patch *patch,
-                                   audio_patch_handle_t *handle);
-    virtual     status_t    releaseAudioPatch_l(const audio_patch_handle_t handle);
+            audio_patch_handle_t *handle) REQUIRES(mutex());
+    virtual status_t releaseAudioPatch_l(const audio_patch_handle_t handle)
+            REQUIRES(mutex());
 
+    // NO_THREAD_SAFETY_ANALYSIS - fix this to use atomics
     bool usesHwAvSync() const final { return mType == DIRECT && mOutput != nullptr
                                     && mHwSupportsPause
                                     && (mOutput->flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC); }
@@ -1239,22 +1285,23 @@
                 uint32_t    trackCountForUid_l(uid_t uid) const;
 
                 void        invalidateTracksForAudioSession_l(
-    audio_session_t sessionId) const override {
+            audio_session_t sessionId) const override REQUIRES(mutex()) {
                                 ThreadBase::invalidateTracksForAudioSession_l(sessionId, mTracks);
                             }
 
     DISALLOW_COPY_AND_ASSIGN(PlaybackThread);
 
-    status_t addTrack_l(const sp<IAfTrack>& track) final;
-    bool destroyTrack_l(const sp<IAfTrack>& track) final;
+    status_t addTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex());
+    bool destroyTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex());
 
-    void        removeTrack_l(const sp<IAfTrack>& track);
+    void removeTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex());
 
-    void        readOutputParameters_l();
-    MetadataUpdate          updateMetadata_l() final;
-    virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata);
+    void readOutputParameters_l() REQUIRES(mutex());
+    MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
+    virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata)
+            REQUIRES(mutex());
 
-    void        collectTimestamps_l();
+    void collectTimestamps_l() REQUIRES(mutex());
 
     // The Tracks class manages tracks added and removed from the Thread.
     template <typename T>
@@ -1367,7 +1414,8 @@
 
     sp<AsyncCallbackThread>         mCallbackThread;
 
-    Mutex                                    mAudioTrackCbLock;
+    audio_utils::mutex& audioTrackCbMutex() const { return mAudioTrackCbMutex; }
+    mutable audio_utils::mutex mAudioTrackCbMutex;
     // Record of IAudioTrackCallback
     std::map<sp<IAfTrack>, sp<media::IAudioTrackCallback>> mAudioTrackCallbacks;
 
@@ -1388,7 +1436,7 @@
 
     // output stream start detection based on render position returned by the kernel
     // condition signalled when the output stream has started
-    Condition                mWaitHalStartCV;
+    audio_utils::condition_variable mWaitHalStartCV;
     // true when the output stream render position has moved, reset to false in standby
     bool                     mHalStarted = false;
     // last kernel render position saved when entering standby
@@ -1401,7 +1449,7 @@
 
 protected:
                 // accessed by both binder threads and within threadLoop(), lock on mutex needed
-     uint32_t& fastTrackAvailMask_l() final { return mFastTrackAvailMask; }
+     uint32_t& fastTrackAvailMask_l() final REQUIRES(mutex()) { return mFastTrackAvailMask; }
      uint32_t mFastTrackAvailMask;  // bit i set if fast track [i] is available
                 bool        mHwSupportsPause;
                 bool        mHwPaused;
@@ -1470,18 +1518,19 @@
 
     // Thread virtuals
 
-    bool checkForNewParameter_l(const String8& keyValuePair, status_t& status) final;
+    bool checkForNewParameter_l(const String8& keyValuePair, status_t& status) final
+            REQUIRES(mutex());
 
     bool isTrackAllowed_l(
                                     audio_channel_mask_t channelMask, audio_format_t format,
-                                    audio_session_t sessionId, uid_t uid) const final;
+            audio_session_t sessionId, uid_t uid) const final REQUIRES(mutex());
 protected:
     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) override;
     uint32_t idleSleepTimeUs() const final;
     uint32_t suspendSleepTimeUs() const final;
-    void cacheParameters_l() override;
+    void cacheParameters_l() override REQUIRES(mutex());
 
-    void acquireWakeLock_l() final {
+    void acquireWakeLock_l() final REQUIRES(mutex()) {
         PlaybackThread::acquireWakeLock_l();
         if (hasFastMixer()) {
             mFastMixer->setBoottimeOffset(
@@ -1489,18 +1538,19 @@
         }
     }
 
-                void        dumpInternals_l(int fd, const Vector<String16>& args) override;
+    void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
 
     // threadLoop snippets
     ssize_t threadLoop_write() override;
     void threadLoop_standby() override;
     void threadLoop_mix() override;
     void threadLoop_sleepTime() override;
-    uint32_t correctLatency_l(uint32_t latency) const final;
+    uint32_t correctLatency_l(uint32_t latency) const final REQUIRES(mutex());
 
     status_t createAudioPatch_l(
-            const struct audio_patch* patch, audio_patch_handle_t* handle) final;
-    status_t releaseAudioPatch_l(const audio_patch_handle_t handle) final;
+            const struct audio_patch* patch, audio_patch_handle_t* handle)
+            final REQUIRES(mutex());
+    status_t releaseAudioPatch_l(const audio_patch_handle_t handle) final REQUIRES(mutex());
 
                 AudioMixer* mAudioMixer;    // normal mixer
 
@@ -1539,8 +1589,8 @@
                               return mFastMixerDumpState.mTracks[fastIndex].mUnderruns;
                             }
 
-                status_t    threadloop_getHalTimestamp_l(
-                                    ExtendedTimestamp *timestamp) const override {
+    status_t threadloop_getHalTimestamp_l(
+            ExtendedTimestamp *timestamp) const override REQUIRES(mutex()) {
                                 if (mNormalSink.get() != nullptr) {
                                     return mNormalSink->getTimestamp(*timestamp);
                                 }
@@ -1564,16 +1614,16 @@
                 // and blending without limiter is idempotent but inefficient to do twice.
     virtual     bool       requireMonoBlend() { return mMasterMono.load() && !hasFastMixer(); }
 
-                void       setMasterBalance(float balance) override {
+    void setMasterBalance(float balance) override EXCLUDES_ThreadBase_Mutex {
                                mMasterBalance.store(balance);
                                if (hasFastMixer()) {
                                    mFastMixer->setMasterBalance(balance);
                                }
                            }
 
-                void       updateHalSupportedLatencyModes_l();
-                void       onHalLatencyModesChanged_l() override;
-                void       setHalLatencyMode_l() override;
+    void updateHalSupportedLatencyModes_l() REQUIRES(mutex());
+    void onHalLatencyModesChanged_l() override REQUIRES(mutex());
+    void setHalLatencyMode_l() override REQUIRES(mutex());
 };
 
 class DirectOutputThread : public PlaybackThread, public virtual IAfDirectOutputThread {
@@ -1588,35 +1638,35 @@
                        const audio_offload_info_t& offloadInfo)
         : DirectOutputThread(afThreadCallback, output, id, DIRECT, systemReady, offloadInfo) { }
 
-    virtual                 ~DirectOutputThread();
+    ~DirectOutputThread() override;
 
     status_t selectPresentation(int presentationId, int programId) final;
 
     // Thread virtuals
 
     virtual     bool        checkForNewParameter_l(const String8& keyValuePair,
-                                                   status_t& status);
+            status_t& status) REQUIRES(mutex());
 
-                void        flushHw_l() override;
+    void flushHw_l() override REQUIRES(mutex());
 
-                void        setMasterBalance(float balance) override;
+    void setMasterBalance(float balance) override EXCLUDES_ThreadBase_Mutex;
 
 protected:
     virtual     uint32_t    activeSleepTimeUs() const;
     virtual     uint32_t    idleSleepTimeUs() const;
     virtual     uint32_t    suspendSleepTimeUs() const;
-    virtual     void        cacheParameters_l();
+    virtual void cacheParameters_l() REQUIRES(mutex());
 
-                void        dumpInternals_l(int fd, const Vector<String16>& args) override;
+    void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
 
     // threadLoop snippets
-    virtual     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove);
+    virtual mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) REQUIRES(mutex());
     virtual     void        threadLoop_mix();
     virtual     void        threadLoop_sleepTime();
     virtual     void        threadLoop_exit();
-    virtual     bool        shouldStandby_l();
+    virtual bool shouldStandby_l() REQUIRES(mutex());
 
-    virtual     void        onAddNewTrack_l();
+    virtual void onAddNewTrack_l() REQUIRES(mutex());
 
     const       audio_offload_info_t mOffloadInfo;
 
@@ -1626,7 +1676,7 @@
     DirectOutputThread(const sp<IAfThreadCallback>& afThreadCallback, AudioStreamOut* output,
                        audio_io_handle_t id, ThreadBase::type_t type, bool systemReady,
                        const audio_offload_info_t& offloadInfo);
-    void processVolume_l(IAfTrack *track, bool lastTrack);
+    void processVolume_l(IAfTrack *track, bool lastTrack) REQUIRES(mutex());
     bool isTunerStream() const { return (mOffloadInfo.content_id > 0); }
 
     // prepareTracks_l() tells threadLoop_mix() the name of the single active track
@@ -1641,7 +1691,7 @@
 public:
     virtual     bool        hasFastMixer() const { return false; }
 
-    virtual     int64_t     computeWaitTimeNs_l() const override;
+    virtual int64_t computeWaitTimeNs_l() const override REQUIRES(mutex());
 
     status_t    threadloop_getHalTimestamp_l(ExtendedTimestamp *timestamp) const override {
                     // For DIRECT and OFFLOAD threads, query the output sink directly.
@@ -1668,19 +1718,20 @@
                   audio_io_handle_t id, bool systemReady,
                   const audio_offload_info_t& offloadInfo);
     virtual                 ~OffloadThread() {};
-                void        flushHw_l() override;
+    void flushHw_l() final REQUIRES(mutex());
 
 protected:
     // threadLoop snippets
-    virtual     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove);
-    virtual     void        threadLoop_exit();
+    mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final
+            REQUIRES(mutex());
+    void threadLoop_exit() final;
 
-    virtual     bool        waitingAsyncCallback();
-    virtual     bool        waitingAsyncCallback_l();
-    virtual     void        invalidateTracks(audio_stream_type_t streamType);
-                void        invalidateTracks(std::set<audio_port_handle_t>& portIds) override;
+    bool waitingAsyncCallback() final;
+    bool waitingAsyncCallback_l() final REQUIRES(mutex());
+    void invalidateTracks(audio_stream_type_t streamType) final EXCLUDES_ThreadBase_Mutex;
+    void invalidateTracks(std::set<audio_port_handle_t>& portIds) final EXCLUDES_ThreadBase_Mutex;
 
-    virtual     bool        keepWakeLock() const { return (mKeepWakeLock || (mDrainSequence & 1)); }
+    bool keepWakeLock() const final { return (mKeepWakeLock || (mDrainSequence & 1)); }
 
 private:
     size_t      mPausedWriteLength;     // length in bytes of write interrupted by pause
@@ -1715,9 +1766,13 @@
     // setDraining(). The sequence is shifted one bit to the left and the lsb is used
     // to indicate that the callback has been received via resetDraining()
     uint32_t                   mDrainSequence;
-    Condition                  mWaitWorkCV;
-    Mutex                      mLock;
+    audio_utils::condition_variable mWaitWorkCV;
+    mutable audio_utils::mutex mMutex;
     bool                       mAsyncError;
+
+    audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::AsyncCallbackThread_Mutex) {
+        return mMutex;
+    }
 };
 
 class DuplicatingThread : public MixerThread, public IAfDuplicatingThread {
@@ -1732,32 +1787,32 @@
     }
 
     // Thread virtuals
-    void addOutputTrack(IAfPlaybackThread* thread) final;
-    void removeOutputTrack(IAfPlaybackThread* thread) final;
+    void addOutputTrack(IAfPlaybackThread* thread) final EXCLUDES_ThreadBase_Mutex;
+    void removeOutputTrack(IAfPlaybackThread* thread) final EXCLUDES_ThreadBase_Mutex;
     uint32_t waitTimeMs() const final { return mWaitTimeMs; }
 
                 void        sendMetadataToBackend_l(
-                        const StreamOutHalInterface::SourceMetadata& metadata) override;
+            const StreamOutHalInterface::SourceMetadata& metadata) final REQUIRES(mutex());
 protected:
     virtual     uint32_t    activeSleepTimeUs() const;
-                void        dumpInternals_l(int fd, const Vector<String16>& args) override;
+    void dumpInternals_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
 
 private:
                 bool        outputsReady();
 protected:
     // threadLoop snippets
-    virtual     void        threadLoop_mix();
-    virtual     void        threadLoop_sleepTime();
-    virtual     ssize_t     threadLoop_write();
-    virtual     void        threadLoop_standby();
-    virtual     void        cacheParameters_l();
+    void threadLoop_mix() final;
+    void threadLoop_sleepTime() final;
+    ssize_t threadLoop_write() final;
+    void threadLoop_standby() final;
+    void cacheParameters_l() final REQUIRES(mutex());
 
 private:
     // called from threadLoop, addOutputTrack, removeOutputTrack
-    virtual     void        updateWaitTime_l();
+    void updateWaitTime_l() REQUIRES(mutex());
 protected:
-    virtual     void        saveOutputTracks();
-    virtual     void        clearOutputTracks();
+    void saveOutputTracks() final;
+    void clearOutputTracks() final;
 private:
 
                 uint32_t    mWaitTimeMs;
@@ -1766,7 +1821,7 @@
 public:
     virtual     bool        hasFastMixer() const { return false; }
                 status_t    threadloop_getHalTimestamp_l(
-                                    ExtendedTimestamp *timestamp) const override {
+            ExtendedTimestamp *timestamp) const override REQUIRES(mutex()) {
         if (mOutputTracks.size() > 0) {
             // forward the first OutputTrack's kernel information for timestamp.
             const ExtendedTimestamp trackTimestamp =
@@ -1796,11 +1851,11 @@
             // RefBase
     void onFirstRef() final;
 
-    status_t setRequestedLatencyMode(audio_latency_mode_t mode) final;
+    status_t setRequestedLatencyMode(audio_latency_mode_t mode) final EXCLUDES_ThreadBase_Mutex;
 
 protected:
-    void checkOutputStageEffects() final;
-    void setHalLatencyMode_l() final;
+    void checkOutputStageEffects() final EXCLUDES_ThreadBase_Mutex;
+    void setHalLatencyMode_l() final REQUIRES(mutex());
 
 private:
             // Do not request a specific mode by default
@@ -1826,15 +1881,15 @@
     ~RecordThread() override;
 
     // no addTrack_l ?
-    void destroyTrack_l(const sp<IAfRecordTrack>& track) final;
-    void removeTrack_l(const sp<IAfRecordTrack>& track) final;
+    void destroyTrack_l(const sp<IAfRecordTrack>& track) final REQUIRES(mutex());
+    void removeTrack_l(const sp<IAfRecordTrack>& track) final REQUIRES(mutex());
 
     // Thread virtuals
-    bool threadLoop() final;
-    void preExit() final;
+    bool threadLoop() final EXCLUDES_ThreadBase_Mutex;
+    void preExit() final EXCLUDES_ThreadBase_Mutex;
 
     // RefBase
-    void onFirstRef() final;
+    void onFirstRef() final EXCLUDES_ThreadBase_Mutex;
 
     status_t initCheck() const final { return mInput == nullptr ? NO_INIT : NO_ERROR; }
 
@@ -1857,15 +1912,16 @@
                     pid_t tid,
                     status_t *status /*non-NULL*/,
                     audio_port_handle_t portId,
-                    int32_t maxSharedAudioHistoryMs) final;
+                    int32_t maxSharedAudioHistoryMs) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex) EXCLUDES_ThreadBase_Mutex;
 
             status_t start(IAfRecordTrack* recordTrack,
                               AudioSystem::sync_event_t event,
-                              audio_session_t triggerSession) final;
+            audio_session_t triggerSession) final EXCLUDES_ThreadBase_Mutex;
 
             // ask the thread to stop the specified track, and
             // return true if the caller should then do its part of the stopping process
-    bool stop(IAfRecordTrack* recordTrack) final;
+    bool stop(IAfRecordTrack* recordTrack) final EXCLUDES_ThreadBase_Mutex;
     AudioStreamIn* getInput() const final { return mInput; }
     AudioStreamIn* clearInput() final;
 
@@ -1873,27 +1929,30 @@
             virtual sp<StreamHalInterface> stream() const;
 
 
-    virtual bool        checkForNewParameter_l(const String8& keyValuePair,
-                                               status_t& status);
-    virtual void        cacheParameters_l() {}
-    virtual String8     getParameters(const String8& keys);
-    void ioConfigChanged(audio_io_config_event_t event, pid_t pid = 0,
+    virtual bool checkForNewParameter_l(const String8& keyValuePair,
+                                               status_t& status) REQUIRES(mutex());
+    virtual void cacheParameters_l() REQUIRES(mutex()) {}
+    virtual String8 getParameters(const String8& keys) EXCLUDES_ThreadBase_Mutex;
+
+    // Hold either the AudioFlinger::mutex or the ThreadBase::mutex
+    void ioConfigChanged_l(audio_io_config_event_t event, pid_t pid = 0,
             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final;
     virtual status_t    createAudioPatch_l(const struct audio_patch *patch,
-                                           audio_patch_handle_t *handle);
-    virtual status_t    releaseAudioPatch_l(const audio_patch_handle_t handle);
-            void        updateOutDevices(const DeviceDescriptorBaseVector& outDevices) override;
-            void        resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) override;
+            audio_patch_handle_t *handle) REQUIRES(mutex());
+    virtual status_t releaseAudioPatch_l(const audio_patch_handle_t handle) REQUIRES(mutex());
+    void updateOutDevices(const DeviceDescriptorBaseVector& outDevices) override
+            EXCLUDES_ThreadBase_Mutex;
+    void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) override REQUIRES(mutex());
 
-    void addPatchTrack(const sp<IAfPatchRecord>& record) final;
-    void deletePatchTrack(const sp<IAfPatchRecord>& record) final;
+    void addPatchTrack(const sp<IAfPatchRecord>& record) final EXCLUDES_ThreadBase_Mutex;
+    void deletePatchTrack(const sp<IAfPatchRecord>& record) final EXCLUDES_ThreadBase_Mutex;
 
-            void        readInputParameters_l();
-    uint32_t getInputFramesLost() const final;
+    void readInputParameters_l() REQUIRES(mutex());
+    uint32_t getInputFramesLost() const final EXCLUDES_ThreadBase_Mutex;
 
-    virtual status_t addEffectChain_l(const sp<IAfEffectChain>& chain);
-    virtual size_t removeEffectChain_l(const sp<IAfEffectChain>& chain);
-            uint32_t hasAudioSession_l(audio_session_t sessionId) const override {
+    virtual status_t addEffectChain_l(const sp<IAfEffectChain>& chain) REQUIRES(mutex());
+    virtual size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) REQUIRES(mutex());
+    uint32_t hasAudioSession_l(audio_session_t sessionId) const override REQUIRES(mutex()) {
                          return ThreadBase::hasAudioSession_l(sessionId, mTracks);
                      }
 
@@ -1902,7 +1961,8 @@
             // FIXME replace by Set [and implement Bag/Multiset for other uses].
             KeyedVector<audio_session_t, bool> sessionIds() const;
 
-            status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) override;
+    status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) override
+            EXCLUDES_ThreadBase_Mutex;
             bool     isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const override;
 
     static void syncStartEventCallback(const wp<audioflinger::SyncEvent>& event);
@@ -1911,52 +1971,55 @@
     bool hasFastCapture() const final { return mFastCapture != 0; }
     virtual void        toAudioPortConfig(struct audio_port_config *config);
 
-    virtual status_t    checkEffectCompatibility_l(const effect_descriptor_t *desc,
-                                                   audio_session_t sessionId);
+    virtual status_t checkEffectCompatibility_l(const effect_descriptor_t *desc,
+            audio_session_t sessionId) REQUIRES(mutex());
 
-    virtual void        acquireWakeLock_l() {
+    virtual void acquireWakeLock_l() REQUIRES(mutex()) {
                             ThreadBase::acquireWakeLock_l();
-                            mActiveTracks.updatePowerState(this, true /* force */);
+        mActiveTracks.updatePowerState_l(this, true /* force */);
                         }
 
-    void checkBtNrec() final;
+    void checkBtNrec() final EXCLUDES_ThreadBase_Mutex;
 
             // Sets the UID records silence
-    void setRecordSilenced(audio_port_handle_t portId, bool silenced) final;
+    void setRecordSilenced(audio_port_handle_t portId, bool silenced) final
+            EXCLUDES_ThreadBase_Mutex;
 
     status_t getActiveMicrophones(
-            std::vector<media::MicrophoneInfoFw>* activeMicrophones) const final;
-    status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) final;
-    status_t setPreferredMicrophoneFieldDimension(float zoom) final;
+            std::vector<media::MicrophoneInfoFw>* activeMicrophones) const final
+            EXCLUDES_ThreadBase_Mutex;
+    status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) final
+            EXCLUDES_ThreadBase_Mutex;
+    status_t setPreferredMicrophoneFieldDimension(float zoom) final EXCLUDES_ThreadBase_Mutex;
 
-            MetadataUpdate        updateMetadata_l() override;
+    MetadataUpdate updateMetadata_l() override REQUIRES(mutex());
 
     bool fastTrackAvailable() const final { return mFastTrackAvail; }
     void setFastTrackAvailable(bool available) final { mFastTrackAvail = available; }
 
-            bool        isTimestampCorrectionEnabled() const override {
+    bool isTimestampCorrectionEnabled_l() const override REQUIRES(mutex()) {
                             // checks popcount for exactly one device.
                             // Is currently disabled. Before enabling,
                             // verify compressed record timestamps.
                             return audio_is_input_device(mTimestampCorrectedDevice)
-                                    && inDeviceType() == mTimestampCorrectedDevice;
+                && inDeviceType_l() == mTimestampCorrectedDevice;
                         }
 
     status_t shareAudioHistory(const std::string& sharedAudioPackageName,
                                           audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
-            int64_t sharedAudioStartMs = -1) final;
+            int64_t sharedAudioStartMs = -1) final EXCLUDES_ThreadBase_Mutex;
             status_t    shareAudioHistory_l(const std::string& sharedAudioPackageName,
                                           audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
-                                          int64_t sharedAudioStartMs = -1);
-    void resetAudioHistory_l() final;
+            int64_t sharedAudioStartMs = -1) REQUIRES(mutex());
+    void resetAudioHistory_l() final REQUIRES(mutex());
 
     bool isStreamInitialized() const final {
                             return !(mInput == nullptr || mInput->stream == nullptr);
                         }
 
 protected:
-            void        dumpInternals_l(int fd, const Vector<String16>& args) override;
-            void        dumpTracks_l(int fd, const Vector<String16>& args) override;
+    void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
+    void dumpTracks_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
 
 private:
             // Enter standby if not already in standby, and set mStandby flag
@@ -1965,19 +2028,19 @@
             // Call the HAL standby method unconditionally, and don't change mStandby flag
             void    inputStandBy();
 
-            void    checkBtNrec_l();
+    void checkBtNrec_l() REQUIRES(mutex());
 
-            int32_t getOldestFront_l();
-            void    updateFronts_l(int32_t offset);
+    int32_t getOldestFront_l() REQUIRES(mutex());
+    void updateFronts_l(int32_t offset) REQUIRES(mutex());
 
             AudioStreamIn                       *mInput;
             Source                              *mSource;
             SortedVector <sp<IAfRecordTrack>>    mTracks;
             // mActiveTracks has dual roles:  it indicates the current active track(s), and
-            // is used together with mStartStopCond to indicate start()/stop() progress
+            // is used together with mStartStopCV to indicate start()/stop() progress
             ActiveTracks<IAfRecordTrack>           mActiveTracks;
 
-            Condition                           mStartStopCond;
+            audio_utils::condition_variable mStartStopCV;
 
             // resampler converts input at HAL Hz to output at AudioRecord client Hz
             void                               *mRsmpInBuffer;  // size = mRsmpInFramesOA
@@ -2056,16 +2119,16 @@
                                       audio_port_handle_t deviceId,
                                       audio_port_handle_t portId) override;
 
-    void disconnect() final;
+    void disconnect() final EXCLUDES_ThreadBase_Mutex;
 
     // MmapStreamInterface for adapter.
     status_t createMmapBuffer(int32_t minSizeFrames, struct audio_mmap_buffer_info* info) final;
     status_t getMmapPosition(struct audio_mmap_position* position) const override;
     status_t start(const AudioClient& client,
                    const audio_attributes_t *attr,
-            audio_port_handle_t* handle) final;
-    status_t stop(audio_port_handle_t handle) final;
-    status_t standby() final;
+            audio_port_handle_t* handle) final EXCLUDES_ThreadBase_Mutex;
+    status_t stop(audio_port_handle_t handle) final EXCLUDES_ThreadBase_Mutex;
+    status_t standby() final EXCLUDES_ThreadBase_Mutex;
     status_t getExternalPosition(uint64_t* position, int64_t* timeNanos) const = 0;
     status_t reportData(const void* buffer, size_t frameCount) override;
 
@@ -2073,69 +2136,77 @@
     void onFirstRef() final;
 
     // Thread virtuals
-    bool threadLoop() final;
+    bool threadLoop() final EXCLUDES_ThreadBase_Mutex;
 
     // Not in ThreadBase
     virtual void threadLoop_exit() final;
     virtual void threadLoop_standby() final;
-    virtual bool shouldStandby_l() final { return false; }
-    virtual status_t exitStandby_l() REQUIRES(mLock);
+    virtual bool shouldStandby_l() final REQUIRES(mutex()) { return false; }
+    virtual status_t exitStandby_l() REQUIRES(mutex());
 
     status_t initCheck() const final { return mHalStream == nullptr ? NO_INIT : NO_ERROR; }
     size_t frameCount() const final { return mFrameCount; }
-    bool checkForNewParameter_l(const String8& keyValuePair, status_t& status) final;
-    String8 getParameters(const String8& keys) final;
-    void ioConfigChanged(audio_io_config_event_t event, pid_t pid = 0,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final;
-                void        readHalParameters_l();
-    void cacheParameters_l() final {}
+    bool checkForNewParameter_l(const String8& keyValuePair, status_t& status)
+            final REQUIRES(mutex());
+    String8 getParameters(const String8& keys) final EXCLUDES_ThreadBase_Mutex;
+    void ioConfigChanged_l(audio_io_config_event_t event, pid_t pid = 0,
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) final
+            /* holds either AF::mutex or TB::mutex */;
+    void readHalParameters_l() REQUIRES(mutex());
+    void cacheParameters_l() final REQUIRES(mutex()) {}
     status_t createAudioPatch_l(
-            const struct audio_patch* patch, audio_patch_handle_t* handle) final;
-    status_t releaseAudioPatch_l(const audio_patch_handle_t handle) final;
+            const struct audio_patch* patch, audio_patch_handle_t* handle) final
+            REQUIRES(mutex());
+    status_t releaseAudioPatch_l(const audio_patch_handle_t handle) final
+            REQUIRES(mutex());
     void toAudioPortConfig(struct audio_port_config* config) override;
 
     sp<StreamHalInterface> stream() const final { return mHalStream; }
-    status_t addEffectChain_l(const sp<IAfEffectChain>& chain) final;
-    size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) final;
+    status_t addEffectChain_l(const sp<IAfEffectChain>& chain) final REQUIRES(mutex());
+    size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) final REQUIRES(mutex());
     status_t checkEffectCompatibility_l(
-            const effect_descriptor_t *desc, audio_session_t sessionId) final;
+            const effect_descriptor_t *desc, audio_session_t sessionId) final REQUIRES(mutex());
 
-    uint32_t hasAudioSession_l(audio_session_t sessionId) const override {
+    uint32_t hasAudioSession_l(audio_session_t sessionId) const override REQUIRES(mutex()) {
                                 // Note: using mActiveTracks as no mTracks here.
                                 return ThreadBase::hasAudioSession_l(sessionId, mActiveTracks);
                             }
     status_t setSyncEvent(const sp<audioflinger::SyncEvent>& event) final;
     bool isValidSyncEvent(const sp<audioflinger::SyncEvent>& event) const final;
 
-    virtual void checkSilentMode_l() {} // cannot be const (RecordThread)
-    virtual void processVolume_l() {}
+    virtual void checkSilentMode_l() REQUIRES(mutex()) {} // cannot be const (RecordThread)
+    virtual void processVolume_l() REQUIRES(mutex()) {}
                 void        checkInvalidTracks_l();
 
     // Not in ThreadBase
     virtual audio_stream_type_t streamType() const { return AUDIO_STREAM_DEFAULT; }
-    virtual void invalidateTracks(audio_stream_type_t /* streamType */) {}
-    void invalidateTracks(std::set<audio_port_handle_t>& /* portIds */) override {}
+    virtual void invalidateTracks(audio_stream_type_t /* streamType */)
+            EXCLUDES_ThreadBase_Mutex {}
+    void invalidateTracks(std::set<audio_port_handle_t>& /* portIds */) override
+            EXCLUDES_ThreadBase_Mutex {}
 
                 // Sets the UID records silence
     void setRecordSilenced(
-            audio_port_handle_t /* portId */, bool /* silenced */) override {}
+            audio_port_handle_t /* portId */, bool /* silenced */) override
+            EXCLUDES_ThreadBase_Mutex {}
 
     bool isStreamInitialized() const override { return false; }
 
-                void        setClientSilencedState_l(audio_port_handle_t portId, bool silenced) {
+    void setClientSilencedState_l(audio_port_handle_t portId, bool silenced) REQUIRES(mutex()) {
                                 mClientSilencedStates[portId] = silenced;
                             }
 
-                size_t      eraseClientSilencedState_l(audio_port_handle_t portId) {
+    size_t eraseClientSilencedState_l(audio_port_handle_t portId) REQUIRES(mutex()) {
                                 return mClientSilencedStates.erase(portId);
                             }
 
-                bool        isClientSilenced_l(audio_port_handle_t portId) const {
+    bool isClientSilenced_l(audio_port_handle_t portId) const REQUIRES(mutex()) {
                                 const auto it = mClientSilencedStates.find(portId);
                                 return it != mClientSilencedStates.end() ? it->second : false;
                             }
 
-                void        setClientSilencedIfExists_l(audio_port_handle_t portId, bool silenced) {
+    void setClientSilencedIfExists_l(audio_port_handle_t portId, bool silenced)
+            REQUIRES(mutex()) {
                                 const auto it = mClientSilencedStates.find(portId);
                                 if (it != mClientSilencedStates.end()) {
                                     it->second = silenced;
@@ -2143,8 +2214,8 @@
                             }
 
  protected:
-    void dumpInternals_l(int fd, const Vector<String16>& args) override;
-    void dumpTracks_l(int fd, const Vector<String16>& args) final;
+    void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
+    void dumpTracks_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
 
                 /**
                  * @brief mDeviceId  current device port unique identifier
@@ -2184,26 +2255,27 @@
                                       audio_port_handle_t deviceId,
                                       audio_port_handle_t portId) final;
 
-    AudioStreamOut* clearOutput() final;
+    AudioStreamOut* clearOutput() final EXCLUDES_ThreadBase_Mutex;
 
                 // VolumeInterface
     void setMasterVolume(float value) final;
-    void setMasterBalance(float /* value */) final {}  // Needs implementation?
+    // Needs implementation?
+    void setMasterBalance(float /* value */) final EXCLUDES_ThreadBase_Mutex {}
     void setMasterMute(bool muted) final;
-    void setStreamVolume(audio_stream_type_t stream, float value) final;
-    void setStreamMute(audio_stream_type_t stream, bool muted) final;
-    float streamVolume(audio_stream_type_t stream) const final;
+    void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
+    void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
+    float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
 
                 void        setMasterMute_l(bool muted) { mMasterMute = muted; }
 
-    void invalidateTracks(audio_stream_type_t streamType) final;
-    void invalidateTracks(std::set<audio_port_handle_t>& portIds) final;
+    void invalidateTracks(audio_stream_type_t streamType) final EXCLUDES_ThreadBase_Mutex;
+    void invalidateTracks(std::set<audio_port_handle_t>& portIds) final EXCLUDES_ThreadBase_Mutex;
 
     audio_stream_type_t streamType() const final { return mStreamType; }
-    void checkSilentMode_l() final;
-    void processVolume_l() final;
+    void checkSilentMode_l() final REQUIRES(mutex());
+    void processVolume_l() final REQUIRES(mutex());
 
-    MetadataUpdate updateMetadata_l() final;
+    MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
 
     void toAudioPortConfig(struct audio_port_config* config) final;
 
@@ -2215,17 +2287,24 @@
 
     status_t reportData(const void* buffer, size_t frameCount) final;
 
-    void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) final;
-    void stopMelComputation_l() final;
+    void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
+    void stopMelComputation_l() final
+            REQUIRES(audio_utils::AudioFlinger_Mutex);
 
 protected:
-    void dumpInternals_l(int fd, const Vector<String16>& args) final;
+    void dumpInternals_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
+                float       streamVolume_l() const {
+                    return mStreamTypes[mStreamType].volume;
+                }
+                bool     streamMuted_l() const {
+                    return mStreamTypes[mStreamType].mute;
+                }
 
+                stream_type_t               mStreamTypes[AUDIO_STREAM_CNT];
                 audio_stream_type_t         mStreamType;
                 float                       mMasterVolume;
-                float                       mStreamVolume;
                 bool                        mMasterMute;
-                bool                        mStreamMute;
                 AudioStreamOut*             mOutput;
 
                 mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
@@ -2241,13 +2320,14 @@
         return sp<IAfMmapCaptureThread>::fromExisting(this);
     }
 
-    AudioStreamIn* clearInput() final;
+    AudioStreamIn* clearInput() final EXCLUDES_ThreadBase_Mutex;
 
-    status_t exitStandby_l() REQUIRES(mLock) final;
+    status_t exitStandby_l() REQUIRES(mutex()) final;
 
-    MetadataUpdate updateMetadata_l() final;
-    void processVolume_l() final;
-    void setRecordSilenced(audio_port_handle_t portId, bool silenced) final;
+    MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
+    void processVolume_l() final REQUIRES(mutex());
+    void setRecordSilenced(audio_port_handle_t portId, bool silenced) final
+            EXCLUDES_ThreadBase_Mutex;
 
     void toAudioPortConfig(struct audio_port_config* config) final;
 
@@ -2268,7 +2348,7 @@
                      audio_io_handle_t id, bool systemReady);
 
 protected:
-    mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final;
+    mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final REQUIRES(mutex());
     void threadLoop_mix() final;
 
 private:
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index ecea9eb..0dbb502 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -277,7 +277,7 @@
     mCblkMemory.clear();    // free the shared memory before releasing the heap it belongs to
     if (mClient != 0) {
         // Client destructor must run with AudioFlinger client mutex locked
-        Mutex::Autolock _l(mClient->afClientCallback()->clientMutex());
+        audio_utils::lock_guard _l(mClient->afClientCallback()->clientMutex());
         // If the client's reference count drops to zero, the associated destructor
         // must run with AudioFlinger lock held. Thus the explicit clear() rather than
         // relying on the automatic clear() at end of scope.
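
Tracks.cpp replaces every Mutex::Autolock with audio_utils::lock_guard. For the thread-safety analysis to keep working across that swap, the RAII guard itself must carry an annotation; the sketch below shows a guard of that shape under stated assumptions (macro spellings from the Clang documentation; AnnotatedMutex and ClientRegistry are hypothetical stand-ins for audio_utils::mutex and the AudioFlinger client bookkeeping).

    // Compile check: clang++ -std=c++17 -Wthread-safety -fsyntax-only guard_sketch.cpp
    #include <mutex>

    #define CAPABILITY(x)     __attribute__((capability(x)))
    #define SCOPED_CAPABILITY __attribute__((scoped_lockable))
    #define ACQUIRE(...)      __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)      __attribute__((release_capability(__VA_ARGS__)))
    #define GUARDED_BY(x)     __attribute__((guarded_by(x)))

    class CAPABILITY("mutex") AnnotatedMutex {
    public:
        void lock() ACQUIRE() { mImpl.lock(); }
        void unlock() RELEASE() { mImpl.unlock(); }
    private:
        std::mutex mImpl;
    };

    // SCOPED_CAPABILITY tells the analysis the mutex passed to the constructor is
    // held for the guard's lifetime, so guarded members can be touched in scope.
    class SCOPED_CAPABILITY ScopedGuard {
    public:
        explicit ScopedGuard(AnnotatedMutex& m) ACQUIRE(m) : mMutex(m) { m.lock(); }
        ~ScopedGuard() RELEASE() { mMutex.unlock(); }
    private:
        AnnotatedMutex& mMutex;
    };

    struct ClientRegistry {
        AnnotatedMutex mutex;
        int clientCount GUARDED_BY(mutex) = 0;

        void releaseClient() {
            ScopedGuard guard(mutex);   // analogous to audio_utils::lock_guard _l(...)
            --clientCount;              // provably under 'mutex'
        }
    };
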
@@ -568,9 +568,7 @@
     getPackagesForUid(uid, packages);
     if (isServiceUid(uid)) {
         if (packages.isEmpty()) {
-            ALOGD("OpPlayAudio: not muting track:%d usage:%d for service UID %d",
-                  id,
-                  attr.usage,
+            ALOGW("OpPlayAudio: not muting track:%d usage:%d for service UID %d", id, attr.usage,
                   uid);
             return nullptr;
         }
@@ -594,7 +592,6 @@
                                        audio_usage_t usage, int id, uid_t uid)
     : mThread(wp<IAfThreadBase>::fromExisting(thread)),
       mHasOpPlayAudio(true),
-      mAttributionSource(attributionSource),
       mUsage((int32_t)usage),
       mId(id),
       mUid(uid),
@@ -614,10 +611,11 @@
     // make sure not to broadcast the initial state since it is not needed and could
     // cause a deadlock since this method can be called with the mThread->mLock held
     checkPlayAudioForUsage(/*doBroadcast=*/false);
-    if (mAttributionSource.packageName.has_value()) {
+    if (mPackageName.size()) {
         mOpCallback = new PlayAudioOpCallback(this);
-        mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO,
-                mPackageName, mOpCallback);
+        mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO, mPackageName, mOpCallback);
+    } else {
+        ALOGW("Skipping OpPlayAudioMonitor due to null package name");
     }
 }
 
@@ -628,21 +626,21 @@
 // Note this method is never called (and never to be) for audio server / patch record track
 // - not called from constructor due to check on UID,
 // - not called from PlayAudioOpCallback because the callback is not installed in this case
-void OpPlayAudioMonitor::checkPlayAudioForUsage(bool doBroadcast)
-{
-    const bool hasAppOps = mAttributionSource.packageName.has_value()
-        && mAppOpsManager.checkAudioOpNoThrow(
-                AppOpsManager::OP_PLAY_AUDIO, mUsage, mUid, mPackageName) ==
-                        AppOpsManager::MODE_ALLOWED;
+void OpPlayAudioMonitor::checkPlayAudioForUsage(bool doBroadcast) {
+    const bool hasAppOps =
+            mPackageName.size() &&
+            mAppOpsManager.checkAudioOpNoThrow(AppOpsManager::OP_PLAY_AUDIO, mUsage, mUid,
+                                               mPackageName) == AppOpsManager::MODE_ALLOWED;
 
     bool shouldChange = !hasAppOps;  // check if we need to update.
     if (mHasOpPlayAudio.compare_exchange_strong(shouldChange, hasAppOps)) {
-        ALOGD("OpPlayAudio: track:%d usage:%d %smuted", mId, mUsage, hasAppOps ? "not " : "");
+        ALOGI("OpPlayAudio: track:%d package:%s usage:%d %smuted", mId,
+              String8(mPackageName).c_str(), mUsage, hasAppOps ? "not " : "");
         if (doBroadcast) {
             auto thread = mThread.promote();
             if (thread != nullptr && thread->type() == IAfThreadBase::OFFLOAD) {
                 // Wake up Thread if offloaded, otherwise it may be several seconds for update.
-                Mutex::Autolock _l(thread->mutex());
+                audio_utils::lock_guard _l(thread->mutex());
                 thread->broadcast_l();
             }
         }
@@ -655,11 +653,11 @@
 
 void OpPlayAudioMonitor::PlayAudioOpCallback::opChanged(int32_t op,
             const String16& packageName) {
-    // we only have uid, so we need to check all package names anyway
-    UNUSED(packageName);
     if (op != AppOpsManager::OP_PLAY_AUDIO) {
         return;
     }
+
+    ALOGI("%s OP_PLAY_AUDIO callback received for %s", __func__, String8(packageName).c_str());
     sp<OpPlayAudioMonitor> monitor = mMonitor.promote();
     if (monitor != NULL) {
         monitor->checkPlayAudioForUsage(/*doBroadcast=*/true);
@@ -889,7 +887,7 @@
         bool wasActive = false;
         const sp<IAfThreadBase> thread = mThread.promote();
         if (thread != 0) {
-            Mutex::Autolock _l(thread->mutex());
+            audio_utils::lock_guard _l(thread->mutex());
             auto* const playbackThread = thread->asIAfPlaybackThread().get();
             wasActive = playbackThread->destroyTrack_l(this);
             forEachTeePatchTrack_l([](const auto& patchTrack) { patchTrack->destroy(); });
@@ -1169,16 +1167,18 @@
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
         if (isOffloaded()) {
-            Mutex::Autolock _laf(thread->afThreadCallback()->mutex());
-            Mutex::Autolock _lth(thread->mutex());
+            audio_utils::lock_guard _laf(thread->afThreadCallback()->mutex());
+            const bool nonOffloadableGlobalEffectEnabled =
+                    thread->afThreadCallback()->isNonOffloadableGlobalEffectEnabled_l();
+            audio_utils::lock_guard _lth(thread->mutex());
             sp<IAfEffectChain> ec = thread->getEffectChain_l(mSessionId);
-            if (thread->afThreadCallback()->isNonOffloadableGlobalEffectEnabled_l() ||
+            if (nonOffloadableGlobalEffectEnabled ||
                     (ec != 0 && ec->isNonOffloadableEnabled())) {
                 invalidate();
                 return PERMISSION_DENIED;
             }
         }
-        Mutex::Autolock _lth(thread->mutex());
+        audio_utils::lock_guard _lth(thread->mutex());
         track_state state = mState;
         // here the track could be either new, or restarted
         // in both cases "unstop" the track
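
In the restart path above, the value that needs the AudioFlinger mutex is now read while only that mutex is held, and the thread mutex is taken afterwards in the same fixed outer-then-inner order. A generic sketch of that shape follows; the types and the shouldRejectOffload name are illustrative, not the AudioFlinger ones.

    #include <mutex>

    struct Flinger {                       // stands in for the AudioFlinger callback
        std::mutex mutex;
        bool globalEffectEnabled = false;  // guarded by 'mutex'
    };

    struct PlayerThread {                  // stands in for the playback thread
        std::mutex mutex;
        bool localEffectEnabled = false;   // guarded by 'mutex'
    };

    bool shouldRejectOffload(Flinger& af, PlayerThread& t) {
        std::lock_guard afLock(af.mutex);                   // process-wide lock first
        const bool globalEnabled = af.globalEffectEnabled;  // snapshot while only it is held
        std::lock_guard threadLock(t.mutex);                // then the per-thread lock
        return globalEnabled || t.localEffectEnabled;
    }
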
@@ -1303,7 +1303,7 @@
     ALOGV("%s(%d): calling pid %d", __func__, mId, IPCThreadState::self()->getCallingPid());
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
-        Mutex::Autolock _l(thread->mutex());
+        audio_utils::lock_guard _l(thread->mutex());
         track_state state = mState;
         if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) {
             // If the track is not active (PAUSED and buffers full), flush buffers
@@ -1336,7 +1336,7 @@
     ALOGV("%s(%d): calling pid %d", __func__, mId, IPCThreadState::self()->getCallingPid());
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
-        Mutex::Autolock _l(thread->mutex());
+        audio_utils::lock_guard _l(thread->mutex());
         auto* const playbackThread = thread->asIAfPlaybackThread().get();
         switch (mState) {
         case STOPPING_1:
@@ -1373,7 +1373,7 @@
     ALOGV("%s(%d)", __func__, mId);
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
-        Mutex::Autolock _l(thread->mutex());
+        audio_utils::lock_guard _l(thread->mutex());
         auto* const playbackThread = thread->asIAfPlaybackThread().get();
 
         // Flush the ring buffer now if the track is not active in the PlaybackThread.
@@ -1505,7 +1505,7 @@
         // Signal thread to fetch new volume.
         const sp<IAfThreadBase> thread = mThread.promote();
         if (thread != 0) {
-            Mutex::Autolock _l(thread->mutex());
+            audio_utils::lock_guard _l(thread->mutex());
             thread->broadcast_l();
         }
     }
@@ -1640,22 +1640,18 @@
         if (mMuteEventExtras == nullptr) {
             mMuteEventExtras = std::make_unique<os::PersistableBundle>();
         }
-        mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey),
-                                 static_cast<int>(muteState));
+        mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey), static_cast<int>(muteState));
 
-        result = audioManager->portEvent(mPortId,
-                                         PLAYER_UPDATE_MUTED,
-                                         mMuteEventExtras);
+        result = audioManager->portEvent(mPortId, PLAYER_UPDATE_MUTED, mMuteEventExtras);
     }
 
     if (result == OK) {
+        ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
+              int(muteState), int(mMuteState));
         mMuteState = muteState;
     } else {
-        ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
-              __func__,
-              id(),
-              mPortId,
-              result);
+        ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d", __func__, id(),
+              mPortId, result);
     }
 }
 
@@ -1669,7 +1665,7 @@
         return INVALID_OPERATION;
     }
 
-    Mutex::Autolock _l(thread->mutex());
+    audio_utils::lock_guard _l(thread->mutex());
     auto* const playbackThread = thread->asIAfPlaybackThread().get();
     return playbackThread->getTimestamp_l(timestamp);
 }
@@ -1872,7 +1868,7 @@
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
         auto* const t = thread->asIAfPlaybackThread().get();
-        Mutex::Autolock _l(t->mutex());
+        audio_utils::lock_guard _l(t->mutex());
         t->broadcast_l();
     }
 }
@@ -1884,7 +1880,7 @@
         const sp<IAfThreadBase> thread = mThread.promote();
         if (thread != nullptr) {
             auto* const t = thread->asIAfPlaybackThread().get();
-            Mutex::Autolock _l(t->mutex());
+            audio_utils::lock_guard _l(t->mutex());
             status = t->getOutput_l()->stream->getDualMonoMode(mode);
             ALOGD_IF((status == NO_ERROR) && (mDualMonoMode != *mode),
                     "%s: mode %d inconsistent", __func__, mDualMonoMode);
@@ -1900,7 +1896,7 @@
         const sp<IAfThreadBase> thread = mThread.promote();
         if (thread != nullptr) {
             auto* const t = thread->asIAfPlaybackThread().get();
-            Mutex::Autolock lock(t->mutex());
+            audio_utils::lock_guard lock(t->mutex());
             status = t->getOutput_l()->stream->setDualMonoMode(mode);
             if (status == NO_ERROR) {
                 mDualMonoMode = mode;
@@ -1917,7 +1913,7 @@
         sp<IAfThreadBase> thread = mThread.promote();
         if (thread != nullptr) {
             auto* const t = thread->asIAfPlaybackThread().get();
-            Mutex::Autolock lock(t->mutex());
+            audio_utils::lock_guard lock(t->mutex());
             status = t->getOutput_l()->stream->getAudioDescriptionMixLevel(leveldB);
             ALOGD_IF((status == NO_ERROR) && (mAudioDescriptionMixLevel != *leveldB),
                     "%s: level %.3f inconsistent", __func__, mAudioDescriptionMixLevel);
@@ -1933,7 +1929,7 @@
         const sp<IAfThreadBase> thread = mThread.promote();
         if (thread != nullptr) {
             auto* const t = thread->asIAfPlaybackThread().get();
-            Mutex::Autolock lock(t->mutex());
+            audio_utils::lock_guard lock(t->mutex());
             status = t->getOutput_l()->stream->setAudioDescriptionMixLevel(leveldB);
             if (status == NO_ERROR) {
                 mAudioDescriptionMixLevel = leveldB;
@@ -1951,7 +1947,7 @@
         const sp<IAfThreadBase> thread = mThread.promote();
         if (thread != nullptr) {
             auto* const t = thread->asIAfPlaybackThread().get();
-            Mutex::Autolock lock(t->mutex());
+            audio_utils::lock_guard lock(t->mutex());
             status = t->getOutput_l()->stream->getPlaybackRateParameters(playbackRate);
             ALOGD_IF((status == NO_ERROR) &&
                     !isAudioPlaybackRateEqual(mPlaybackRateParameters, *playbackRate),
@@ -1969,7 +1965,7 @@
         const sp<IAfThreadBase> thread = mThread.promote();
         if (thread != nullptr) {
             auto* const t = thread->asIAfPlaybackThread().get();
-            Mutex::Autolock lock(t->mutex());
+            audio_utils::lock_guard lock(t->mutex());
             status = t->getOutput_l()->stream->setPlaybackRateParameters(playbackRate);
             if (status == NO_ERROR) {
                 mPlaybackRateParameters = playbackRate;
@@ -2092,7 +2088,7 @@
     const sp<IAfThreadBase> thread = mTrack->mThread.promote();
     if (thread != 0) {
         // Lock for updating mHapticPlaybackEnabled.
-        Mutex::Autolock _l(thread->mutex());
+        audio_utils::lock_guard _l(thread->mutex());
         auto* const playbackThread = thread->asIAfPlaybackThread().get();
         if ((mTrack->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
                 && playbackThread->hapticChannelCount() > 0) {
@@ -2355,13 +2351,13 @@
 
 void OutputTrack::copyMetadataTo(MetadataInserter& backInserter) const
 {
-    std::lock_guard<std::mutex> lock(mTrackMetadatasMutex);
+    audio_utils::lock_guard lock(trackMetadataMutex());
     backInserter = std::copy(mTrackMetadatas.begin(), mTrackMetadatas.end(), backInserter);
 }
 
 void OutputTrack::setMetadatas(const SourceMetadatas& metadatas) {
     {
-        std::lock_guard<std::mutex> lock(mTrackMetadatasMutex);
+        audio_utils::lock_guard lock(trackMetadataMutex());
         mTrackMetadatas = metadatas;
     }
     // No need to adjust metadata track volumes as OutputTrack volumes are always 0dBFS.
@@ -2848,7 +2844,7 @@
         track_state priorState = mState;
         const sp<IAfThreadBase> thread = mThread.promote();
         if (thread != 0) {
-            Mutex::Autolock _l(thread->mutex());
+            audio_utils::lock_guard _l(thread->mutex());
             auto* const recordThread = thread->asIAfRecordThread().get();
             priorState = mState;
             if (!mSharedAudioPackageName.empty()) {
@@ -3272,7 +3268,7 @@
     *thread = mThread.promote();
     if (!*thread) return nullptr;
     auto* const recordThread = (*thread)->asIAfRecordThread().get();
-    Mutex::Autolock _l(recordThread->mutex());
+    audio_utils::lock_guard _l(recordThread->mutex());
     return recordThread->getInput() ? recordThread->getInput()->stream : nullptr;
 }
 
@@ -3309,7 +3305,7 @@
     }
 
     {
-        std::lock_guard<std::mutex> lock(mReadLock);
+        audio_utils::lock_guard lock(readMutex());
         mReadBytes += bytesRead;
         mReadError = NO_ERROR;
     }
@@ -3336,7 +3332,7 @@
 stream_error:
     stream->standby();
     {
-        std::lock_guard<std::mutex> lock(mReadLock);
+        audio_utils::lock_guard lock(readMutex());
         mReadError = result;
     }
     mReadCV.notify_one();
@@ -3365,7 +3361,7 @@
 {
     bytes = std::min(bytes, mFrameCount * mFrameSize);
     {
-        std::unique_lock<std::mutex> lock(mReadLock);
+        audio_utils::unique_lock lock(readMutex());
         mReadCV.wait(lock, [&]{ return mReadError != NO_ERROR || mReadBytes != 0; });
         if (mReadError != NO_ERROR) {
             mLastReadFrames = 0;
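The hunks above replace Mutex::Autolock and the std locking primitives with the audio_utils RAII wrappers. As a point of reference only, here is a minimal standalone sketch of the same guard-plus-condition-variable pattern written with standard library types, on the assumption that audio_utils::lock_guard and audio_utils::unique_lock are annotated RAII equivalents of std::lock_guard and std::unique_lock:

    #include <condition_variable>
    #include <mutex>

    std::mutex gReadMutex;
    std::condition_variable gReadCV;
    int gReadBytes = 0;
    int gReadError = 0;   // 0 stands in for NO_ERROR

    void producerSide(int bytesRead) {
        {
            std::lock_guard lock(gReadMutex);   // scoped lock, released at the closing brace
            gReadBytes += bytesRead;
            gReadError = 0;
        }
        gReadCV.notify_one();                   // wake the reader outside the lock
    }

    void consumerSide() {
        std::unique_lock lock(gReadMutex);      // unique_lock is required for wait()
        gReadCV.wait(lock, [] { return gReadError != 0 || gReadBytes != 0; });
    }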
diff --git a/services/audioflinger/afutils/DumpTryLock.h b/services/audioflinger/afutils/DumpTryLock.h
index c185a68..e4ad112 100644
--- a/services/audioflinger/afutils/DumpTryLock.h
+++ b/services/audioflinger/afutils/DumpTryLock.h
@@ -17,7 +17,9 @@
 
 #pragma once
 
+#include <audio_utils/mutex.h>
 #include <utils/Mutex.h>
+#include <utils/Timers.h>
 
 namespace android::afutils {
 
@@ -28,4 +30,19 @@
     return err == NO_ERROR;
 }
 
-}  // android::afutils
\ No newline at end of file
+// Note: the std::timed_mutex try_lock_for and try_lock_until methods are inefficient.
+// It is better to use audio_utils::mutex and call this method.
+//
+inline bool dumpTryLock(audio_utils::mutex& mutex) TRY_ACQUIRE(true, mutex)
+{
+    static constexpr int64_t kDumpLockTimeoutNs = 1'000'000'000;
+
+    const int64_t timeoutNs = kDumpLockTimeoutNs + systemTime(SYSTEM_TIME_REALTIME);
+    const struct timespec ts = {
+        .tv_sec = static_cast<time_t>(timeoutNs / 1000000000),
+        .tv_nsec = static_cast<long>(timeoutNs % 1000000000),
+    };
+    return pthread_mutex_timedlock(mutex.native_handle(), &ts) == 0;
+}
+
+}  // android::afutils
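The new dumpTryLock overload converts a relative timeout into an absolute CLOCK_REALTIME deadline and hands it to pthread_mutex_timedlock via the mutex's native handle. For reference, the same timed-lock idiom on a bare pthread_mutex_t (a standalone sketch, not the AOSP helper):

    #include <pthread.h>
    #include <time.h>
    #include <cstdint>

    // Acquire 'm' or give up after 'timeoutNs' nanoseconds (wall-clock based,
    // like the helper above).
    bool tryLockFor(pthread_mutex_t* m, int64_t timeoutNs) {
        struct timespec now;
        clock_gettime(CLOCK_REALTIME, &now);            // timedlock expects CLOCK_REALTIME
        const int64_t deadlineNs =
                now.tv_sec * 1'000'000'000LL + now.tv_nsec + timeoutNs;
        const struct timespec deadline = {
            .tv_sec  = static_cast<time_t>(deadlineNs / 1'000'000'000),
            .tv_nsec = static_cast<long>(deadlineNs % 1'000'000'000),
        };
        return pthread_mutex_timedlock(m, &deadline) == 0;  // true iff acquired
    }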
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index da0df5f..b954bfc 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -270,6 +270,10 @@
     virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes) = 0;
     virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
 
+    virtual status_t updatePolicyMix(
+        const AudioMix& mix,
+        const std::vector<AudioMixMatchCriterion>& updatedCriteria) = 0;
+
     virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
             = 0;
     virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index fa3a5d3..a2ebb8d 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -34,5 +34,18 @@
     {
        "name": "audiopolicy_tests"
     }
+  ],
+  "postsubmit": [
+    {
+      "name": "GtsGmscoreHostTestCases",
+      "options" : [
+        {
+          "include-filter": "com.google.android.gts.audio.AudioHostTest"
+        },
+        {
+          "include-filter": "com.google.android.gts.audio.AudioPolicyHostTest"
+        }
+      ]
+    }
   ]
 }
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 1e57edd..13b70e5 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -328,6 +328,13 @@
     wp<AudioPolicyMix> mPolicyMix;  // non NULL when used by a dynamic policy
 
     virtual uint32_t getRecommendedMuteDurationMs() const { return 0; }
+    virtual std::string info() const {
+        std::string result;
+        result.append("[portId:" );
+        result.append(android::internal::ToString(getId()));
+        result.append("]");
+        return result;
+    }
 
 protected:
     const sp<PolicyAudioPort> mPolicyAudioPort;
@@ -471,6 +478,8 @@
 
     PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
 
+    virtual std::string info() const override;
+
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 7e29e10..b560bc4 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -59,6 +59,8 @@
 
     status_t unregisterMix(const AudioMix& mix);
 
+    status_t updateMix(const AudioMix& mix, const std::vector<AudioMixMatchCriterion>& newCriteria);
+
     void closeOutput(sp<SwAudioOutputDescriptor> &desc);
 
     /**
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index 59eee52..c2e4b11 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -25,6 +25,10 @@
 
 namespace android {
 
+class AudioInputCollection;
+class AudioInputDescriptor;
+class AudioPolicyClientInterface;
+
 class EffectDescriptor : public RefBase
 {
 public:
@@ -40,6 +44,8 @@
 
     int mId;                   // effect unique ID
     audio_io_handle_t mIo;     // io the effect is attached to
+    bool mIsOrphan = false;    // true when detached from any io; a newly created effect is not an orphan
+    bool mEnabledWhenMoved = false;    // Backup enabled state before being moved
     audio_session_t mSession;  // audio session the effect is on
     effect_descriptor_t mDesc; // effect descriptor
     bool mEnabled;             // enabled state: CPU load being used or not
@@ -69,12 +75,29 @@
 
     void moveEffects(audio_session_t session,
                      audio_io_handle_t srcOutput,
-                     audio_io_handle_t dstOutput);
+                     audio_io_handle_t dstOutput,
+                     AudioPolicyClientInterface *clientInterface);
     void moveEffects(const std::vector<int>& ids, audio_io_handle_t dstOutput);
+    void moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo, audio_io_handle_t dstIo,
+            const AudioInputCollection *inputs, AudioPolicyClientInterface *clientInterface);
+    void moveEffectsForIo(audio_session_t sessionId, audio_io_handle_t dstIo,
+            const AudioInputCollection *inputs, AudioPolicyClientInterface *clientInterface);
+    void putOrphanEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
+            const AudioInputCollection *inputs, AudioPolicyClientInterface *clientInterface);
+    void putOrphanEffectsForIo(audio_io_handle_t srcIo);
 
+    /**
+     * @brief Checks whether an effect session is already attached to an io handle and returns
+     * that handle if found. The check is limited to the given effect type when effectType is
+     * not null, and covers any effect otherwise.
+     * @param sessionId session to consider.
+     * @param effectType effect type to consider, or nullptr for any effect.
+     * @return ioHandle if found, AUDIO_IO_HANDLE_NONE otherwise.
+     */
     audio_io_handle_t getIoForSession(audio_session_t sessionId,
                                       const effect_uuid_t *effectType = nullptr);
-
+    bool hasOrphansForSession(audio_session_t sessionId);
+    EffectDescriptorCollection getOrphanEffectsForSession(audio_session_t sessionId) const;
     void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
 
 private:
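The new mIsOrphan and mEnabledWhenMoved fields track effects that are temporarily detached from any io handle. A standalone model of that bookkeeping, with illustrative names rather than the real descriptor class:

    #include <cstdint>

    using IoHandle = int32_t;
    constexpr IoHandle kNoIo = 0;        // stand-in for AUDIO_IO_HANDLE_NONE

    struct EffectState {
        IoHandle io = kNoIo;
        bool isOrphan = false;           // parked: registered but attached to no io
        bool enabledWhenMoved = false;   // enabled state captured before a move

        void moveTo(IoHandle dstIo) {
            io = dstIo;
            isOrphan = (dstIo == kNoIo); // detaching from every io parks the effect
        }
    };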
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 2f424b8..8bd7575 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -987,6 +987,18 @@
     return clientsForStream;
 }
 
+std::string SwAudioOutputDescriptor::info() const {
+    std::string result;
+    result.append("[" );
+    result.append(AudioOutputDescriptor::info());
+    result.append("[io:" );
+    result.append(android::internal::ToString(mIoHandle));
+    result.append(", " );
+    result.append(isDuplicated() ? "duplicating" : mProfile->getTagName());
+    result.append("]]");
+    return result;
+}
+
 void SwAudioOutputCollection::dump(String8 *dst) const
 {
     dst->appendFormat("\n Outputs (%zu):\n", size());
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index f870b4f..83e0108 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -21,6 +21,7 @@
 #include <iterator>
 #include <optional>
 #include <regex>
+#include <vector>
 #include "AudioPolicyMix.h"
 #include "TypeConverter.h"
 #include "HwModule.h"
@@ -225,6 +226,31 @@
     return BAD_VALUE;
 }
 
+status_t AudioPolicyMixCollection::updateMix(
+        const AudioMix& mix, const std::vector<AudioMixMatchCriterion>& updatedCriteria) {
+    if (!areMixCriteriaConsistent(mix.mCriteria)) {
+        ALOGE("updateMix(): updated criteria are not consistent "
+              "(MATCH & EXCLUDE criteria of the same type)");
+        return BAD_VALUE;
+    }
+
+    for (size_t i = 0; i < size(); i++) {
+        const sp<AudioPolicyMix>& registeredMix = itemAt(i);
+        if (mix.mDeviceType == registeredMix->mDeviceType &&
+            mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0 &&
+            mix.mRouteFlags == registeredMix->mRouteFlags) {
+            registeredMix->mCriteria = updatedCriteria;
+            ALOGV("updateMix(): updated mix for dev=0x%x addr=%s", mix.mDeviceType,
+                  mix.mDeviceAddress.c_str());
+            return NO_ERROR;
+        }
+    }
+
+    ALOGE("updateMix(): mix not registered for dev=0x%x addr=%s", mix.mDeviceType,
+          mix.mDeviceAddress.c_str());
+    return BAD_VALUE;
+}
+
 status_t AudioPolicyMixCollection::getAudioPolicyMix(audio_devices_t deviceType,
         const String8& address, sp<AudioPolicyMix> &policyMix) const
 {
@@ -289,12 +315,20 @@
             continue; // skip the mix
         }
 
-        if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) && is_mix_loopback(policyMix->mRouteFlags)) {
-            // AAudio MMAP_NOIRQ streams cannot be routed to loopback/loopback+render
-            // using dynamic audio policy.
-            ALOGD("%s: Rejecting MMAP_NOIRQ request matched to loopback dynamic audio policy mix.",
-                __func__);
-            return INVALID_OPERATION;
+        if (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) {
+            if (is_mix_loopback(policyMix->mRouteFlags)) {
+                // AAudio MMAP_NOIRQ streams cannot be routed to loopback/loopback+render
+                // using dynamic audio policy.
+                ALOGD("%s: Rejecting MMAP_NOIRQ request matched to loopback dynamic "
+                      "audio policy mix.", __func__);
+                return INVALID_OPERATION;
+            }
+            if (mixDevice != nullptr && !mixDevice->isMmap()) {
+                ALOGD("%s: Rejecting MMAP_NOIRQ request matched to dynamic audio policy "
+                      "mix pointing to device %s which doesn't support mmap", __func__,
+                      mixDevice->toString(false).c_str());
+                return INVALID_OPERATION;
+            }
         }
 
         if (mixDevice != nullptr && mixDevice->equals(requestedDevice)) {
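updateMix() above treats a registered mix as the same mix when its device type, device address and route flags all match, and then only replaces the criteria list. A standalone sketch of that identity test, with an illustrative key type rather than the real AudioMix:

    #include <string>

    // Illustrative key type; the real code compares AudioMix fields directly.
    struct MixKey {
        unsigned deviceType;
        std::string deviceAddress;
        unsigned routeFlags;
    };

    bool sameMix(const MixKey& a, const MixKey& b) {
        return a.deviceType == b.deviceType
                && a.deviceAddress == b.deviceAddress
                && a.routeFlags == b.routeFlags;
    }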
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 3f9c8b0..c85df0f 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -18,9 +18,15 @@
 //#define LOG_NDEBUG 0
 
 #include <android-base/stringprintf.h>
+
+#include "AudioInputDescriptor.h"
 #include "EffectDescriptor.h"
 #include <utils/String8.h>
 
+#include <AudioPolicyInterface.h>
+#include "AudioPolicyMix.h"
+#include "HwModule.h"
+
 namespace android {
 
 void EffectDescriptor::dump(String8 *dst, int spaces) const
@@ -175,30 +181,57 @@
     return MAX_EFFECTS_MEMORY;
 }
 
-void EffectDescriptorCollection::moveEffects(audio_session_t session,
-                                             audio_io_handle_t srcOutput,
-                                             audio_io_handle_t dstOutput)
+void EffectDescriptorCollection::moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
+                                             audio_io_handle_t dstIo,
+                                             AudioPolicyClientInterface *clientInterface)
 {
-    ALOGV("%s session %d srcOutput %d dstOutput %d", __func__, session, srcOutput, dstOutput);
+    ALOGV("%s session %d srcIo %d dstIo %d", __func__, sessionId, srcIo, dstIo);
     for (size_t i = 0; i < size(); i++) {
         sp<EffectDescriptor> effect = valueAt(i);
-        if (effect->mSession == session && effect->mIo == srcOutput) {
-            effect->mIo = dstOutput;
+        if (effect->mSession == sessionId && effect->mIo == srcIo) {
+            effect->mIo = dstIo;
+            // Mark the effect as orphan when it is detached from any io handle
+            effect->mIsOrphan = (dstIo == AUDIO_IO_HANDLE_NONE);
+        }
+    }
+    clientInterface->moveEffects(sessionId, srcIo, dstIo);
+}
+
+void EffectDescriptorCollection::moveEffects(const std::vector<int>& ids, audio_io_handle_t dstIo)
+{
+    ALOGV("%s num effects %zu, first ID %d, dstIo %d",
+        __func__, ids.size(), ids.size() ? ids[0] : 0, dstIo);
+    for (size_t i = 0; i < size(); i++) {
+        sp<EffectDescriptor> effect = valueAt(i);
+        if (std::find(begin(ids), end(ids), effect->mId) != end(ids)) {
+            effect->mIo = dstIo;
+            effect->mIsOrphan = (dstIo == AUDIO_IO_HANDLE_NONE);
         }
     }
 }
 
-void EffectDescriptorCollection::moveEffects(const std::vector<int>& ids,
-                                             audio_io_handle_t dstOutput)
+bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId)
 {
-    ALOGV("%s num effects %zu, first ID %d, dstOutput %d",
-        __func__, ids.size(), ids.size() ? ids[0] : 0, dstOutput);
-    for (size_t i = 0; i < size(); i++) {
+    for (size_t i = 0; i < size(); ++i) {
         sp<EffectDescriptor> effect = valueAt(i);
-        if (std::find(begin(ids), end(ids), effect->mId) != end(ids)) {
-            effect->mIo = dstOutput;
+        if (effect->mSession == sessionId && effect->mIsOrphan) {
+            return true;
         }
     }
+    return false;
+}
+
+EffectDescriptorCollection EffectDescriptorCollection::getOrphanEffectsForSession(
+        audio_session_t sessionId) const
+{
+    EffectDescriptorCollection effects;
+    for (size_t i = 0; i < size(); i++) {
+        sp<EffectDescriptor> effect = valueAt(i);
+        if (effect->mSession == sessionId && effect->mIsOrphan) {
+            effects.add(keyAt(i), effect);
+        }
+    }
+    return effects;
 }
 
 audio_io_handle_t EffectDescriptorCollection::getIoForSession(audio_session_t sessionId,
@@ -214,6 +247,84 @@
     return AUDIO_IO_HANDLE_NONE;
 }
 
+void EffectDescriptorCollection::moveEffectsForIo(audio_session_t session,
+        audio_io_handle_t dstIo, const AudioInputCollection *inputs,
+        AudioPolicyClientInterface *clientInterface)
+{
+    // No src io given: look up, from the effect session, the src io to move the effects from
+    audio_io_handle_t srcIo = getIoForSession(session);
+    if (hasOrphansForSession(session) || (srcIo != AUDIO_IO_HANDLE_NONE && srcIo != dstIo)) {
+        moveEffects(session, srcIo, dstIo, inputs, clientInterface);
+    }
+}
+
+void EffectDescriptorCollection::moveEffects(audio_session_t session,
+        audio_io_handle_t srcIo, audio_io_handle_t dstIo, const AudioInputCollection *inputs,
+        AudioPolicyClientInterface *clientInterface)
+{
+    if ((srcIo != AUDIO_IO_HANDLE_NONE && srcIo == dstIo)
+            || (srcIo == AUDIO_IO_HANDLE_NONE && !hasOrphansForSession(session))) {
+        return;
+    }
+    // Either we may find orphan effects for the given session, or effects for this session might
+    // have already been assigned to another input (this can happen when an input is released or
+    // recreated after the client sets its preferred device).
+    EffectDescriptorCollection effectsToMove;
+    if (srcIo == AUDIO_IO_HANDLE_NONE) {
+        ALOGV("%s: restoring effects for session %d from orphan park to io=%d", __func__,
+                session, dstIo);
+        effectsToMove = getOrphanEffectsForSession(session);
+    } else {
+        ALOGV("%s: moving effects for session %d from io=%d to io=%d", __func__, session, srcIo,
+              dstIo);
+        if (const sp<AudioInputDescriptor>& previousInputDesc = inputs->valueFor(srcIo)) {
+            effectsToMove = getEffectsForIo(srcIo);
+            for (size_t i = 0; i < effectsToMove.size(); ++i) {
+                const sp<EffectDescriptor>& effect = effectsToMove.valueAt(i);
+                effect->mEnabledWhenMoved = effect->mEnabled;
+                previousInputDesc->trackEffectEnabled(effect, false);
+            }
+        } else {
+            ALOGW("%s: no effect descriptor for srcIo %d", __func__, srcIo);
+        }
+    }
+    moveEffects(session, srcIo, dstIo, clientInterface);
+
+    if (dstIo != AUDIO_IO_HANDLE_NONE) {
+        if (const sp<AudioInputDescriptor>& inputDesc = inputs->valueFor(dstIo)) {
+            for (size_t i = 0; i < effectsToMove.size(); ++i) {
+                const sp<EffectDescriptor>& effect = effectsToMove.valueAt(i);
+                inputDesc->trackEffectEnabled(effect, effect->mEnabledWhenMoved);
+            }
+        } else {
+            ALOGW("%s: no effect descriptor for dstIo %d", __func__, dstIo);
+        }
+    }
+}
+
+void EffectDescriptorCollection::putOrphanEffectsForIo(audio_io_handle_t srcIo)
+{
+    for (size_t i = 0; i < size(); i++) {
+        sp<EffectDescriptor> effect = valueAt(i);
+        if (effect->mIo == srcIo) {
+            effect->mIo = AUDIO_IO_HANDLE_NONE;
+            effect->mIsOrphan = true;
+        }
+    }
+}
+
+void EffectDescriptorCollection::putOrphanEffects(audio_session_t session,
+        audio_io_handle_t srcIo, const AudioInputCollection *inputs,
+        AudioPolicyClientInterface *clientInterface)
+{
+    if (getIoForSession(session) != srcIo) {
+        // Effect session not held by this client io handle
+        return;
+    }
+    ALOGV("%s: park effects for session %d and io=%d to orphans", __func__, session, srcIo);
+    moveEffects(session, srcIo, AUDIO_IO_HANDLE_NONE, inputs, clientInterface);
+}
+
 EffectDescriptorCollection EffectDescriptorCollection::getEffectsForIo(audio_io_handle_t io) const
 {
     EffectDescriptorCollection effects;
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 671b30a..86600f4 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -112,3 +112,15 @@
     name: "r_submix_audio_policy_configuration",
     srcs: ["r_submix_audio_policy_configuration.xml"],
 }
+filegroup {
+    name: "bluetooth_audio_policy_configuration_7_0",
+    srcs: ["bluetooth_audio_policy_configuration_7_0.xml"],
+}
+filegroup {
+    name: "bluetooth_with_le_audio_policy_configuration_7_0",
+    srcs: ["bluetooth_with_le_audio_policy_configuration_7_0.xml"],
+}
+filegroup {
+    name: "hearing_aid_audio_policy_configuration_7_0",
+    srcs: ["hearing_aid_audio_policy_configuration_7_0.xml"],
+}
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 15f7842..fa6be39 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -175,34 +175,37 @@
         //   - cannot route from voice call RX OR
         //   - audio HAL version is < 3.0 and TX device is on the primary HW module
         if (getPhoneState() == AUDIO_MODE_IN_CALL) {
-            audio_devices_t txDevice = AUDIO_DEVICE_NONE;
-            sp<DeviceDescriptor> txDeviceDesc =
-                    getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
-            if (txDeviceDesc != nullptr) {
-                txDevice = txDeviceDesc->type();
-            }
             sp<AudioOutputDescriptor> primaryOutput = outputs.getPrimaryOutput();
-            LOG_ALWAYS_FATAL_IF(primaryOutput == nullptr, "Primary output not found");
-            DeviceVector availPrimaryInputDevices =
-                    availableInputDevices.getDevicesFromHwModule(primaryOutput->getModuleHandle());
+            if (primaryOutput != nullptr) {
+                audio_devices_t txDevice = AUDIO_DEVICE_NONE;
+                sp<DeviceDescriptor> txDeviceDesc =
+                        getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+                if (txDeviceDesc != nullptr) {
+                    txDevice = txDeviceDesc->type();
+                }
+                DeviceVector availPrimaryInputDevices =
+                        availableInputDevices.getDevicesFromHwModule(
+                            primaryOutput->getModuleHandle());
 
-            // TODO: getPrimaryOutput return only devices from first module in
-            // audio_policy_configuration.xml, hearing aid is not there, but it's
-            // a primary device
-            // FIXME: this is not the right way of solving this problem
-            DeviceVector availPrimaryOutputDevices = availableOutputDevices.getDevicesFromTypes(
-                    primaryOutput->supportedDevices().types());
-            availPrimaryOutputDevices.add(
-                    availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID));
+                // TODO: getPrimaryOutput return only devices from first module in
+                // audio_policy_configuration.xml, hearing aid is not there, but it's
+                // a primary device
+                // FIXME: this is not the right way of solving this problem
+                DeviceVector availPrimaryOutputDevices = availableOutputDevices.getDevicesFromTypes(
+                        primaryOutput->supportedDevices().types());
+                availPrimaryOutputDevices.add(
+                        availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID));
 
-            if ((availableInputDevices.getDevice(AUDIO_DEVICE_IN_TELEPHONY_RX,
-                                                 String8(""), AUDIO_FORMAT_DEFAULT) == nullptr) ||
-                ((availPrimaryInputDevices.getDevice(
-                        txDevice, String8(""), AUDIO_FORMAT_DEFAULT) != nullptr) &&
-                 (primaryOutput->getPolicyAudioPort()->getModuleVersionMajor() < 3))) {
-                availableOutputDevices = availPrimaryOutputDevices;
+                if ((availableInputDevices.getDevice(AUDIO_DEVICE_IN_TELEPHONY_RX,
+                                                     String8(""), AUDIO_FORMAT_DEFAULT) == nullptr)
+                    || ((availPrimaryInputDevices.getDevice(
+                            txDevice, String8(""), AUDIO_FORMAT_DEFAULT) != nullptr) &&
+                     (primaryOutput->getPolicyAudioPort()->getModuleVersionMajor() < 3))) {
+                    availableOutputDevices = availPrimaryOutputDevices;
+                }
+            } else {
+                ALOGE("%s, STRATEGY_PHONE: Primary output not found", __func__);
             }
-
         }
         // Do not use A2DP devices when in call but use them when not in call
         // (e.g for voice mail playback)
@@ -595,8 +598,11 @@
         if ((getPhoneState() == AUDIO_MODE_IN_CALL) &&
                 (availableOutputDevices.getDevice(AUDIO_DEVICE_OUT_TELEPHONY_TX,
                         String8(""), AUDIO_FORMAT_DEFAULT)) == nullptr) {
-            LOG_ALWAYS_FATAL_IF(availablePrimaryDevices.isEmpty(), "Primary devices not found");
-            availableDevices = availablePrimaryDevices;
+            if (!availablePrimaryDevices.isEmpty()) {
+                availableDevices = availablePrimaryDevices;
+            } else {
+                ALOGE("%s, AUDIO_SOURCE_VOICE_COMMUNICATION: Primary devices not found", __func__);
+            }
         }
 
         if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
@@ -650,8 +656,11 @@
     case AUDIO_SOURCE_HOTWORD:
         // We should not use primary output criteria for Hotword but rather limit
         // to devices attached to the same HW module as the build in mic
-        LOG_ALWAYS_FATAL_IF(availablePrimaryDevices.isEmpty(), "Primary devices not found");
-        availableDevices = availablePrimaryDevices;
+        if (!availablePrimaryDevices.isEmpty()) {
+            availableDevices = availablePrimaryDevices;
+        } else {
+            ALOGE("%s, AUDIO_SOURCE_HOTWORD: Primary devices not found", __func__);
+        }
         if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
             device = availableDevices.getDevice(
                     AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
new file mode 100644
index 0000000..38a2cde
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -0,0 +1,74 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ ******************************************************************************/
+
+cc_defaults {
+    name: "audiopolicy_aidl_fuzzer_defaults",
+    shared_libs: [
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudiopolicy",
+        "libaudiopolicymanagerdefault",
+        "libactivitymanager_aidl",
+        "libaudiohal",
+        "libaudiopolicyservice",
+        "libaudioflinger",
+        "libaudioclient",
+        "libaudioprocessing",
+        "libhidlbase",
+        "liblog",
+        "libmediautils",
+        "libnblog",
+        "libnbaio",
+        "libpowermanager",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
+    ],
+    static_libs: [
+        "libfakeservicemanager",
+        "libmediaplayerservice",
+    ],
+    header_libs: [
+        "libaudiohal_headers",
+        "libaudioflinger_headers",
+        "libaudiopolicymanager_interface_headers",
+        "libbinder_headers",
+        "libmedia_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libaudiopolicy",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
+}
+
+cc_fuzz {
+    name: "audiopolicy_aidl_fuzzer",
+    srcs: ["audiopolicy_aidl_fuzzer.cpp"],
+    defaults: [
+        "audiopolicy_aidl_fuzzer_defaults",
+        "service_fuzzer_defaults",
+    ],
+}
diff --git a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
new file mode 100644
index 0000000..ca79c49
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <AudioFlinger.h>
+#include <android-base/logging.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_process.h>
+#include <android/media/IAudioPolicyService.h>
+#include <fakeservicemanager/FakeServiceManager.h>
+#include <fuzzbinder/libbinder_driver.h>
+#include <fuzzbinder/random_binder.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/IAudioFlinger.h>
+#include <service/AudioPolicyService.h>
+
+using namespace android;
+using namespace android::binder;
+using namespace android::hardware;
+using android::fuzzService;
+
+[[clang::no_destroy]] static std::once_flag gSmOnce;
+sp<FakeServiceManager> gFakeServiceManager;
+
+bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
+                FuzzedDataProvider& fdp) {
+    sp<IBinder> binder = getRandomBinder(&fdp);
+    if (binder == nullptr) {
+        return false;
+    }
+    CHECK_EQ(NO_ERROR, fakeServiceManager->addService(serviceName, binder));
+    return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+
+    std::call_once(gSmOnce, [&] {
+        /* Create a FakeServiceManager instance and add required services */
+        gFakeServiceManager = sp<FakeServiceManager>::make();
+        setDefaultServiceManager(gFakeServiceManager);
+    });
+    gFakeServiceManager->clear();
+
+    for (const char* service :
+         {"activity", "sensor_privacy", "permission", "scheduling_policy",
+          "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+        if (!addService(String16(service), gFakeServiceManager, fdp)) {
+            return 0;
+        }
+    }
+
+    const auto audioFlinger = sp<AudioFlinger>::make();
+    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+
+    CHECK_EQ(NO_ERROR,
+             gFakeServiceManager->addService(
+                     String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
+                     false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+
+    AudioSystem::get_audio_flinger_for_fuzzer();
+    const auto audioPolicyService = sp<AudioPolicyService>::make();
+
+    CHECK_EQ(NO_ERROR,
+             gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+                                             false /* allowIsolated */,
+                                             IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+
+    fuzzService(media::IAudioPolicyService::asBinder(audioPolicyService),
+                FuzzedDataProvider(data, size));
+
+    return 0;
+}
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 8793085..58fcb5c 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -661,7 +661,9 @@
 }
 
 AudioPolicyManagerFuzzerDPPlaybackReRouting::~AudioPolicyManagerFuzzerDPPlaybackReRouting() {
-    mManager->stopInput(mPortId);
+    if (mManager) {
+        mManager->stopInput(mPortId);
+    }
 }
 
 bool AudioPolicyManagerFuzzerDPPlaybackReRouting::initialize() {
@@ -773,7 +775,9 @@
 }
 
 AudioPolicyManagerFuzzerDPMixRecordInjection::~AudioPolicyManagerFuzzerDPMixRecordInjection() {
-    mManager->stopOutput(mPortId);
+    if (mManager) {
+        mManager->stopOutput(mPortId);
+    }
 }
 
 bool AudioPolicyManagerFuzzerDPMixRecordInjection::initialize() {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 878c0cd..a72598e 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include "utils/Errors.h"
 #define LOG_TAG "APM_AudioPolicyManager"
 
 // Need to keep the log statements even in production builds
@@ -337,7 +338,7 @@
                     outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), newDevices);
                     continue;
                 }
-                setOutputDevices(desc, newDevices, force, 0);
+                setOutputDevices(__func__, desc, newDevices, force, 0);
             }
             if (!desc->isDuplicated() && desc->mProfile->hasDynamicAudioProfile() &&
                     !activeMediaDevices.empty() && desc->devices() != activeMediaDevices &&
@@ -728,7 +729,7 @@
     // Use legacy routing method for voice calls via setOutputDevice() on primary output.
     // Otherwise, create two audio patches for TX and RX path.
     if (!createRxPatch) {
-        muteWaitMs = setOutputDevices(mPrimaryOutput, rxDevices, true, delayMs);
+        muteWaitMs = setOutputDevices(__func__, mPrimaryOutput, rxDevices, true, delayMs);
     } else { // create RX path audio patch
         connectTelephonyRxAudioSource();
         // If the TX device is on the primary HW module but RX device is
@@ -888,7 +889,7 @@
                 disconnectTelephonyAudioSource(mCallRxSourceClient);
                 disconnectTelephonyAudioSource(mCallTxSourceClient);
             }
-            setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
+            setOutputDevices(__func__, mPrimaryOutput, rxDevices, force, 0);
         }
     }
 
@@ -906,7 +907,7 @@
                 outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
                 continue;
             }
-            setOutputDevices(desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
+            setOutputDevices(__func__, desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
                              true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
     }
@@ -1325,10 +1326,15 @@
         AudioProfileVector profiles;
         status_t ret = getProfilesForDevices(outputDevices, profiles, *flags, false /*isInput*/);
         if (ret == NO_ERROR && !profiles.empty()) {
-            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
-                    : *profiles[0]->getChannels().begin();
-            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
-                    : *profiles[0]->getSampleRates().begin();
+            const auto channels = profiles[0]->getChannels();
+            if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
+                config->channel_mask = *channels.begin();
+            }
+            const auto sampleRates = profiles[0]->getSampleRates();
+            if (!sampleRates.empty() &&
+                    (sampleRates.find(config->sample_rate) == sampleRates.end())) {
+                config->sample_rate = *sampleRates.begin();
+            }
             config->format = profiles[0]->getFormat();
         }
         return INVALID_OPERATION;
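The reworked suggestion logic above no longer overwrites the requested channel mask or sample rate unconditionally; it only falls back to the profile's first supported value when the requested one is unsupported. A generic standalone sketch of that rule (assumed semantics):

    #include <set>

    // Keep the requested value when the supported set is empty or already
    // contains it; otherwise fall back to the profile's first supported value.
    template <typename T>
    T suggestOrKeep(const std::set<T>& supported, T requested) {
        if (supported.empty() || supported.count(requested) != 0) {
            return requested;
        }
        return *supported.begin();
    }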
@@ -2334,7 +2340,8 @@
             return DEAD_OBJECT;
         }
         const uint32_t muteWaitMs =
-                setOutputDevices(outputDesc, devices, force, 0, nullptr, requiresMuteCheck);
+                setOutputDevices(__func__, outputDesc, devices, force, 0, nullptr,
+                                 requiresMuteCheck);
 
         // apply volume rules for current stream and device if necessary
         auto &curves = getVolumeCurves(client->attributes());
@@ -2417,7 +2424,7 @@
                     outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
                     continue;
                 }
-                setOutputDevices(desc, newDevices, force, delayMs);
+                setOutputDevices(__func__, desc, newDevices, force, delayMs);
                 // re-apply device specific volume if not done by setOutputDevice()
                 if (!force) {
                     applyStreamVolumes(desc, newDevices.types(), delayMs);
@@ -2519,7 +2526,7 @@
             // still contain data that needs to be drained. The latency only covers the audio HAL
             // and kernel buffers. Also the latency does not always include additional delay in the
             // audio path (audio DSP, CODEC ...)
-            setOutputDevices(outputDesc, newDevices, false, outputDesc->latency()*2,
+            setOutputDevices(__func__, outputDesc, newDevices, false, outputDesc->latency()*2,
                              nullptr, true /*requiresMuteCheck*/, requiresVolumeCheck);
 
             // force restoring the device selection on other active outputs if it differs from the
@@ -2542,7 +2549,7 @@
                         outputsToReopen.emplace(mOutputs.keyAt(i), newDevices2);
                         continue;
                     }
-                    setOutputDevices(desc, newDevices2, force, delayMs);
+                    setOutputDevices(__func__, desc, newDevices2, force, delayMs);
 
                     // re-apply device specific volume if not done by setOutputDevice()
                     if (!force) {
@@ -2645,6 +2652,7 @@
     sp<AudioPolicyMix> policyMix;
     sp<DeviceDescriptor> device;
     sp<AudioInputDescriptor> inputDesc;
+    sp<AudioInputDescriptor> previousInputDesc;
     sp<RecordClientDescriptor> clientDesc;
     audio_port_handle_t requestedDeviceId = *selectedDeviceId;
     uid_t uid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(attributionSource.uid));
@@ -2774,10 +2782,15 @@
         status_t ret = getProfilesForDevices(
                 DeviceVector(device), profiles, flags, true /*isInput*/);
         if (ret == NO_ERROR && !profiles.empty()) {
-            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
-                    : *profiles[0]->getChannels().begin();
-            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
-                    : *profiles[0]->getSampleRates().begin();
+            const auto channels = profiles[0]->getChannels();
+            if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
+                config->channel_mask = *channels.begin();
+            }
+            const auto sampleRates = profiles[0]->getSampleRates();
+            if (!sampleRates.empty() &&
+                    (sampleRates.find(config->sample_rate) == sampleRates.end())) {
+                config->sample_rate = *sampleRates.begin();
+            }
             config->format = profiles[0]->getFormat();
         }
         goto error;
@@ -2796,6 +2809,8 @@
                                             requestedDeviceId, attributes.source, flags,
                                             isSoundTrigger);
     inputDesc = mInputs.valueFor(*input);
+    // Move (if found) effect for the client session to its input
+    mEffects.moveEffectsForIo(session, *input, &mInputs, mpClientInterface);
     inputDesc->addClient(clientDesc);
 
     ALOGV("getInputForAttr() returns input %d type %d selectedDeviceId %d for port ID %d",
@@ -3105,7 +3120,7 @@
     ALOGV("%s %d", __FUNCTION__, input);
 
     inputDesc->removeClient(portId);
-
+    mEffects.putOrphanEffects(client->session(), input, &mInputs, mpClientInterface);
     if (inputDesc->getClientCount() > 0) {
         ALOGV("%s(%d) %zu clients remaining", __func__, portId, inputDesc->getClientCount());
         return;
@@ -3467,8 +3482,8 @@
     }
 
     if (output != mMusicEffectOutput) {
-        mEffects.moveEffects(AUDIO_SESSION_OUTPUT_MIX, mMusicEffectOutput, output);
-        mpClientInterface->moveEffects(AUDIO_SESSION_OUTPUT_MIX, mMusicEffectOutput, output);
+        mEffects.moveEffects(AUDIO_SESSION_OUTPUT_MIX, mMusicEffectOutput, output,
+                mpClientInterface);
         mMusicEffectOutput = output;
     }
 
@@ -3776,6 +3791,17 @@
     return res;
 }
 
+status_t AudioPolicyManager::updatePolicyMix(
+            const AudioMix& mix,
+            const std::vector<AudioMixMatchCriterion>& updatedCriteria) {
+    status_t res = mPolicyMixes.updateMix(mix, updatedCriteria);
+    if (res == NO_ERROR) {
+        checkForDeviceAndOutputChanges();
+        updateCallAndOutputRouting();
+    }
+    return res;
+}
+
 void AudioPolicyManager::dumpManualSurroundFormats(String8 *dst) const
 {
     size_t i = 0;
@@ -3938,8 +3964,9 @@
                 outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
                 continue;
             }
-            waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
-                                      !skipDelays /*requiresMuteCheck*/,
+
+            waitMs = setOutputDevices(__func__, outputDesc, newDevices, forceRouting, delayMs,
+                                       nullptr, !skipDelays /*requiresMuteCheck*/,
                                       !forceRouting /*requiresVolumeCheck*/, skipDelays);
             // Only apply special touch sound delay once
             delayMs = 0;
@@ -4926,7 +4953,7 @@
         // TODO: reconfigure output format and channels here
         ALOGV("%s setting device %s on output %d",
               __func__, dumpDeviceTypes(devices.types()).c_str(), outputDesc->mIoHandle);
-        setOutputDevices(outputDesc, devices, true, 0, handle);
+        setOutputDevices(__func__, outputDesc, devices, true, 0, handle);
         index = mAudioPatches.indexOfKey(*handle);
         if (index >= 0) {
             if (patchDesc != 0 && patchDesc != mAudioPatches.valueAt(index)) {
@@ -5185,7 +5212,7 @@
             return BAD_VALUE;
         }
 
-        setOutputDevices(outputDesc,
+        setOutputDevices(__func__, outputDesc,
                          getNewOutputDevices(outputDesc, true /*fromCache*/),
                          true,
                          0,
@@ -5239,7 +5266,7 @@
             //      3 / Inactive Output previously hosting SwBridge that can be closed.
             bool updateDevice = outputDesc->isActive() || !sourceDesc->useSwBridge() ||
                     sourceDesc->canCloseOutput();
-            setOutputDevices(outputDesc,
+            setOutputDevices(__func__, outputDesc,
                              updateDevice ? getNewOutputDevices(outputDesc, true /*fromCache*/) :
                                             outputDesc->devices(),
                              force,
@@ -5376,7 +5403,7 @@
                 outputsToReopen.emplace(mOutputs.keyAt(j), newDevices);
                 continue;
             }
-            setOutputDevices(outputDesc, newDevices, false);
+            setOutputDevices(__func__, outputDesc, newDevices, false);
         }
     }
     reopenOutputsWithDevices(outputsToReopen);
@@ -6258,7 +6285,7 @@
                 outputDesc->close();
             } else {
                 addOutput(output, outputDesc);
-                setOutputDevices(outputDesc,
+                setOutputDevices(__func__, outputDesc,
                                  DeviceVector(supportedDevice),
                                  true,
                                  0,
@@ -6458,8 +6485,8 @@
                 if (device_distinguishes_on_address(deviceType)) {
                     ALOGV("checkOutputsForDevice(): setOutputDevices %s",
                             device->toString().c_str());
-                    setOutputDevices(desc, DeviceVector(device), true/*force*/, 0/*delay*/,
-                                     NULL/*patch handle*/);
+                    setOutputDevices(__func__, desc, DeviceVector(device), true/*force*/,
+                                      0/*delay*/, NULL/*patch handle*/);
                 }
                 ALOGV("checkOutputsForDevice(): adding output %d", output);
             }
@@ -6754,6 +6781,7 @@
         mpClientInterface->onAudioPatchListUpdate();
     }
 
+    mEffects.putOrphanEffectsForIo(input);
     inputDesc->close();
     mInputs.removeItem(input);
 
@@ -7342,7 +7370,7 @@
                 if (!desc->supportedDevices().containsAtLeastOne(outputDesc->supportedDevices())) {
                     continue;
                 }
-                ALOGVV("%s() %s (curDevice %s)", __func__,
+                ALOGVV("%s() output %s %s (curDevice %s)", __func__, desc->info().c_str(),
                       mute ? "muting" : "unmuting", curDevices.toString().c_str());
                 setStrategyMute(productStrategy, mute, desc, mute ? 0 : delayMs);
                 if (desc->isStrategyActive(productStrategy)) {
@@ -7395,7 +7423,8 @@
     return 0;
 }
 
-uint32_t AudioPolicyManager::setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
+uint32_t AudioPolicyManager::setOutputDevices(const char *caller,
+                                              const sp<SwAudioOutputDescriptor>& outputDesc,
                                               const DeviceVector &devices,
                                               bool force,
                                               int delayMs,
@@ -7404,13 +7433,15 @@
                                               bool skipMuteDelay)
 {
     // TODO(b/262404095): Consider if the output need to be reopened.
-    ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
+    std::string logPrefix = std::string("caller ") + caller + outputDesc->info();
+    ALOGV("%s %s device %s delayMs %d", __func__, logPrefix.c_str(),
+          devices.toString().c_str(), delayMs);
     uint32_t muteWaitMs;
 
     if (outputDesc->isDuplicated()) {
-        muteWaitMs = setOutputDevices(outputDesc->subOutput1(), devices, force, delayMs,
+        muteWaitMs = setOutputDevices(__func__, outputDesc->subOutput1(), devices, force, delayMs,
                 nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
-        muteWaitMs += setOutputDevices(outputDesc->subOutput2(), devices, force, delayMs,
+        muteWaitMs += setOutputDevices(__func__, outputDesc->subOutput2(), devices, force, delayMs,
                 nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
         return muteWaitMs;
     }
@@ -7420,7 +7451,8 @@
     DeviceVector prevDevices = outputDesc->devices();
     DeviceVector availPrevDevices = mAvailableOutputDevices.filter(prevDevices);
 
-    ALOGV("setOutputDevices() prevDevice %s", prevDevices.toString().c_str());
+    ALOGV("%s %s prevDevice %s", __func__, logPrefix.c_str(),
+          prevDevices.toString().c_str());
 
     if (!filteredDevices.isEmpty()) {
         outputDesc->setDevices(filteredDevices);
@@ -7430,7 +7462,8 @@
     if (requiresMuteCheck) {
         muteWaitMs = checkDeviceMuteStrategies(outputDesc, prevDevices, delayMs);
     } else {
-        ALOGV("%s: suppressing checkDeviceMuteStrategies", __func__);
+        ALOGV("%s: %s suppressing checkDeviceMuteStrategies", __func__,
+              logPrefix.c_str());
         muteWaitMs = 0;
     }
 
@@ -7440,7 +7473,8 @@
     // output profile or if new device is not supported AND previous device(s) is(are) still
     // available (otherwise reset device must be done on the output)
     if (!devices.isEmpty() && filteredDevices.isEmpty() && !availPrevDevices.empty()) {
-        ALOGV("%s: unsupported device %s for output", __func__, devices.toString().c_str());
+        ALOGV("%s: %s unsupported device %s for output", __func__, logPrefix.c_str(),
+              devices.toString().c_str());
         // restore previous device after evaluating strategy mute state
         outputDesc->setDevices(prevDevices);
         return muteWaitMs;
@@ -7453,16 +7487,19 @@
     //  AND the output is connected by a valid audio patch.
     // Doing this check here allows the caller to call setOutputDevices() without conditions
     if ((filteredDevices.isEmpty() || filteredDevices == prevDevices) && !force && outputRouted) {
-        ALOGV("%s setting same device %s or null device, force=%d, patch handle=%d", __func__,
-              filteredDevices.toString().c_str(), force, outputDesc->getPatchHandle());
+        ALOGV("%s %s setting same device %s or null device, force=%d, patch handle=%d",
+              __func__, logPrefix.c_str(), filteredDevices.toString().c_str(), force,
+              outputDesc->getPatchHandle());
         if (requiresVolumeCheck && !filteredDevices.isEmpty()) {
-            ALOGV("%s setting same device on routed output, force apply volumes", __func__);
+            ALOGV("%s %s setting same device on routed output, force apply volumes",
+                  __func__, logPrefix.c_str());
             applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs, true /*force*/);
         }
         return muteWaitMs;
     }
 
-    ALOGV("%s changing device to %s", __func__, filteredDevices.toString().c_str());
+    ALOGV("%s %s changing device to %s", __func__, logPrefix.c_str(),
+          filteredDevices.toString().c_str());
 
     // do the routing
     if (filteredDevices.isEmpty() || mAvailableOutputDevices.filter(filteredDevices).empty()) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 863c785..91fe1cc 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -292,6 +292,9 @@
 
         virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes);
         virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
+        virtual status_t updatePolicyMix(
+                const AudioMix& mix,
+                const std::vector<AudioMixMatchCriterion>& updatedCriteria) override;
         virtual status_t setUidDeviceAffinities(uid_t uid,
                 const AudioDeviceTypeAddrVector& devices);
         virtual status_t removeUidDeviceAffinities(uid_t uid);
@@ -526,6 +529,7 @@
 
         /**
          * @brief setOutputDevices change the route of the specified output.
+         * @param caller name of the calling function, used for logging only
          * @param outputDesc to be considered
          * @param device to be considered to route the output
          * @param force if true, force the routing even if no change.
@@ -539,7 +543,8 @@
          * @return the number of ms we have slept to allow new routing to take effect in certain
          *        cases.
          */
-        uint32_t setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
+        uint32_t setOutputDevices(const char *caller,
+                                  const sp<SwAudioOutputDescriptor>& outputDesc,
                                   const DeviceVector &device,
                                   bool force = false,
                                   int delayMs = 0,
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index f4fc8f1..c674909 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -7,26 +7,8 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_library_shared {
-    name: "libaudiopolicyservice",
-
-    defaults: [
-        "latest_android_media_audio_common_types_cpp_shared",
-    ],
-
-    srcs: [
-        "AudioPolicyClientImpl.cpp",
-        "AudioPolicyEffects.cpp",
-        "AudioPolicyInterfaceImpl.cpp",
-        "AudioPolicyService.cpp",
-        "CaptureStateNotifier.cpp",
-        "Spatializer.cpp",
-        "SpatializerPoseController.cpp",
-    ],
-
-    include_dirs: [
-        "frameworks/av/services/audioflinger"
-    ],
+cc_defaults {
+    name: "libaudiopolicyservice_dependencies",
 
     shared_libs: [
         "libactivitymanager_aidl",
@@ -41,7 +23,6 @@
         "libaudioutils",
         "libbinder",
         "libcutils",
-        "libeffectsconfig",
         "libhardware_legacy",
         "libheadtracking",
         "libheadtracking-binding",
@@ -67,6 +48,36 @@
     ],
 
     static_libs: [
+        "libeffectsconfig",
+        "libaudiopolicycomponents",
+    ],
+}
+
+cc_library {
+    name: "libaudiopolicyservice",
+
+    defaults: [
+        "libaudiopolicyservice_dependencies",
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
+    srcs: [
+        "AudioRecordClient.cpp",
+        "AudioPolicyClientImpl.cpp",
+        "AudioPolicyEffects.cpp",
+        "AudioPolicyInterfaceImpl.cpp",
+        "AudioPolicyService.cpp",
+        "CaptureStateNotifier.cpp",
+        "Spatializer.cpp",
+        "SpatializerPoseController.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger"
+    ],
+
+    static_libs: [
         "framework-permission-aidl-cpp",
     ],
 
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 70a1785..85b7ad9 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -44,10 +44,7 @@
 AudioPolicyEffects::AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
     // load xml config with effectsFactoryHal
     status_t loadResult = loadAudioEffectConfig(effectsFactoryHal);
-    if (loadResult == NO_ERROR) {
-        mDefaultDeviceEffectFuture =
-                std::async(std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
-    } else if (loadResult < 0) {
+    if (loadResult < 0) {
         ALOGW("Failed to query effect configuration, fallback to load .conf");
         // load automatic audio effect modules
         if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
@@ -60,6 +57,11 @@
     }
 }
 
+void AudioPolicyEffects::setDefaultDeviceEffects() {
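+    // Launch initDefaultDeviceEffects() asynchronously. This is only called once the audio
+    // policy service is ready, since AudioFlinger registers the created effects with it.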
+    mDefaultDeviceEffectFuture = std::async(
+                std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
+}
+
 AudioPolicyEffects::~AudioPolicyEffects()
 {
     size_t i = 0;
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 9f65a96..e17df48 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -117,6 +117,8 @@
     // Remove the default stream effect from wherever it's attached.
     status_t removeStreamDefaultEffect(audio_unique_id_t id);
 
+    void setDefaultDeviceEffects();
+
 private:
     void initDefaultDeviceEffects();
 
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 5d86e7c..ce9fb92 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -18,6 +18,7 @@
 //#define LOG_NDEBUG 0
 
 #include "AudioPolicyService.h"
+#include "AudioRecordClient.h"
 #include "TypeConverter.h"
 #include <media/AidlConversion.h>
 #include <media/AudioPolicy.h>
@@ -1763,6 +1764,22 @@
     }
 }
 
+Status AudioPolicyService::updatePolicyMixes(
+        const ::std::vector<::android::media::AudioMixUpdate>& updates) {
+    Mutex::Autolock _l(mLock);
+    for (const auto& update : updates) {
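+        // Convert each AIDL mix update to the legacy types and apply it, stopping at the
+        // first failure.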
+        AudioMix mix = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_AudioMix(update.audioMix));
+        std::vector<AudioMixMatchCriterion> newCriteria =
+                VALUE_OR_RETURN_BINDER_STATUS(convertContainer<std::vector<AudioMixMatchCriterion>>(
+                        update.newCriteria, aidl2legacy_AudioMixMatchCriterion));
+        status_t status;
+        if ((status = mAudioPolicyManager->updatePolicyMix(mix, newCriteria)) != NO_ERROR) {
+            return binderStatusFromStatusT(status);
+        }
+    }
+    return binderStatusFromStatusT(NO_ERROR);
+}
+
 Status AudioPolicyService::setUidDeviceAffinities(
         int32_t uidAidl,
         const std::vector<AudioDevice>& devicesAidl) {
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 041aa1c..aa2b6f5 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -25,7 +25,6 @@
 #include <sys/time.h>
 #include <dlfcn.h>
 
-#include <android/content/pm/IPackageManagerNative.h>
 #include <audio_utils/clock.h>
 #include <binder/IServiceManager.h>
 #include <utils/Log.h>
@@ -35,6 +34,7 @@
 #include <binder/IResultReceiver.h>
 #include <utils/String16.h>
 #include <utils/threads.h>
+#include "AudioRecordClient.h"
 #include "AudioPolicyService.h"
 #include <hardware_legacy/power.h>
 #include <media/AidlConversion.h>
@@ -120,6 +120,7 @@
 BINDER_METHOD_ENTRY(releaseSoundTriggerSession) \
 BINDER_METHOD_ENTRY(getPhoneState) \
 BINDER_METHOD_ENTRY(registerPolicyMixes) \
+BINDER_METHOD_ENTRY(updatePolicyMixes) \
 BINDER_METHOD_ENTRY(setUidDeviceAffinities) \
 BINDER_METHOD_ENTRY(removeUidDeviceAffinities) \
 BINDER_METHOD_ENTRY(setUserIdDeviceAffinities) \
@@ -216,27 +217,6 @@
 {
     delete interface;
 }
-
-namespace {
-int getTargetSdkForPackageName(std::string_view packageName) {
-    const auto binder = defaultServiceManager()->checkService(String16{"package_native"});
-    int targetSdk = -1;
-    if (binder != nullptr) {
-        const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
-        if (pm != nullptr) {
-            const auto status = pm->getTargetSdkVersionForPackage(
-                    String16{packageName.data(), packageName.size()}, &targetSdk);
-            ALOGI("Capy check package %s, sdk %d", packageName.data(), targetSdk);
-            return status.isOk() ? targetSdk : -1;
-        }
-    }
-    return targetSdk;
-}
-
-bool doesPackageTargetAtLeastU(std::string_view packageName) {
-    return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
-}
-} // anonymous
 // ----------------------------------------------------------------------------
 
 AudioPolicyService::AudioPolicyService()
@@ -327,6 +307,9 @@
         }
     }
     AudioSystem::audioPolicyReady();
+    // AudioFlinger handles effect creation and registers these effects with the audio_policy
+    // service, so the audio_policy service must be ready before this point.
+    audioPolicyEffects->setDefaultDeviceEffects();
 }
 
 void AudioPolicyService::unloadAudioPolicyManager()
@@ -1186,20 +1169,6 @@
     return false;
 }
 
-/* static */
-bool AudioPolicyService::isAppOpSource(audio_source_t source)
-{
-    switch (source) {
-        case AUDIO_SOURCE_FM_TUNER:
-        case AUDIO_SOURCE_ECHO_REFERENCE:
-        case AUDIO_SOURCE_REMOTE_SUBMIX:
-            return false;
-        default:
-            break;
-    }
-    return true;
-}
-
 void AudioPolicyService::setAppState_l(sp<AudioRecordClient> client, app_state_t state)
 {
     AutoCallerClear acc;
@@ -1339,6 +1308,7 @@
         case TRANSACTION_isStreamActiveRemotely:
         case TRANSACTION_isSourceActive:
         case TRANSACTION_registerPolicyMixes:
+        case TRANSACTION_updatePolicyMixes:
         case TRANSACTION_setMasterMono:
         case TRANSACTION_getSurroundFormats:
         case TRANSACTION_getReportedSurroundFormats:
@@ -1900,113 +1870,6 @@
     return binder::Status::ok();
 }
 
-// -----------  AudioPolicyService::OpRecordAudioMonitor implementation ----------
-
-// static
-sp<AudioPolicyService::OpRecordAudioMonitor>
-AudioPolicyService::OpRecordAudioMonitor::createIfNeeded(
-            const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
-            wp<AudioCommandThread> commandThread)
-{
-    if (isAudioServerOrRootUid(attributionSource.uid)) {
-        ALOGV("not silencing record for audio or root source %s",
-                attributionSource.toString().c_str());
-        return nullptr;
-    }
-
-    if (!AudioPolicyService::isAppOpSource(attr.source)) {
-        ALOGD("not monitoring app op for uid %d and source %d",
-                attributionSource.uid, attr.source);
-        return nullptr;
-    }
-
-    if (!attributionSource.packageName.has_value()
-            || attributionSource.packageName.value().size() == 0) {
-        return nullptr;
-    }
-    return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
-}
-
-AudioPolicyService::OpRecordAudioMonitor::OpRecordAudioMonitor(
-        const AttributionSourceState& attributionSource, int32_t appOp,
-        wp<AudioCommandThread> commandThread) :
-            mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
-            mCommandThread(commandThread)
-{
-}
-
-AudioPolicyService::OpRecordAudioMonitor::~OpRecordAudioMonitor()
-{
-    if (mOpCallback != 0) {
-        mAppOpsManager.stopWatchingMode(mOpCallback);
-    }
-    mOpCallback.clear();
-}
-
-void AudioPolicyService::OpRecordAudioMonitor::onFirstRef()
-{
-    checkOp();
-    mOpCallback = new RecordAudioOpCallback(this);
-    ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
-    int flags = doesPackageTargetAtLeastU(
-            mAttributionSource.packageName.value_or("")) ?
-            AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
-    // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
-    // since it controls the mic permission for legacy apps.
-    mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
-        mAttributionSource.packageName.value_or(""))),
-        flags,
-        mOpCallback);
-}
-
-bool AudioPolicyService::OpRecordAudioMonitor::hasOp() const {
-    return mHasOp.load();
-}
-
-// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
-// is updated in AppOp callback and in onFirstRef()
-// Note this method is never called (and never to be) for audio server / root track
-// due to the UID in createIfNeeded(). As a result for those record track, it's:
-// - not called from constructor,
-// - not called from RecordAudioOpCallback because the callback is not installed in this case
-void AudioPolicyService::OpRecordAudioMonitor::checkOp(bool updateUidStates)
-{
-    // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
-    // since it controls the mic permission for legacy apps.
-    const int32_t mode = mAppOpsManager.checkOp(mAppOp,
-            mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
-                mAttributionSource.packageName.value_or(""))));
-    const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
-    // verbose logging only log when appOp changed
-    ALOGI_IF(hasIt != mHasOp.load(),
-            "App op %d missing, %ssilencing record %s",
-            mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
-    mHasOp.store(hasIt);
-
-    if (updateUidStates) {
-          sp<AudioCommandThread> commandThread = mCommandThread.promote();
-          if (commandThread != nullptr) {
-              commandThread->updateUidStatesCommand();
-          }
-    }
-}
-
-AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
-        const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
-{ }
-
-void AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
-            const String16& packageName __unused) {
-    sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
-    if (monitor != NULL) {
-        if (op != monitor->getOp()) {
-            return;
-        }
-        monitor->checkOp(true);
-    }
-}
-
-
 // -----------  AudioPolicyService::AudioCommandThread implementation ----------
 
 AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name,
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index d0cde64..ff29305 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -1,4 +1,3 @@
-
 /*
  * Copyright (C) 2009 The Android Open Source Project
  *
@@ -28,7 +27,6 @@
 #include <utils/Vector.h>
 #include <utils/SortedVector.h>
 #include <binder/ActivityManager.h>
-#include <binder/AppOpsManager.h>
 #include <binder/BinderService.h>
 #include <binder/IUidObserver.h>
 #include <system/audio.h>
@@ -64,6 +62,12 @@
 
 // ----------------------------------------------------------------------------
 
+namespace media::audiopolicy {
+    class AudioRecordClient;
+}
+
+using ::android::media::audiopolicy::AudioRecordClient;
+
 class AudioPolicyService :
     public BinderService<AudioPolicyService>,
     public media::BnAudioPolicyService,
@@ -194,6 +198,8 @@
     binder::Status getPhoneState(AudioMode* _aidl_return) override;
     binder::Status registerPolicyMixes(const std::vector<media::AudioMix>& mixes,
                                        bool registration) override;
+    binder::Status updatePolicyMixes(
+        const ::std::vector<::android::media::AudioMixUpdate>& updates) override;
     binder::Status setUidDeviceAffinities(int32_t uid,
                                           const std::vector<AudioDevice>& devices) override;
     binder::Status removeUidDeviceAffinities(int32_t uid) override;
@@ -401,7 +407,6 @@
     // Handles binder shell commands
     virtual status_t shellCommand(int in, int out, int err, Vector<String16>& args);
 
-    class AudioRecordClient;
 
     // Sets whether the given UID records only silence
     virtual void setAppState_l(sp<AudioRecordClient> client, app_state_t state) REQUIRES(mLock);
@@ -542,6 +547,7 @@
     // Thread used to send audio config commands to audio flinger
     // For audio config commands, it is necessary because audio flinger requires that the calling
     // process (user) has permission to modify audio settings.
+    public:
     class AudioCommandThread : public Thread {
         class AudioCommand;
     public:
@@ -732,6 +738,7 @@
         wp<AudioPolicyService> mService;
     };
 
+    private:
     class AudioPolicyClient : public AudioPolicyClientInterface
     {
      public:
@@ -906,6 +913,7 @@
               bool                                 mAudioVolumeGroupCallbacksEnabled;
     };
 
+    public:
     class AudioClient : public virtual RefBase {
     public:
                 AudioClient(const audio_attributes_t attributes,
@@ -927,82 +935,8 @@
         const audio_port_handle_t deviceId;  // selected input device port ID
               bool active;                   // Playback/Capture is active or inactive
     };
-
-    // Checks and monitors app ops for AudioRecordClient
-    class OpRecordAudioMonitor : public RefBase {
-    public:
-        ~OpRecordAudioMonitor() override;
-        bool hasOp() const;
-        int32_t getOp() const { return mAppOp; }
-
-        static sp<OpRecordAudioMonitor> createIfNeeded(
-                const AttributionSourceState& attributionSource,
-                const audio_attributes_t& attr, wp<AudioCommandThread> commandThread);
-
     private:
-        OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
-                wp<AudioCommandThread> commandThread);
 
-        void onFirstRef() override;
-
-        AppOpsManager mAppOpsManager;
-
-        class RecordAudioOpCallback : public BnAppOpsCallback {
-        public:
-            explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
-            void opChanged(int32_t op, const String16& packageName) override;
-
-        private:
-            const wp<OpRecordAudioMonitor> mMonitor;
-        };
-
-        sp<RecordAudioOpCallback> mOpCallback;
-        // called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
-        // in AppOp callback and in onFirstRef()
-        // updateUidStates is true when the silenced state of active AudioRecordClients must be
-        // re-evaluated
-        void checkOp(bool updateUidStates = false);
-
-        std::atomic_bool mHasOp;
-        const AttributionSourceState mAttributionSource;
-        const int32_t mAppOp;
-        wp<AudioCommandThread> mCommandThread;
-    };
-
-    // --- AudioRecordClient ---
-    // Information about each registered AudioRecord client
-    // (between calls to getInputForAttr() and releaseInput())
-    class AudioRecordClient : public AudioClient {
-    public:
-                AudioRecordClient(const audio_attributes_t attributes,
-                          const audio_io_handle_t io,
-                          const audio_session_t session, audio_port_handle_t portId,
-                          const audio_port_handle_t deviceId,
-                          const AttributionSourceState& attributionSource,
-                          bool canCaptureOutput, bool canCaptureHotword,
-                          wp<AudioCommandThread> commandThread) :
-                    AudioClient(attributes, io, attributionSource,
-                        session, portId, deviceId), attributionSource(attributionSource),
-                        startTimeNs(0), canCaptureOutput(canCaptureOutput),
-                        canCaptureHotword(canCaptureHotword), silenced(false),
-                        mOpRecordAudioMonitor(
-                                OpRecordAudioMonitor::createIfNeeded(attributionSource,
-                                attributes, commandThread)) {}
-                ~AudioRecordClient() override = default;
-
-        bool hasOp() const {
-            return mOpRecordAudioMonitor ? mOpRecordAudioMonitor->hasOp() : true;
-        }
-
-        const AttributionSourceState attributionSource; // attribution source of client
-        nsecs_t startTimeNs;
-        const bool canCaptureOutput;
-        const bool canCaptureHotword;
-        bool silenced;
-
-    private:
-        sp<OpRecordAudioMonitor>           mOpRecordAudioMonitor;
-    };
 
 
     // --- AudioPlaybackClient ---
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
new file mode 100644
index 0000000..a89a84d
--- /dev/null
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/content/pm/IPackageManagerNative.h>
+
+#include "AudioRecordClient.h"
+#include "AudioPolicyService.h"
+
+namespace android::media::audiopolicy {
+
+using android::AudioPolicyService;
+
+namespace {
+bool isAppOpSource(audio_source_t source)
+{
+    switch (source) {
+        case AUDIO_SOURCE_FM_TUNER:
+        case AUDIO_SOURCE_ECHO_REFERENCE:
+        case AUDIO_SOURCE_REMOTE_SUBMIX:
+            return false;
+        default:
+            break;
+    }
+    return true;
+}
+
+int getTargetSdkForPackageName(std::string_view packageName) {
+    const auto binder = defaultServiceManager()->checkService(String16{"package_native"});
+    int targetSdk = -1;
+    if (binder != nullptr) {
+        const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
+        if (pm != nullptr) {
+            const auto status = pm->getTargetSdkVersionForPackage(
+                    String16{packageName.data(), packageName.size()}, &targetSdk);
+            return status.isOk() ? targetSdk : -1;
+        }
+    }
+    return targetSdk;
+}
+
+bool doesPackageTargetAtLeastU(std::string_view packageName) {
+    return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
+}
+} // anonymous namespace
+
+// static
+sp<OpRecordAudioMonitor>
+OpRecordAudioMonitor::createIfNeeded(
+            const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
+            wp<AudioPolicyService::AudioCommandThread> commandThread)
+{
+    if (isAudioServerOrRootUid(attributionSource.uid)) {
+        ALOGV("not silencing record for audio or root source %s",
+                attributionSource.toString().c_str());
+        return nullptr;
+    }
+
+    if (!isAppOpSource(attr.source)) {
+        ALOGD("not monitoring app op for uid %d and source %d",
+                attributionSource.uid, attr.source);
+        return nullptr;
+    }
+
+    if (!attributionSource.packageName.has_value()
+            || attributionSource.packageName.value().size() == 0) {
+        return nullptr;
+    }
+    return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+}
+
+OpRecordAudioMonitor::OpRecordAudioMonitor(
+        const AttributionSourceState& attributionSource, int32_t appOp,
+        wp<AudioPolicyService::AudioCommandThread> commandThread) :
+            mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
+            mCommandThread(commandThread)
+{
+}
+
+OpRecordAudioMonitor::~OpRecordAudioMonitor()
+{
+    if (mOpCallback != 0) {
+        mAppOpsManager.stopWatchingMode(mOpCallback);
+    }
+    mOpCallback.clear();
+}
+
+void OpRecordAudioMonitor::onFirstRef()
+{
+    checkOp();
+    mOpCallback = new RecordAudioOpCallback(this);
+    ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+
+    int flags = doesPackageTargetAtLeastU(
+            mAttributionSource.packageName.value_or("")) ?
+            AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
+    // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
+    // since it controls the mic permission for legacy apps.
+    mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+        mAttributionSource.packageName.value_or(""))),
+        flags,
+        mOpCallback);
+}
+
+bool OpRecordAudioMonitor::hasOp() const {
+    return mHasOp.load();
+}
+
+// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
+// is updated in the AppOp callback and in onFirstRef().
+// Note this method is never called (and never will be) for audio server / root tracks
+// because of the UID check in createIfNeeded(). As a result, for those record tracks it is:
+// - not called from the constructor,
+// - not called from RecordAudioOpCallback, because the callback is not installed in that case
+void OpRecordAudioMonitor::checkOp(bool updateUidStates)
+{
+    // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
+    // since it controls the mic permission for legacy apps.
+    const int32_t mode = mAppOpsManager.checkOp(mAppOp,
+            mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+                mAttributionSource.packageName.value_or(""))));
+    const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+    // Verbose logging: only log when the appOp state changed.
+    ALOGI_IF(hasIt != mHasOp.load(),
+            "App op %d missing, %ssilencing record %s",
+            mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
+    mHasOp.store(hasIt);
+
+    if (updateUidStates) {
+          sp<AudioPolicyService::AudioCommandThread> commandThread = mCommandThread.promote();
+          if (commandThread != nullptr) {
+              commandThread->updateUidStatesCommand();
+          }
+    }
+}
+
+OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
+        const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
+{ }
+
+void OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
+            const String16& packageName __unused) {
+    sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
+    if (monitor != NULL) {
+        if (op != monitor->getOp()) {
+            return;
+        }
+        monitor->checkOp(true);
+    }
+}
+
+} // namespace android::media::audiopolicy
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
new file mode 100644
index 0000000..d3be316
--- /dev/null
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/content/AttributionSourceState.h>
+#include <binder/AppOpsManager.h>
+#include <system/audio.h>
+#include <utils/RefBase.h>
+
+#include <cstdint>
+
+#include "AudioPolicyService.h"
+
+namespace android::media::audiopolicy {
+
+using ::android::content::AttributionSourceState;
+
+// Checks and monitors app ops for AudioRecordClient
+class OpRecordAudioMonitor : public RefBase {
+public:
+    ~OpRecordAudioMonitor() override;
+    bool hasOp() const;
+    int32_t getOp() const { return mAppOp; }
+
+    static sp<OpRecordAudioMonitor> createIfNeeded(
+            const AttributionSourceState& attributionSource,
+            const audio_attributes_t& attr,
+            wp<AudioPolicyService::AudioCommandThread> commandThread);
+
+private:
+    OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
+            wp<AudioPolicyService::AudioCommandThread> commandThread);
+
+    void onFirstRef() override;
+
+    AppOpsManager mAppOpsManager;
+
+    class RecordAudioOpCallback : public BnAppOpsCallback {
+    public:
+        explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
+        void opChanged(int32_t op, const String16& packageName) override;
+
+    private:
+        const wp<OpRecordAudioMonitor> mMonitor;
+    };
+
+    sp<RecordAudioOpCallback> mOpCallback;
+    // Called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
+    // in the AppOp callback and in onFirstRef().
+    // updateUidStates is true when the silenced state of active AudioRecordClients must be
+    // re-evaluated.
+    void checkOp(bool updateUidStates = false);
+
+    std::atomic_bool mHasOp;
+    const AttributionSourceState mAttributionSource;
+    const int32_t mAppOp;
+    wp<AudioPolicyService::AudioCommandThread> mCommandThread;
+};
+
+// --- AudioRecordClient ---
+// Information about each registered AudioRecord client
+// (between calls to getInputForAttr() and releaseInput())
+class AudioRecordClient : public AudioPolicyService::AudioClient {
+public:
+            AudioRecordClient(const audio_attributes_t attributes,
+                      const audio_io_handle_t io,
+                      const audio_session_t session, audio_port_handle_t portId,
+                      const audio_port_handle_t deviceId,
+                      const AttributionSourceState& attributionSource,
+                      bool canCaptureOutput, bool canCaptureHotword,
+                      wp<AudioPolicyService::AudioCommandThread> commandThread) :
+                AudioClient(attributes, io, attributionSource,
+                    session, portId, deviceId), attributionSource(attributionSource),
+                    startTimeNs(0), canCaptureOutput(canCaptureOutput),
+                    canCaptureHotword(canCaptureHotword), silenced(false),
+                    mOpRecordAudioMonitor(
+                            OpRecordAudioMonitor::createIfNeeded(attributionSource,
+                            attributes, commandThread)) {}
+            ~AudioRecordClient() override = default;
+
+    bool hasOp() const {
+        return mOpRecordAudioMonitor ? mOpRecordAudioMonitor->hasOp() : true;
+    }
+
+    const AttributionSourceState attributionSource; // attribution source of client
+    nsecs_t startTimeNs;
+    const bool canCaptureOutput;
+    const bool canCaptureHotword;
+    bool silenced;
+
+private:
+    sp<OpRecordAudioMonitor>           mOpRecordAudioMonitor;
+};
+
+} // namespace android::media::audiopolicy
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 15eae14..110e6bf 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -26,7 +26,9 @@
 #define LOG_TAG "APM_Test"
 #include <Serializer.h>
 #include <android-base/file.h>
+#include <android-base/properties.h>
 #include <android/content/AttributionSourceState.h>
+#include <hardware/audio_effect.h>
 #include <media/AudioPolicy.h>
 #include <media/PatchBuilder.h>
 #include <media/RecordingActivityTracker.h>
@@ -185,6 +187,7 @@
             bool* isBitPerfect = nullptr);
     void getInputForAttr(
             const audio_attributes_t &attr,
+            audio_io_handle_t *input,
             audio_session_t session,
             audio_unique_id_t riid,
             audio_port_handle_t *selectedDeviceId,
@@ -296,6 +299,7 @@
 
 void AudioPolicyManagerTest::getInputForAttr(
         const audio_attributes_t &attr,
+        audio_io_handle_t *input,
         const audio_session_t session,
         audio_unique_id_t riid,
         audio_port_handle_t *selectedDeviceId,
@@ -304,7 +308,6 @@
         int sampleRate,
         audio_input_flags_t flags,
         audio_port_handle_t *portId) {
-    audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     config.sample_rate = sampleRate;
     config.channel_mask = channelMask;
@@ -315,7 +318,7 @@
     AudioPolicyInterface::input_type_t inputType;
     AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
     ASSERT_EQ(OK, mManager->getInputForAttr(
-            &attr, &input, riid, session, attributionSource, &config, flags,
+            &attr, input, riid, session, attributionSource, &config, flags,
             selectedDeviceId, &inputType, portId));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
 }
@@ -945,10 +948,13 @@
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     audio_port_handle_t mixPortId = AUDIO_PORT_HANDLE_NONE;
     audio_source_t source = AUDIO_SOURCE_VOICE_COMMUNICATION;
-    audio_attributes_t attr = {
-        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
-    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
-     AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX, &mixPortId));
+    audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source,
+                               AUDIO_FLAG_NONE, ""};
+    audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1,
+                                            &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
+                                            AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX,
+                                            &mixPortId));
 
     std::vector<audio_port_v7> ports;
     ASSERT_NO_FATAL_FAILURE(
@@ -1708,10 +1714,11 @@
     audio_attributes_t attr = {
         AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
     std::string tags = "addr=" + mMixAddress;
+    audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
     strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
-    getInputForAttr(attr, param.session, mTracker->getRiid(), &selectedDeviceId,
-                    AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
-                    AUDIO_INPUT_FLAG_NONE, &mPortId);
+    getInputForAttr(attr, &input, param.session, mTracker->getRiid(),
+                    &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                    k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &mPortId);
     ASSERT_EQ(NO_ERROR, mManager->startInput(mPortId));
     ASSERT_EQ(extractionPort.id, selectedDeviceId);
 
@@ -1905,7 +1912,7 @@
     audio_io_handle_t mOutput;
     audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
     audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t mPortId;
+    audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t mOutputType;
     audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
     bool mIsSpatialized;
@@ -1948,14 +1955,18 @@
 }
 
 TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
-        MmapPlaybackStreamMatchingRenderDapMixSucceeds) {
-      // Add render-only mix matching the test uid.
+        MmapPlaybackStreamMatchingRenderDapMixSupportingMmapSucceeds) {
+    // Add render-only mix matching the test uid.
     const int testUid = 12345;
-    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER, AUDIO_DEVICE_OUT_SPEAKER,
-                                /*mixAddress=*/"", audioConfig, {createUidCriterion(testUid)});
+    // test_audio_policy_configuration.xml declares mmap-capable mix port
+    // for AUDIO_DEVICE_OUT_USB_DEVICE.
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                                AUDIO_DEVICE_OUT_USB_DEVICE, /*mixAddress=*/"",
+                                audioConfig, {createUidCriterion(testUid)});
     ASSERT_EQ(NO_ERROR, ret);
 
-    // Geting output for matching uid should succeed for mmaped stream.
+    // Getting an output for the matching uid should succeed for the MMAP stream, because the
+    // matched mix redirects to an mmap-capable device.
     audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
     ASSERT_EQ(NO_ERROR,
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
@@ -1964,6 +1975,26 @@
                                          &mOutputType, &mIsSpatialized, &mIsBitPerfect));
 }
 
+TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
+        MmapPlaybackStreamMatchingRenderDapMixNotSupportingMmapFails) {
+    // Add render-only mix matching the test uid.
+    const int testUid = 12345;
+    // Per test_audio_policy_configuration.xml AUDIO_DEVICE_OUT_SPEAKER doesn't support mmap.
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                                AUDIO_DEVICE_OUT_SPEAKER, /*mixAddress=*/"", audioConfig,
+                                {createUidCriterion(testUid)});
+    ASSERT_EQ(NO_ERROR, ret);
+
+    // Getting an output for the matching uid should fail for the MMAP stream, because the
+    // matched mix redirects to a device which doesn't support mmap.
+    audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+    ASSERT_EQ(INVALID_OPERATION,
+              mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+                                         createAttributionSourceState(testUid), &audioConfig,
+                                         &outputFlags, &mSelectedDeviceId, &mPortId, {},
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
 INSTANTIATE_TEST_SUITE_P(
         MmapPlaybackRerouting, AudioPolicyManagerTestMMapPlaybackRerouting,
         testing::Values(DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
@@ -2027,9 +2058,10 @@
 
     audio_port_handle_t captureRoutedPortId = AUDIO_PORT_HANDLE_NONE;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    getInputForAttr(param.attributes, param.session, mTracker->getRiid(), &captureRoutedPortId,
-        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
-        AUDIO_INPUT_FLAG_NONE, &portId);
+    audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+    getInputForAttr(param.attributes, &input, param.session, mTracker->getRiid(),
+                    &captureRoutedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                    k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &portId);
     if (param.expected_match) {
         EXPECT_EQ(mExtractionPort.id, captureRoutedPortId);
     } else {
@@ -2212,9 +2244,10 @@
                 k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE);
     } else if (audio_is_input_device(type)) {
         RecordingActivityTracker tracker;
-        getInputForAttr({}, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
-         AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
-         AUDIO_INPUT_FLAG_NONE);
+        audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+        getInputForAttr({}, &input, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
+                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+                        AUDIO_INPUT_FLAG_NONE);
     }
     ASSERT_EQ(devicePort.id, routedPortId);
 
@@ -2961,7 +2994,8 @@
     audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
     attr.source = source;
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+    audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
                                             48000));
     auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
@@ -2981,7 +3015,8 @@
               mManager->setDevicesRoleForCapturePreset(source, role,
                                                        {preferredDevice->getDeviceTypeAddr()}));
     selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+    input = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
                                             48000));
     ASSERT_EQ(preferredDevice, availableDevices.getDeviceFromId(selectedDeviceId));
@@ -2991,7 +3026,8 @@
     ASSERT_EQ(NO_ERROR,
               mManager->clearDevicesRoleForCapturePreset(source, role));
     selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+    input = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
                                             48000));
     ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
@@ -3016,7 +3052,8 @@
     audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
     attr.source = source;
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+    audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
                                             48000));
     auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
@@ -3028,9 +3065,10 @@
               mManager->setDevicesRoleForCapturePreset(source, role,
                                                        {selectedDevice->getDeviceTypeAddr()}));
     selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
-                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
-                                            48000));
+    input = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1,
+                                            &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
+                                            AUDIO_CHANNEL_IN_STEREO, 48000));
     ASSERT_NE(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
 
     // After clearing disabled device for capture preset, the selected device for input should be
@@ -3038,7 +3076,8 @@
     ASSERT_EQ(NO_ERROR,
               mManager->clearDevicesRoleForCapturePreset(source, role));
     selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+    input = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
                                             48000));
     ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
@@ -3074,3 +3113,77 @@
                 DevicesRoleForCapturePresetParam({AUDIO_SOURCE_HOTWORD, DEVICE_ROLE_PREFERRED})
                 )
         );
+
+
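+// Pre-processing effect descriptor used by the test below to register an effect on the input
+// client session.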
+const effect_descriptor_t TEST_EFFECT_DESC = {
+        {0xf2a4bb20, 0x0c3c, 0x11e3, 0x8b07, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type
+        {0xff93e360, 0x0c3c, 0x11e3, 0x8a97, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid
+        EFFECT_CONTROL_API_VERSION,
+        EFFECT_FLAG_TYPE_PRE_PROC,
+        0,
+        1,
+        "APM test Effect",
+        "The Android Open Source Project",
+};
+
+class AudioPolicyManagerPreProcEffectTest : public AudioPolicyManagerTestWithConfigurationFile {
+};
+
+TEST_F(AudioPolicyManagerPreProcEffectTest, DeviceDisconnectWhileClientActive) {
+    const audio_source_t source = AUDIO_SOURCE_MIC;
+    const std::string address = "BUS00_MIC";
+    const std::string deviceName = "randomName";
+    audio_port_handle_t portId;
+    audio_devices_t type = AUDIO_DEVICE_IN_BUS;
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(type,
+            AUDIO_POLICY_DEVICE_STATE_AVAILABLE, address.c_str(), deviceName.c_str(),
+            AUDIO_FORMAT_DEFAULT));
+    auto availableDevices = mManager->getAvailableInputDevices();
+    ASSERT_GT(availableDevices.size(), 1);
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = source;
+    audio_session_t session = TEST_SESSION_ID;
+    audio_io_handle_t inputClientHandle = 777;
+    int effectId = 666;
+    audio_port_v7 devicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SOURCE, type, address, &devicePort));
+
+    audio_port_handle_t routedPortId = devicePort.id;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &inputClientHandle, session, 1, &routedPortId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000, AUDIO_INPUT_FLAG_NONE, &portId));
+    ASSERT_EQ(devicePort.id, routedPortId);
+    auto selectedDevice = availableDevices.getDeviceFromId(routedPortId);
+    ASSERT_NE(nullptr, selectedDevice);
+
+    // Add a pre-processing effect on the input client session
+    ASSERT_EQ(NO_ERROR, mManager->registerEffect(&TEST_EFFECT_DESC, inputClientHandle,
+            PRODUCT_STRATEGY_NONE, session, effectId));
+
+    ASSERT_EQ(NO_ERROR, mManager->startInput(portId));
+
+    // Force a device disconnection to close the input; the APM must not crash.
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+            type, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+            address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+
+    // Reconnect the device
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+            type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+
+    inputClientHandle += 1;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SOURCE, type, address, &devicePort));
+    routedPortId = devicePort.id;
+
+    // Reconnect the client, deliberately changing the io handle but keeping the session so that
+    // the effect gets attached again.
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &inputClientHandle, session, 1, &routedPortId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+
+    // Unregistering the effect should succeed since it was restored on the client session
+    ASSERT_EQ(NO_ERROR, mManager->unregisterEffect(effectId));
+}
\ No newline at end of file
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 50ca26a..9e092c6 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -55,6 +55,16 @@
                            samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
                 </mixPort>
                 <mixPort name="hifi_output" role="source" flags="AUDIO_OUTPUT_FLAG_BIT_PERFECT"/>
+                <mixPort name="mmap_no_irq_out" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
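+                <!-- Bus input fed by the BUS00_MIC device port; exercised by the
+                     pre-processing effect test. -->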
+                <mixPort name="mixport_bus_input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                        samplingRates="48000"
+                        channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
             </mixPorts>
             <devicePorts>
                 <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -79,6 +89,8 @@
                 </devicePort>
                 <devicePort tagName="USB Device In" type="AUDIO_DEVICE_IN_USB_DEVICE" role="source">
                 </devicePort>
+                <devicePort tagName="BUS Device In" type="AUDIO_DEVICE_IN_BUS" role="source" address="BUS00_MIC">
+                </devicePort>
             </devicePorts>
             <routes>
                 <route type="mix" sink="Speaker"
@@ -96,7 +108,9 @@
                 <route type="mix" sink="BT A2DP Out"
                        sources="primary output,hifi_output"/>
                 <route type="mix" sink="USB Device Out"
-                       sources="primary output,hifi_output"/>
+                       sources="primary output,hifi_output,mmap_no_irq_out"/>
+                <route type="mix" sink="mixport_bus_input"
+                    sources="BUS Device In"/>
             </routes>
         </module>
 
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index a45365a..ea467e7 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -35,9 +35,93 @@
     ],
 }
 
-cc_library_shared {
+cc_defaults {
+    name: "libcameraservice_deps",
+
+    shared_libs: [
+        "libactivitymanager_aidl",
+        "libbase",
+        "libdl",
+        "libui",
+        "liblog",
+        "libutilscallstack",
+        "libutils",
+        "libbinder",
+        "libbinder_ndk",
+        "libactivitymanager_aidl",
+        "libpermission",
+        "libcutils",
+        "libexif",
+        "libmedia",
+        "libmediautils",
+        "libcamera_client",
+        "libcamera_metadata",
+        "libfmq",
+        "libgui",
+        "libhardware",
+        "libhidlbase",
+        "libimage_io",
+        "libjpeg",
+        "libultrahdr",
+        "libmedia_codeclist",
+        "libmedia_omx",
+        "libmemunreachable",
+        "libprocessgroup",
+        "libprocinfo",
+        "libsensorprivacy",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libxml2",
+        "libyuv",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.3",
+        "android.hardware.camera.device@3.4",
+        "android.hardware.camera.device@3.5",
+        "android.hardware.camera.device@3.6",
+        "android.hardware.camera.device@3.7",
+        "android.hardware.common-V2-ndk",
+        "android.hardware.common.fmq-V1-ndk",
+        "android.hardware.graphics.common-V4-ndk",
+        "media_permission-aidl-cpp",
+        "server_configurable_flags",
+    ],
+
+    static_libs: [
+        "android.frameworks.cameraservice.common@2.0",
+        "android.frameworks.cameraservice.service@2.0",
+        "android.frameworks.cameraservice.service@2.1",
+        "android.frameworks.cameraservice.service@2.2",
+        "android.frameworks.cameraservice.device@2.0",
+        "android.frameworks.cameraservice.device@2.1",
+        "android.frameworks.cameraservice.common-V1-ndk",
+        "android.frameworks.cameraservice.service-V1-ndk",
+        "android.frameworks.cameraservice.device-V1-ndk",
+        "android.hardware.camera.common-V1-ndk",
+        "android.hardware.camera.device-V2-ndk",
+        "android.hardware.camera.metadata-V2-ndk",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V2-ndk",
+        "camera_platform_flags_c_lib",
+        "libaidlcommonsupport",
+        "libbinderthreadstateutils",
+        "libcameraservice_device_independent",
+        "libdynamic_depth",
+        "libprocessinfoservice_aidl",
+        "media_permission-aidl-cpp",
+    ],
+}
+
+cc_library {
     name: "libcameraservice",
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
     // Camera service source
 
     srcs: [
@@ -105,6 +189,7 @@
         "utils/CameraThreadState.cpp",
         "utils/CameraTraces.cpp",
         "utils/AutoConditionLock.cpp",
+        "utils/SchedulingPolicyUtils.cpp",
         "utils/SessionConfigurationUtils.cpp",
         "utils/SessionConfigurationUtilsHidl.cpp",
         "utils/SessionStatsBuilder.cpp",
@@ -119,73 +204,6 @@
         "libmediametrics_headers",
     ],
 
-    shared_libs: [
-        "libactivitymanager_aidl",
-        "libbase",
-        "libdl",
-        "libexif",
-        "libui",
-        "liblog",
-        "libutilscallstack",
-        "libutils",
-        "libbinder",
-        "libbinder_ndk",
-        "libactivitymanager_aidl",
-        "libpermission",
-        "libcutils",
-        "libmedia",
-        "libmediautils",
-        "libcamera_client",
-        "libcamera_metadata",
-        "libdynamic_depth",
-        "libfmq",
-        "libgui",
-        "libhardware",
-        "libhidlbase",
-        "libimage_io",
-        "libjpeg",
-        "libultrahdr",
-        "libmedia_codeclist",
-        "libmedia_omx",
-        "libmemunreachable",
-        "libsensorprivacy",
-        "libstagefright",
-        "libstagefright_foundation",
-        "libxml2",
-        "libyuv",
-        "android.frameworks.cameraservice.common@2.0",
-        "android.frameworks.cameraservice.service@2.0",
-        "android.frameworks.cameraservice.service@2.1",
-        "android.frameworks.cameraservice.service@2.2",
-        "android.frameworks.cameraservice.device@2.0",
-        "android.frameworks.cameraservice.device@2.1",
-        "android.frameworks.cameraservice.common-V1-ndk",
-        "android.frameworks.cameraservice.service-V1-ndk",
-        "android.frameworks.cameraservice.device-V1-ndk",
-        "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
-        "android.hardware.camera.device@3.2",
-        "android.hardware.camera.device@3.3",
-        "android.hardware.camera.device@3.4",
-        "android.hardware.camera.device@3.5",
-        "android.hardware.camera.device@3.6",
-        "android.hardware.camera.device@3.7",
-        "android.hardware.camera.device-V2-ndk",
-        "media_permission-aidl-cpp",
-    ],
-
-    static_libs: [
-        "libaidlcommonsupport",
-        "libprocessinfoservice_aidl",
-        "libbinderthreadstateutils",
-        "media_permission-aidl-cpp",
-        "libcameraservice_device_independent",
-    ],
-
     export_shared_lib_headers: [
         "libbinder",
         "libactivitymanager_aidl",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index dd2e2d8..0d24838 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -89,6 +89,7 @@
     const char* kActivityServiceName = "activity";
     const char* kSensorPrivacyServiceName = "sensor_privacy";
     const char* kAppopsServiceName = "appops";
+    const char* kProcessInfoServiceName = "processinfo";
 }; // namespace anonymous
 
 namespace android {
@@ -123,6 +124,8 @@
 static const std::string sManageCameraPermission("android.permission.MANAGE_CAMERA");
 static const std::string sCameraPermission("android.permission.CAMERA");
 static const std::string sSystemCameraPermission("android.permission.SYSTEM_CAMERA");
+static const std::string sCameraHeadlessSystemUserPermission(
+        "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
 static const std::string
         sCameraSendSystemEventsPermission("android.permission.CAMERA_SEND_SYSTEM_EVENTS");
 static const std::string sCameraOpenCloseListenerPermission(
@@ -694,25 +697,125 @@
     broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
-static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
+static bool isAutomotiveDevice() {
+    // Checks the property ro.hardware.type and returns true if it is
+    // automotive.
+    char value[PROPERTY_VALUE_MAX] = {0};
+    property_get("ro.hardware.type", value, "");
+    return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+static bool isHeadlessSystemUserMode() {
+    // Checks if the device is running in headless system user mode
+    // by checking the property ro.fw.mu.headless_system_user.
+    char value[PROPERTY_VALUE_MAX] = {0};
+    property_get("ro.fw.mu.headless_system_user", value, "");
+    return strncmp(value, "true", PROPERTY_VALUE_MAX) == 0;
+}
+
+static bool isAutomotivePrivilegedClient(int32_t uid) {
+    // Returns false if this is not an automotive device type.
+    if (!isAutomotiveDevice())
+        return false;
+
+    // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
+    // privileged client uid used for safety critical use cases such as
+    // rear view and surround view.
+    return uid == AID_AUTOMOTIVE_EVS;
+}
+
+bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const {
+    // Returns false if this is not an automotive device type.
+    if (!isAutomotiveDevice())
+        return false;
+
+    // Returns false if no camera id is provided.
+    if (cam_id.empty())
+        return false;
+
+    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
+    if (getSystemCameraKind(cam_id, &systemCameraKind) != OK) {
+        // This isn't a known camera ID, so it's not a system camera.
+        ALOGE("%s: Unknown camera id %s, ", __FUNCTION__, cam_id.c_str());
+        return false;
+    }
+
+    if (systemCameraKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) {
+        ALOGE("%s: camera id %s is not a system camera", __FUNCTION__, cam_id.c_str());
+        return false;
+    }
+
+    CameraMetadata cameraInfo;
+    status_t res = mCameraProviderManager->getCameraCharacteristics(
+            cam_id, false, &cameraInfo, false);
+    if (res != OK) {
+        ALOGE("%s: Unable to get camera characteristics for camera id %s", __FUNCTION__,
+                cam_id.c_str());
+        return false;
+    }
+
+    camera_metadata_entry auto_location = cameraInfo.find(ANDROID_AUTOMOTIVE_LOCATION);
+    if (auto_location.count != 1)
+        return false;
+
+    uint8_t location = auto_location.data.u8[0];
+    if ((location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_FRONT) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_REAR) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_LEFT) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT)) {
+        return false;
+    }
+
+    return true;
+}
+
+bool CameraService::checkPermission(const std::string& cameraId, const std::string& permission,
+        const AttributionSourceState& attributionSource, const std::string& message,
+        int32_t attributedOpCode) const {
+    if (isAutomotivePrivilegedClient(attributionSource.uid)) {
+        // If cameraId is empty, this check is not being used to access a specific camera,
+        // so grant the permission to the automotive privileged client based on its uid
+        // alone.
+        if (cameraId.empty())
+            return true;
+        // When this call is used to access a specific camera, the camera id must be
+        // provided; in that case the permission is pre-granted only for the exterior
+        // system-only cameras.
+        return isAutomotiveExteriorSystemCamera(cameraId);
+    }
+
     permission::PermissionChecker permissionChecker;
+    return permissionChecker.checkPermissionForPreflight(toString16(permission), attributionSource,
+            toString16(message), attributedOpCode)
+            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
+bool CameraService::hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid,
+        int callingUid) const {
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
-            toString16(sSystemCameraPermission), attributionSource, String16(),
-            AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-            toString16(sCameraPermission), attributionSource, String16(),
-            AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForSystemCamera = checkPermission(cameraId,
+            sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
+    bool checkPermissionForCamera = checkPermission(cameraId,
+            sCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
     return checkPermissionForSystemCamera && checkPermissionForCamera;
 }
 
+bool CameraService::hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
+        int callingPid, int callingUid) const {
+    AttributionSourceState attributionSource{};
+    attributionSource.pid = callingPid;
+    attributionSource.uid = callingUid;
+    return checkPermission(cameraId, sCameraHeadlessSystemUserPermission, attributionSource,
+            std::string(), AppOpsManager::OP_NONE);
+}
+
 Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
     bool hasSystemCameraPermissions =
-            hasPermissionsForSystemCamera(CameraThreadState::getCallingPid(),
+            hasPermissionsForSystemCamera(std::string(), CameraThreadState::getCallingPid(),
                     CameraThreadState::getCallingUid());
     switch (type) {
         case CAMERA_TYPE_BACKWARD_COMPATIBLE:
@@ -738,8 +841,7 @@
     return Status::ok();
 }
 
-Status CameraService::remapCameraIds(const hardware::CameraIdRemapping&
-      cameraIdRemapping) {
+Status CameraService::remapCameraIds(const hardware::CameraIdRemapping& cameraIdRemapping) {
     if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
         const int pid = CameraThreadState::getCallingPid();
         const int uid = CameraThreadState::getCallingUid();
@@ -764,21 +866,20 @@
     std::string cameraIdToReplace, updatedCameraId;
     for(const auto& packageIdRemapping: cameraIdRemapping.packageIdRemappings) {
         packageName = packageIdRemapping.packageName;
-        if (packageName == "") {
+        if (packageName.empty()) {
             return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
                     "CameraIdRemapping: Package name cannot be empty");
         }
-
         if (packageIdRemapping.cameraIdsToReplace.size()
             != packageIdRemapping.updatedCameraIds.size()) {
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
                     "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
-                     packageName.c_str());
+                    packageName.c_str());
         }
         for(size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
             cameraIdToReplace = packageIdRemapping.cameraIdsToReplace[i];
             updatedCameraId = packageIdRemapping.updatedCameraIds[i];
-            if (cameraIdToReplace == "" || updatedCameraId == "") {
+            if (cameraIdToReplace.empty() || updatedCameraId.empty()) {
                 return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
                         "CameraIdRemapping: Camera Id cannot be empty for package %s",
                         packageName.c_str());
@@ -882,7 +983,7 @@
     if (packageName.empty()) {
         packageNameVal = getPackageNameFromUid(clientUid);
     }
-   if (clientUid < AID_APP_START || packageNameVal.empty()) {
+    if (clientUid < AID_APP_START || packageNameVal.empty()) {
         // We shouldn't remap cameras for processes with system/vendor UIDs.
         return inputCameraId;
     }
@@ -920,9 +1021,8 @@
         return STATUS_ERROR(ERROR_DISCONNECTED,
                 "Camera subsystem is not available");
     }
-    bool hasSystemCameraPermissions =
-            hasPermissionsForSystemCamera(CameraThreadState::getCallingPid(),
-                    CameraThreadState::getCallingUid());
+    bool hasSystemCameraPermissions = hasPermissionsForSystemCamera(std::to_string(cameraId),
+            CameraThreadState::getCallingPid(), CameraThreadState::getCallingUid());
     int cameraIdBound = mNumberOfCamerasWithoutSystemCamera;
     if (hasSystemCameraPermissions) {
         cameraIdBound = mNumberOfCameras;
@@ -951,13 +1051,12 @@
     const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
     auto callingPid = CameraThreadState::getCallingPid();
     auto callingUid = CameraThreadState::getCallingUid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
-                toString16(sSystemCameraPermission), attributionSource, String16(),
-                AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForSystemCamera = checkPermission(std::to_string(cameraIdInt),
+                sSystemCameraPermission, attributionSource, std::string(),
+                AppOpsManager::OP_NONE);
     if (checkPermissionForSystemCamera || getpid() == callingPid) {
         deviceIds = &mNormalDeviceIds;
     }
@@ -1031,13 +1130,11 @@
     // If it's not calling from cameraserver, check the permission only if
     // android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
     // it would've already been checked in shouldRejectSystemCameraConnection.
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-                toString16(sCameraPermission), attributionSource, String16(),
-                AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission,
+            attributionSource, std::string(), AppOpsManager::OP_NONE);
     if ((callingPid != getpid()) &&
             (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
             !checkPermissionForCamera) {
@@ -1071,7 +1168,7 @@
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
     const std::string cameraId = resolveCameraId(
-            unresolvedCameraId, CameraThreadState::getCallingUid());
+        unresolvedCameraId, CameraThreadState::getCallingUid());
     if (!mInitialized) {
         ALOGE("%s: Camera HAL couldn't be initialized.", __FUNCTION__);
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera HAL couldn't be initialized.");
@@ -1190,7 +1287,8 @@
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
         apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
-        bool forceSlowJpegMode, const std::string& originalCameraId, /*out*/sp<BasicClient>* client) {
+        bool forceSlowJpegMode, const std::string& originalCameraId,
+        /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         // Create CameraClient based on device version reported by the HAL.
@@ -1479,7 +1577,6 @@
 Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
         const std::string& clientName, int& clientUid, int& clientPid,
         /*out*/int& originalClientPid) const {
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
 
     int callingPid = CameraThreadState::getCallingPid();
@@ -1531,9 +1628,8 @@
     attributionSource.pid = clientPid;
     attributionSource.uid = clientUid;
     attributionSource.packageName = clientName;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-            toString16(sCameraPermission), attributionSource, String16(), AppOpsManager::OP_NONE)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission, attributionSource,
+            std::string(), AppOpsManager::OP_NONE);
     if (callingPid != getpid() &&
                 (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
@@ -1554,8 +1650,13 @@
                 callingUid, procState);
     }
 
-    // If sensor privacy is enabled then prevent access to the camera
-    if (mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
+
+    // An automotive privileged client (AID_AUTOMOTIVE_EVS) using an exterior system camera for use
+    // cases such as rear view and surround view cannot be disabled and is exempt from the sensor
+    // privacy policy. In all other cases, if sensor privacy is enabled, prevent access to the camera.
+    if ((!isAutomotivePrivilegedClient(callingUid) ||
+            !isAutomotiveExteriorSystemCamera(cameraId)) &&
+            mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
         ALOGE("Access Denial: cannot use the camera when sensor privacy is enabled");
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" when sensor privacy "
@@ -1581,6 +1682,18 @@
                 clientUserId, cameraId.c_str());
     }
 
+    // If the system user tries to access the camera while the device is running in
+    // headless system user mode, ensure that the client holds the required permission
+    // CAMERA_HEADLESS_SYSTEM_USER.
+    if (isHeadlessSystemUserMode() && (clientUserId == USER_SYSTEM) &&
+            !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
+        ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
+        return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
+                "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" as headless system user "
+                "without the camera headless system user permission",
+                clientName.c_str(), clientPid, clientUid, cameraId.c_str());
+    }
+
     return Status::ok();
 }
 
@@ -1673,33 +1786,6 @@
             }
         }
 
-        // Get current active client PIDs
-        std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
-        ownerPids.push_back(clientPid);
-
-        std::vector<int> priorityScores(ownerPids.size());
-        std::vector<int> states(ownerPids.size());
-
-        // Get priority scores of all active PIDs
-        status_t err = ProcessInfoService::getProcessStatesScoresFromPids(
-                ownerPids.size(), &ownerPids[0], /*out*/&states[0],
-                /*out*/&priorityScores[0]);
-        if (err != OK) {
-            ALOGE("%s: Priority score query failed: %d",
-                  __FUNCTION__, err);
-            return err;
-        }
-
-        // Update all active clients' priorities
-        std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
-        for (size_t i = 0; i < ownerPids.size() - 1; i++) {
-            pidToPriorityMap.emplace(ownerPids[i],
-                    resource_policy::ClientPriority(priorityScores[i], states[i],
-                            /* isVendorClient won't get copied over*/ false,
-                            /* oomScoreOffset won't get copied over*/ 0));
-        }
-        mActiveClientManager.updatePriorities(pidToPriorityMap);
-
         // Get state for the given cameraId
         auto state = getCameraState(cameraId);
         if (state == nullptr) {
@@ -1709,16 +1795,57 @@
             return BAD_VALUE;
         }
 
-        int32_t actualScore = priorityScores[priorityScores.size() - 1];
-        int32_t actualState = states[states.size() - 1];
+        sp<IServiceManager> sm = defaultServiceManager();
+        sp<IBinder> binder = sm->checkService(String16(kProcessInfoServiceName));
+        if (!binder && isAutomotivePrivilegedClient(CameraThreadState::getCallingUid())) {
+            // If the processinfo service is not available and the client is an automotive
+            // privileged client used for safety-critical use cases such as rear view and
+            // surround view, which must be available before Android boot completes, then use
+            // hardcoded values for the process state and priority score. Since this happens
+            // before the Android system services are up and the client is a native client,
+            // use NATIVE_ADJ as the priority score and PROCESS_STATE_BOUND_TOP as the state
+            // so that such automotive apps stay visible on top.
+            clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
+                    sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
+                    state->getConflicting(), resource_policy::NATIVE_ADJ, clientPid,
+                    ActivityManager::PROCESS_STATE_BOUND_TOP, oomScoreOffset, systemNativeClient);
+        } else {
+            // Get current active client PIDs
+            std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
+            ownerPids.push_back(clientPid);
 
-        // Make descriptor for incoming client. We store the oomScoreOffset
-        // since we might need it later on new handleEvictionsLocked and
-        // ProcessInfoService would not take that into account.
-        clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
-                sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
-                state->getConflicting(), actualScore, clientPid, actualState,
-                oomScoreOffset, systemNativeClient);
+            std::vector<int> priorityScores(ownerPids.size());
+            std::vector<int> states(ownerPids.size());
+
+            // Get priority scores of all active PIDs
+            status_t err = ProcessInfoService::getProcessStatesScoresFromPids(ownerPids.size(),
+                    &ownerPids[0], /*out*/&states[0], /*out*/&priorityScores[0]);
+            if (err != OK) {
+                ALOGE("%s: Priority score query failed: %d", __FUNCTION__, err);
+                return err;
+            }
+
+            // Update all active clients' priorities
+            std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
+            for (size_t i = 0; i < ownerPids.size() - 1; i++) {
+                pidToPriorityMap.emplace(ownerPids[i],
+                        resource_policy::ClientPriority(priorityScores[i], states[i],
+                        /* isVendorClient won't get copied over*/ false,
+                        /* oomScoreOffset won't get copied over*/ 0));
+            }
+            mActiveClientManager.updatePriorities(pidToPriorityMap);
+
+            int32_t actualScore = priorityScores[priorityScores.size() - 1];
+            int32_t actualState = states[states.size() - 1];
+
+            // Make descriptor for incoming client. We store the oomScoreOffset
+            // since we might need it later on new handleEvictionsLocked and
+            // ProcessInfoService would not take that into account.
+            clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
+                    sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
+                    state->getConflicting(), actualScore, clientPid, actualState,
+                    oomScoreOffset, systemNativeClient);
+        }
 
         resource_policy::ClientPriority clientPriority = clientDescriptor->getPriority();
 
@@ -1897,7 +2024,7 @@
     //      have android.permission.SYSTEM_CAMERA permissions.
     if (!isVendorListener && (systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA ||
             (systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
-            !hasPermissionsForSystemCamera(clientPid, clientUid)))) {
+            !hasPermissionsForSystemCamera(std::string(), clientPid, clientUid)))) {
         return true;
     }
     return false;
@@ -1937,7 +2064,7 @@
     //     characteristics) even if clients don't have android.permission.CAMERA. We do not want the
     //     same behavior for system camera devices.
     if (!systemClient && systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
-            !hasPermissionsForSystemCamera(cPid, cUid)) {
+            !hasPermissionsForSystemCamera(cameraId, cPid, cUid)) {
         ALOGW("Rejecting access to system only camera %s, inadequete permissions",
                 cameraId.c_str());
         return true;
@@ -1988,16 +2115,20 @@
         clientUserId = multiuser_get_user_id(callingUid);
     }
 
-    if (mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
+    // An automotive privileged client (AID_AUTOMOTIVE_EVS) using an exterior system camera for use
+    // cases such as rear view and surround view cannot be disabled by device policy.
+    if ((!isAutomotivePrivilegedClient(callingUid) || !isAutomotiveExteriorSystemCamera(cameraId))
+            && mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
         std::string msg = "Camera disabled by device policy";
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(ERROR_DISABLED, msg.c_str());
     }
 
     // enforce system camera permissions
-    if (oomScoreOffset > 0 &&
-            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid()) &&
-            !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
+    if (oomScoreOffset > 0
+            && !hasPermissionsForSystemCamera(cameraId, callingPid,
+                    CameraThreadState::getCallingUid())
+            && !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
         std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
                         "camera id %s without SYSTEM_CAMERA permissions",
                         clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
@@ -2008,10 +2139,10 @@
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
             cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false,
-            unresolvedCameraId, /*out*/client);
+            targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false, unresolvedCameraId,
+            /*out*/client);
 
-    if (!ret.isOk()) {
+    if (!ret.isOk()) {
         logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
         return ret;
     }
@@ -2084,6 +2215,8 @@
 
     bool isNonSystemNdk = false;
     std::string clientPackageName;
+    int packageUid = (clientUid == USE_CALLING_UID) ?
+            CameraThreadState::getCallingUid() : clientUid;
     if (clientPackageNameMaybe.size() <= 0) {
         // NDK calls don't come with package names, but we need one for various cases.
         // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
@@ -2091,8 +2224,6 @@
         // same permissions, so picking any associated package name is sufficient. For some
         // other cases, this may give inaccurate names for clients in logs.
         isNonSystemNdk = true;
-        int packageUid = (clientUid == USE_CALLING_UID) ?
-            CameraThreadState::getCallingUid() : clientUid;
         clientPackageName = getPackageNameFromUid(packageUid);
     } else {
         clientPackageName = clientPackageNameMaybe;
@@ -2289,32 +2420,38 @@
                     clientPackageName));
         }
 
-        // Set camera muting behavior
-        bool isCameraPrivacyEnabled =
-                mSensorPrivacyPolicy->isCameraPrivacyEnabled();
-        if (client->supportsCameraMute()) {
-            client->setCameraMute(
-                    mOverrideCameraMuteMode || isCameraPrivacyEnabled);
-        } else if (isCameraPrivacyEnabled) {
-            // no camera mute supported, but privacy is on! => disconnect
-            ALOGI("Camera mute not supported for package: %s, camera id: %s",
-                    client->getPackageName().c_str(), cameraId.c_str());
-            // Do not hold mServiceLock while disconnecting clients, but
-            // retain the condition blocking other clients from connecting
-            // in mServiceLockWrapper if held.
-            mServiceLock.unlock();
-            // Clear caller identity temporarily so client disconnect PID
-            // checks work correctly
-            int64_t token = CameraThreadState::clearCallingIdentity();
-            // Note AppOp to trigger the "Unblock" dialog
-            client->noteAppOp();
-            client->disconnect();
-            CameraThreadState::restoreCallingIdentity(token);
-            // Reacquire mServiceLock
-            mServiceLock.lock();
+        // An automotive privileged client (AID_AUTOMOTIVE_EVS) using an exterior system camera for
+        // use cases such as rear view and surround view cannot be disabled and is exempt from the
+        // camera privacy policy.
+        if ((!isAutomotivePrivilegedClient(packageUid) ||
+                !isAutomotiveExteriorSystemCamera(cameraId))) {
+            // Set camera muting behavior.
+            bool isCameraPrivacyEnabled =
+                    mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+            if (client->supportsCameraMute()) {
+                client->setCameraMute(
+                        mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+            } else if (isCameraPrivacyEnabled) {
+                // no camera mute supported, but privacy is on! => disconnect
+                ALOGI("Camera mute not supported for package: %s, camera id: %s",
+                        client->getPackageName().c_str(), cameraId.c_str());
+                // Do not hold mServiceLock while disconnecting clients, but
+                // retain the condition blocking other clients from connecting
+                // in mServiceLockWrapper if held.
+                mServiceLock.unlock();
+                // Clear caller identity temporarily so client disconnect PID
+                // checks work correctly
+                int64_t token = CameraThreadState::clearCallingIdentity();
+                // Note AppOp to trigger the "Unblock" dialog
+                client->noteAppOp();
+                client->disconnect();
+                CameraThreadState::restoreCallingIdentity(token);
+                // Reacquire mServiceLock
+                mServiceLock.lock();
 
-            return STATUS_ERROR_FMT(ERROR_DISABLED,
-                    "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
+                return STATUS_ERROR_FMT(ERROR_DISABLED,
+                        "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
+            }
         }
 
         if (shimUpdateOnly) {
@@ -2451,8 +2588,7 @@
 }
 
 Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
-        int32_t torchStrength,
-        const sp<IBinder>& clientBinder) {
+        int32_t torchStrength, const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2579,8 +2715,7 @@
     return Status::ok();
 }
 
-Status CameraService::setTorchMode(const std::string& unresolvedCameraId,
-        bool enabled,
+Status CameraService::setTorchMode(const std::string& unresolvedCameraId, bool enabled,
         const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
 
@@ -2923,13 +3058,11 @@
     // Check for camera permissions
     int callingPid = CameraThreadState::getCallingPid();
     int callingUid = CameraThreadState::getCallingUid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-                toString16(sCameraPermission), attributionSource, String16(),
-                AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(std::string(),
+                sCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
     if ((callingPid != getpid()) && !checkPermissionForCamera) {
         ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
@@ -2977,13 +3110,13 @@
 
     auto clientUid = CameraThreadState::getCallingUid();
     auto clientPid = CameraThreadState::getCallingPid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.uid = clientUid;
     attributionSource.pid = clientPid;
-    bool openCloseCallbackAllowed = permissionChecker.checkPermissionForPreflight(
-            toString16(sCameraOpenCloseListenerPermission), attributionSource, String16(),
-            AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+
+    bool openCloseCallbackAllowed = checkPermission(std::string(),
+            sCameraOpenCloseListenerPermission, attributionSource, std::string(),
+            AppOpsManager::OP_NONE);
 
     Mutex::Autolock lock(mServiceLock);
 
@@ -5121,6 +5254,7 @@
     state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
                         &logicalCameraIds]
             (const std::string& cameraId, StatusInternal status) {
+
             if (status != StatusInternal::ENUMERATING) {
                 // Update torch status if it has a flash unit.
                 Mutex::Autolock al(mTorchStatusMutex);
@@ -6017,8 +6151,10 @@
         "  clear-stream-use-case-override clear the stream use case override\n"
         "  set-zoom-override <-1/0/1> enable or disable zoom override\n"
         "      Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
-        "  remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
+        "  set-watchdog <VALUE> enables or disables the camera service watchdog\n"
+        "      Valid values 0=disable, 1=enable\n"
         "  watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
+        "  remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
         "  help print this message\n");
 }
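
Note: the sensor-privacy, device-policy, and camera-mute exemptions added above all reduce to the same predicate: deny only when the caller is not an automotive privileged client operating an exterior system-only camera. As a minimal sketch (not part of this change), the predicate could be factored into a helper; the name isExemptAutomotiveClient is hypothetical and would also need a declaration in CameraService.h:

bool CameraService::isExemptAutomotiveClient(int32_t uid, const std::string& cameraId) const {
    // Only AID_AUTOMOTIVE_EVS accessing an exterior, system-only camera is exempt
    // from sensor privacy, device policy, and camera mute handling.
    return isAutomotivePrivilegedClient(uid) && isAutomotiveExteriorSystemCamera(cameraId);
}

// Usage, equivalent to the checks added above:
//     if (!isExemptAutomotiveClient(callingUid, cameraId) &&
//             mSensorPrivacyPolicy->isSensorPrivacyEnabled()) { /* deny access */ }
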
 
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index af35709..45ce4c8 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
 #define ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
@@ -103,6 +104,9 @@
     // Event log ID
     static const int SN_EVENT_LOG_ID = 0x534e4554;
 
+    // Keep this in sync with frameworks/base/core/java/android/os/UserHandle.java
+    static const userid_t USER_SYSTEM = 0;
+
     // Register camera service
     static void instantiate();
 
@@ -617,6 +621,13 @@
     int32_t updateAudioRestrictionLocked();
 
 private:
+    /**
+     * Returns true if the device is an automotive device and the cameraId refers to a
+     * system-only camera whose AUTOMOTIVE_LOCATION characteristic is one of
+     * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT, AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
+     * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT, or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
+     */
+    bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
 
     // TODO: b/263304156 update this to make use of a death callback for more
     // robust/fault tolerant logging
@@ -633,6 +644,25 @@
     }
 
     /**
+     * Pre-grants the permission if the attribution source uid belongs to an automotive
+     * privileged client. Otherwise, uses the system service permission checker to check for
+     * the appropriate permission. If this function is called for accessing a specific camera,
+     * then the cameraId must not be empty. The cameraId is used only for automotive privileged
+     * clients, so that the permission is pre-granted only for access to a system camera located
+     * outside of the vehicle body frame; a camera located inside the vehicle cabin would require
+     * user permission.
+     */
+    bool checkPermission(const std::string& cameraId, const std::string& permission,
+            const content::AttributionSourceState& attributionSource, const std::string& message,
+            int32_t attributedOpCode) const;
+
+    bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid)
+            const;
+
+    bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
+            int callingUid) const;
+
+    /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
      */
@@ -892,7 +922,7 @@
     // Should a device status update be skipped for a particular camera device ? (this can happen
     // under various conditions. For example if a camera device is advertised as
     // system only or hidden secure camera, amongst possible others.
-    static bool shouldSkipStatusUpdates(SystemCameraKind systemCameraKind, bool isVendorListener,
+    bool shouldSkipStatusUpdates(SystemCameraKind systemCameraKind, bool isVendorListener,
             int clientPid, int clientUid);
 
     // Gets the kind of camera device (i.e public, hidden secure or system only)
@@ -1421,11 +1451,11 @@
      */
     static std::string getFormattedCurrentTime();
 
-    static binder::Status makeClient(
-            const sp<CameraService>& cameraService, const sp<IInterface>& cameraCb,
-            const std::string& packageName, bool systemNativeClient,
-            const std::optional<std::string>& featureId, const std::string& cameraId, int api1CameraId,
-            int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
+    static binder::Status makeClient(const sp<CameraService>& cameraService,
+            const sp<IInterface>& cameraCb, const std::string& packageName,
+            bool systemNativeClient, const std::optional<std::string>& featureId,
+            const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
+            int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
             bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
             const std::string& originalCameraId,
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index 9f25865..afc432d 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -44,7 +44,7 @@
         watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__, cycles, cycleLength);
 
 // Default cycles and cycle length values used to calculate permitted elapsed time
-const static size_t   kMaxCycles     = 100;
+const static size_t   kMaxCycles     = 650;
 const static uint32_t kCycleLengthMs = 100;
 
 namespace android {
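
For reference, the permitted elapsed time implied by these defaults is kMaxCycles * kCycleLengthMs = 650 * 100 ms = 65 s, up from the previous 100 * 100 ms = 10 s. The larger default presumably accommodates long-running monitored calls such as disconnect(), which is switched below from WATCH_CUSTOM_TIMER to the plain WATCH macro in Camera2ClientBase.cpp.
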
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 3761f75..939f969 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -439,7 +439,7 @@
 
         CameraDeviceBase::PhysicalCameraSettingsList physicalSettingsList;
         for (const auto& it : request.mPhysicalCameraSettings) {
-            std::string resolvedId = (mOriginalCameraId == it.id) ? mDevice->getId() : it.id;
+            const std::string resolvedId = (mOriginalCameraId == it.id) ? mDevice->getId() : it.id;
             if (it.settings.isEmpty()) {
                 ALOGE("%s: Camera %s: Sent empty metadata packet. Rejecting request.",
                         __FUNCTION__, mCameraIdStr.c_str());
@@ -465,18 +465,19 @@
                 }
             }
 
+            const std::string &physicalId = resolvedId;
             bool hasTestPatternModePhysicalKey = std::find(mSupportedPhysicalRequestKeys.begin(),
                     mSupportedPhysicalRequestKeys.end(), ANDROID_SENSOR_TEST_PATTERN_MODE) !=
                     mSupportedPhysicalRequestKeys.end();
             bool hasTestPatternDataPhysicalKey = std::find(mSupportedPhysicalRequestKeys.begin(),
                     mSupportedPhysicalRequestKeys.end(), ANDROID_SENSOR_TEST_PATTERN_DATA) !=
                     mSupportedPhysicalRequestKeys.end();
-            if (resolvedId != mDevice->getId()) {
+            if (physicalId != mDevice->getId()) {
                 auto found = std::find(requestedPhysicalIds.begin(), requestedPhysicalIds.end(),
                         resolvedId);
                 if (found == requestedPhysicalIds.end()) {
                     ALOGE("%s: Camera %s: Physical camera id: %s not part of attached outputs.",
-                            __FUNCTION__, mCameraIdStr.c_str(), resolvedId.c_str());
+                            __FUNCTION__, mCameraIdStr.c_str(), physicalId.c_str());
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                             "Invalid physical camera id");
                 }
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 68bd958..86a94e2 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -371,7 +371,7 @@
     int mVideoStabilizationMode = -1;
 
     // This only exists in case of camera ID Remapping.
-    std::string mOriginalCameraId;
+    const std::string mOriginalCameraId;
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index a54ba9b..a611cc6 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -269,15 +269,8 @@
 template <typename TClientBase>
 binder::Status Camera2ClientBase<TClientBase>::disconnect() {
     if (mCameraServiceWatchdog != nullptr && mDevice != nullptr) {
-        // Timer for the disconnect call should be greater than getExpectedInFlightDuration
-        // since this duration is used to error handle methods in the disconnect sequence
-        // thus allowing existing error handling methods to execute first
-        uint64_t maxExpectedDuration =
-                ns2ms(mDevice->getExpectedInFlightDuration() + kBufferTimeDisconnectNs);
-
         // Initialization from hal succeeded, time disconnect.
-        return mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(disconnectImpl(),
-                maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
+        return mCameraServiceWatchdog->WATCH(disconnectImpl());
     }
     return disconnectImpl();
 }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 30c763d..88d65dc 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -137,9 +137,6 @@
 
 protected:
 
-    // Used for watchdog timeout to monitor disconnect
-    static const nsecs_t kBufferTimeDisconnectNs = 3000000000; // 3 sec.
-
     // The PID provided in the constructor call
     pid_t mInitialClientPid;
     bool mOverrideForPerfClass = false;
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 5e79d6b..9659db8 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -607,8 +607,9 @@
             aidl::android::hardware::camera::device::CameraMetadata pChars;
             status = interface->getPhysicalCameraCharacteristics(id, &pChars);
             if (!status.isOk()) {
-                ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
-                        __FUNCTION__, id.c_str(), id.c_str(), status.getMessage());
+                ALOGE("%s: Transaction error getting physical camera %s characteristics for "
+                        "logical id %s: %s", __FUNCTION__, id.c_str(), mId.c_str(),
+                        status.getMessage());
                 return;
             }
             std::vector<uint8_t> &pMetadata = pChars.metadata;
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index bf7a471..aeb0abc 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -743,8 +743,9 @@
             });
 
             if (!ret.isOk()) {
-                ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
-                        __FUNCTION__, id.c_str(), id.c_str(), ret.description().c_str());
+                ALOGE("%s: Transaction error getting physical camera %s characteristics for"
+                        " logical id %s: %s", __FUNCTION__, id.c_str(), mId.c_str(),
+                        ret.description().c_str());
                 return;
             }
             if (status != Status::OK) {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index e2df133..e2181ec 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -63,9 +63,9 @@
 #include "device3/Camera3InputStream.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3SharedOutputStream.h"
-#include "mediautils/SchedulingPolicyService.h"
 #include "utils/CameraThreadState.h"
 #include "utils/CameraTraces.h"
+#include "utils/SchedulingPolicyUtils.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
 
@@ -296,13 +296,6 @@
                     }
                 }
             }
-            // Signal to request thread that we're not expecting any
-            // more requests. This will be true since once we're in
-            // disconnect and we've cleared off the request queue, the
-            // request thread can't receive any new requests through
-            // binder calls - since disconnect holds
-            // mBinderSerialization lock.
-            mRequestThread->setRequestClearing();
         }
 
         if (mStatus == STATUS_ERROR) {
@@ -2631,8 +2624,8 @@
     if (disableFifo != 1) {
         // Boost priority of request thread to SCHED_FIFO.
         pid_t requestThreadTid = mRequestThread->getTid();
-        res = requestPriority(getpid(), requestThreadTid,
-                kRequestThreadPriority, /*isForApp*/ false, /*asynchronous*/ false);
+        res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), requestThreadTid,
+                kRequestThreadPriority);
         if (res != OK) {
             ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
                     strerror(-res), res);
@@ -3302,8 +3295,12 @@
 }
 
 void Camera3Device::RequestThread::requestExit() {
-    // Call parent to set up shutdown
-    Thread::requestExit();
+    {
+        Mutex::Autolock l(mRequestLock);
+        mRequestClearing = true;
+        // Call parent to set up shutdown
+        Thread::requestExit();
+    }
     // The exit from any possible waits
     mDoPauseSignal.signal();
     mRequestSignal.signal();
@@ -4435,11 +4432,6 @@
     return;
 }
 
-void Camera3Device::RequestThread::setRequestClearing() {
-    Mutex::Autolock l(mRequestLock);
-    mRequestClearing = true;
-}
-
 sp<Camera3Device::CaptureRequest>
         Camera3Device::RequestThread::waitForNextRequestLocked() {
     status_t res;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 26ae8c1..a890c2b 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -941,9 +941,6 @@
          */
         void     setPaused(bool paused);
 
-        // set mRequestClearing - no new requests are expected to be queued to RequestThread
-        void setRequestClearing();
-
         /**
          * Wait until thread processes the capture request with settings'
          * android.request.id == requestId.
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 1e7e337..88a7d69 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -261,7 +261,7 @@
 
     auto mapper = states.rotateAndCropMappers.find(states.cameraId);
     if (mapper != states.rotateAndCropMappers.end()) {
-        const auto& remappedKeys = iter->second.getRemappedKeys();
+        const auto& remappedKeys = mapper->second.getRemappedKeys();
         keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
     }
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 23afa6e..701c472 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -772,7 +772,8 @@
 
     // Buffer status may be changed, so make a copy of the stream_buffer struct.
     camera_stream_buffer b = buffer;
-    if (timestampIncreasing && timestamp != 0 && timestamp <= mLastTimestamp) {
+    if (timestampIncreasing && timestamp != 0 && timestamp <= mLastTimestamp
+            && b.status != CAMERA_BUFFER_STATUS_ERROR) {
         ALOGE("%s: Stream %d: timestamp %" PRId64 " is not increasing. Prev timestamp %" PRId64,
                 __FUNCTION__, mId, timestamp, mLastTimestamp);
         b.status = CAMERA_BUFFER_STATUS_ERROR;
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 5e2a3fb..b035edd 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -31,11 +31,14 @@
         "libmedia_headers",
     ],
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
+
     shared_libs: [
         "libbase",
         "libbinder",
         "libcutils",
-        "libcameraservice",
         "libhidlbase",
         "liblog",
         "libcamera_client",
@@ -45,11 +48,6 @@
         "libjpeg",
         "libexif",
         "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
@@ -58,6 +56,12 @@
     ],
 
     static_libs: [
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V2-ndk",
+        "libcameraservice",
         "libgmock",
     ],
 
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
new file mode 100644
index 0000000..92a1030
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SchedulingPolicyUtils.h"
+
+#include <errno.h>
+#include <pthread.h>
+#include <sched.h>
+
+#include "CameraThreadState.h"
+#include <private/android_filesystem_config.h>
+#include <processgroup/processgroup.h>
+#include <processgroup/sched_policy.h>
+#include <procinfo/process.h>
+#include <utils/Log.h>
+
+namespace android {
+namespace camera3 {
+namespace SchedulingPolicyUtils {
+
+int requestPriorityDirect(int pid, int tid, int prio) {
+    android::procinfo::ProcessInfo processInfo;
+    static const int kMinPrio = 1;
+    static const int kMaxPrio = 3;
+
+    if (!android::procinfo::GetProcessInfo(tid, &processInfo)) {
+       ALOGE("%s: Error getting process info", __FUNCTION__);
+       return -EPERM;
+    }
+
+    if (prio < kMinPrio || prio > kMaxPrio || processInfo.pid != pid) {
+        ALOGE("%s: Invalid parameter prio=%d pid=%d procinfo.pid=%d", __FUNCTION__, prio, pid,
+                processInfo.pid);
+        return -EPERM;
+    }
+
+    // Set the thread group to the audio system thread group, consistent with the
+    // implementation in SchedulingPolicyService.java when isApp is false in the
+    // requestPriority method.
+    if (!SetTaskProfiles(tid, {get_sched_policy_profile_name(SP_AUDIO_SYS)},
+            /*use_fd_cache*/ true)) {
+        ALOGE("%s:Error in  SetTaskProfiles", __FUNCTION__);
+        return -EPERM;
+    }
+
+    struct sched_param param;
+    param.sched_priority = prio;
+    return sched_setscheduler(tid, SCHED_FIFO | SCHED_RESET_ON_FORK, &param);
+}
+
+} // namespace SchedulingPolicyUtils
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h
new file mode 100644
index 0000000..f71fddf
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
+#define ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
+
+namespace android {
+namespace camera3 {
+namespace SchedulingPolicyUtils {
+
+/**
+ * Request elevated priority for thread tid, whose thread group leader must be pid.
+ * Instead of using the scheduling policy service, this method uses direct system calls.
+ * The priority parameter is currently restricted to the range 1 to 3, matching the
+ * scheduling policy service implementation.
+ */
+int requestPriorityDirect(int pid, int tid, int prio);
+
+} // SchedulingPolicyUtils
+} // camera3
+} // android
+
+#endif
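
As an illustrative sketch only (not part of this change), a caller could confirm that requestPriorityDirect() took effect using standard POSIX scheduling queries; the helper name verifyRequestThreadPriority is hypothetical:

#include <sched.h>

#include "utils/SchedulingPolicyUtils.h"

// Returns true if tid now runs at the requested real-time priority.
// sched_getparam() reports priority 0 for SCHED_OTHER threads, so a match
// against prio (restricted to 1..3 above) implies a real-time policy was applied.
static bool verifyRequestThreadPriority(int pid, int tid, int prio) {
    if (android::camera3::SchedulingPolicyUtils::requestPriorityDirect(pid, tid, prio) != 0) {
        return false;
    }
    struct sched_param param = {};
    return sched_getparam(tid, &param) == 0 && param.sched_priority == prio;
}
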
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 59d1ae4..bd4ac38 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -242,6 +242,7 @@
     "channel_count_hardware",
     "sample_rate_hardware",
     "uid",
+    "sample_rate_client",
 };
 
 static constexpr const char * HeadTrackerDeviceEnabledFields[] {
@@ -1379,6 +1380,10 @@
 
     const auto uid = item->getUid();
 
+    int32_t sampleRateClient = 0;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_SAMPLERATECLIENT, &sampleRateClient);
+
     LOG(LOG_LEVEL) << "key:" << key
             << " path:" << path
             << " direction:" << direction << "(" << directionStr << ")"
@@ -1402,7 +1407,8 @@
             << " format_hardware:" << formatHardware << "(" << formatHardwareStr << ")"
             << " channel_count_hardware:" << channelCountHardware
             << " sample_rate_hardware: " << sampleRateHardware
-            << " uid: " << uid;
+            << " uid: " << uid
+            << " sample_rate_client: " << sampleRateClient;
 
     if (mAudioAnalytics.mDeliverStatistics) {
         const stats::media_metrics::BytesField bf_serialized(
@@ -1431,6 +1437,7 @@
                 , channelCountHardware
                 , sampleRateHardware
                 , uid
+                , sampleRateClient
                 );
         std::stringstream ss;
         ss << "result:" << result;
@@ -1458,6 +1465,7 @@
                 , channelCountHardware
                 , sampleRateHardware
                 , uid
+                , sampleRateClient
                 );
         ss << " " << fieldsStr;
         std::string str = ss.str();
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 1b5255a..f81db53 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -524,8 +524,8 @@
                                      "audiotrack",
                                      // other media
                                      "codec",
-                                     "freeze",
-                                     "judder",
+                                     "videofreeze",
+                                     "videojudder",
                                      "extractor",
                                      "mediadrm",
                                      "mediaparser",
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index ea76bcd..5e3e654 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -187,10 +187,12 @@
     const nsecs_t timestampNanos = MediaMetricsService::roundTime(item->getTimestamp());
     AStatsEvent_writeInt64(event, timestampNanos);
 
-    std::string packageName = item->getPkgName();
+    // packageName deprecated for calling_uid and statsd support as of U-QPR2
+    std::string packageName = "";
     AStatsEvent_writeString(event, packageName.c_str());
 
-    int64_t packageVersionCode = item->getPkgVersionCode();
+    // packageVersion deprecated for calling_uid and statsd support as of U-QPR2
+    int64_t packageVersionCode = 0;
     AStatsEvent_writeInt64(event, packageVersionCode);
 
     int64_t mediaApexVersion = 0;
@@ -654,6 +656,10 @@
     }
     AStatsEvent_writeInt32(event, componentColorFormat);
 
+    uid_t app_uid = item->getUid();
+    metrics_proto.set_caller_uid(app_uid);
+    AStatsEvent_writeInt32(event, app_uid);
+
     int64_t firstRenderTimeUs = -1;
     item->getInt64("android.media.mediacodec.first-render-time-us", &firstRenderTimeUs);
     int64_t framesReleased = -1;
@@ -854,6 +860,7 @@
             << " original_qp_p_max:" << qpPMaxOri
             << " original_qp_b_min:" << qpBMinOri
             << " original_qp_b_max:" << qpBMaxOri
+            << " app_uid:" << app_uid
             << " }";
     statsdLog->log(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED, log.str());
 
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 5d1ba2b..8e7a624 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -53,23 +53,20 @@
         std::weak_ptr<DeathNotifier> mDeathNotifier;
     };
 public:
+    static std::shared_ptr<DeathNotifier> Create(
+        const std::shared_ptr<IResourceManagerClient>& client,
+        const std::shared_ptr<ResourceManagerService>& service,
+        const ClientInfoParcel& clientInfo,
+        bool overrideProcessInfo = false);
+
     DeathNotifier(const std::shared_ptr<IResourceManagerClient>& client,
                   const std::shared_ptr<ResourceManagerService>& service,
-                  const ClientInfoParcel& clientInfo,
-                  AIBinder_DeathRecipient* recipient);
+                  const ClientInfoParcel& clientInfo);
 
     virtual ~DeathNotifier() {
         unlink();
     }
 
-    void unlink() {
-        if (mClient != nullptr) {
-            // Register for the callbacks by linking to death notification.
-            AIBinder_unlinkToDeath(mClient->asBinder().get(), mRecipient, mCookie);
-            mClient = nullptr;
-        }
-    }
-
     // Implement death recipient
     static void BinderDiedCallback(void* cookie);
     static void BinderUnlinkedCallback(void* cookie);
@@ -81,24 +78,34 @@
         // The context gets deleted at BinderUnlinkedCallback.
         mCookie = new BinderDiedContext{.mDeathNotifier = weak_from_this()};
         // Register for the callbacks by linking to death notification.
-        AIBinder_linkToDeath(mClient->asBinder().get(), mRecipient, mCookie);
+        AIBinder_linkToDeath(mClient->asBinder().get(), mDeathRecipient.get(), mCookie);
+    }
+
+    void unlink() {
+        if (mClient != nullptr) {
+            // Unlink from the death notification.
+            AIBinder_unlinkToDeath(mClient->asBinder().get(), mDeathRecipient.get(), mCookie);
+            mClient = nullptr;
+        }
     }
 
 protected:
     std::shared_ptr<IResourceManagerClient> mClient;
     std::weak_ptr<ResourceManagerService> mService;
     const ClientInfoParcel mClientInfo;
-    AIBinder_DeathRecipient* mRecipient;
     BinderDiedContext* mCookie;
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
 };
 
 DeathNotifier::DeathNotifier(const std::shared_ptr<IResourceManagerClient>& client,
                              const std::shared_ptr<ResourceManagerService>& service,
-                             const ClientInfoParcel& clientInfo,
-                             AIBinder_DeathRecipient* recipient)
+                             const ClientInfoParcel& clientInfo)
     : mClient(client), mService(service), mClientInfo(clientInfo),
-      mRecipient(recipient), mCookie(nullptr) {
-    link();
+      mCookie(nullptr),
+      mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
+                      AIBinder_DeathRecipient_new(BinderDiedCallback))) {
+    // Set a callback to be notified when the death recipient is unlinked, so the cookie can be deleted.
+    AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), BinderUnlinkedCallback);
 }
 
 //static
@@ -140,9 +147,8 @@
 public:
     OverrideProcessInfoDeathNotifier(const std::shared_ptr<IResourceManagerClient>& client,
                                      const std::shared_ptr<ResourceManagerService>& service,
-                                     const ClientInfoParcel& clientInfo,
-                                     AIBinder_DeathRecipient* recipient)
-            : DeathNotifier(client, service, clientInfo, recipient) {}
+                                     const ClientInfoParcel& clientInfo)
+            : DeathNotifier(client, service, clientInfo) {}
 
     virtual ~OverrideProcessInfoDeathNotifier() {}
 
@@ -160,6 +166,26 @@
     service->removeProcessInfoOverride(mClientInfo.pid);
 }
 
+std::shared_ptr<DeathNotifier> DeathNotifier::Create(
+    const std::shared_ptr<IResourceManagerClient>& client,
+    const std::shared_ptr<ResourceManagerService>& service,
+    const ClientInfoParcel& clientInfo,
+    bool overrideProcessInfo) {
+    std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
+    if (overrideProcessInfo) {
+        deathNotifier = std::make_shared<OverrideProcessInfoDeathNotifier>(
+            client, service, clientInfo);
+    } else {
+        deathNotifier = std::make_shared<DeathNotifier>(client, service, clientInfo);
+    }
+
+    if (deathNotifier) {
+        deathNotifier->link();
+    }
+
+    return deathNotifier;
+}
+
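One likely reason for introducing the Create() factory above is that link() stores weak_from_this() in the cookie, and weak_from_this() only yields a usable weak_ptr once the object is already owned by a shared_ptr; inside the constructor it is empty. The standalone sketch below (plain C++, hypothetical names) demonstrates that property and the resulting construct-then-link pattern:

    #include <cassert>
    #include <memory>

    struct Notifier : std::enable_shared_from_this<Notifier> {
        Notifier() {
            // No owning shared_ptr exists yet, so weak_from_this() is empty.
            assert(weak_from_this().expired());
        }
        void link() {
            // Safe: the factory only calls link() after make_shared().
            assert(!weak_from_this().expired());
        }
        static std::shared_ptr<Notifier> Create() {
            auto notifier = std::make_shared<Notifier>();
            notifier->link();
            return notifier;
        }
    };

    int main() {
        auto notifier = Notifier::Create();
        (void)notifier;
        return 0;
    }
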
 template <typename T>
 static String8 getString(const std::vector<T>& items) {
     String8 itemsStr;
@@ -377,9 +403,7 @@
       mServiceLog(new ServiceLog()),
       mSupportsMultipleSecureCodecs(true),
       mSupportsSecureWithNonSecureCodec(true),
-      mCpuBoostCount(0),
-      mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
-                      AIBinder_DeathRecipient_new(DeathNotifier::BinderDiedCallback))) {
+      mCpuBoostCount(0) {
     mSystemCB->noteResetVideo();
     // Create ResourceManagerMetrics that handles all the metrics.
     mResourceManagerMetrics = std::make_unique<ResourceManagerMetrics>(mProcessInfo);
@@ -535,8 +559,8 @@
         }
     }
     if (info.deathNotifier == nullptr && client != nullptr) {
-        info.deathNotifier = std::make_shared<DeathNotifier>(
-            client, ref<ResourceManagerService>(), clientInfo, mDeathRecipient.get());
+        info.deathNotifier = DeathNotifier::Create(
+            client, ref<ResourceManagerService>(), clientInfo);
     }
     if (mObserverService != nullptr && !resourceAdded.empty()) {
         mObserverService->onResourceAdded(uid, pid, resourceAdded);
@@ -905,8 +929,8 @@
                                 .uid = 0,
                                 .id = 0,
                                 .name = "<unknown client>"};
-    auto deathNotifier = std::make_shared<OverrideProcessInfoDeathNotifier>(
-            client, ref<ResourceManagerService>(), clientInfo, mDeathRecipient.get());
+    auto deathNotifier = DeathNotifier::Create(
+        client, ref<ResourceManagerService>(), clientInfo, true);
 
     mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{deathNotifier, client});
 
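Taken together, the ResourceManagerService.cpp hunks move the AIBinder_DeathRecipient from a single service-wide member into each DeathNotifier, wrapped in ::ndk::ScopedAIBinder_DeathRecipient so its lifetime matches the notifier. The sketch below (simplified, hypothetical names, not the service's code) shows that NDK death-notification lifecycle: create the recipient, register an on-unlinked callback that frees the heap cookie, link on a binder, and unlink explicitly when done:

    #include <android/binder_auto_utils.h>
    #include <android/binder_ibinder.h>

    class WatcherSketch {
    public:
        WatcherSketch()
            : mRecipient(AIBinder_DeathRecipient_new(onBinderDied)) {
            // Invoked once the cookie is unlinked, so it can be deleted safely.
            AIBinder_DeathRecipient_setOnUnlinked(mRecipient.get(), onUnlinked);
        }

        void link(AIBinder* binder) {
            mBinder = binder;
            mCookie = new Cookie{this};
            AIBinder_linkToDeath(mBinder, mRecipient.get(), mCookie);
        }

        void unlink() {
            if (mBinder != nullptr) {
                AIBinder_unlinkToDeath(mBinder, mRecipient.get(), mCookie);
                mBinder = nullptr;
            }
        }

    private:
        struct Cookie { WatcherSketch* watcher; };

        static void onBinderDied(void* cookie) {
            // React to the remote process dying; the cookie is still valid here.
            (void)static_cast<Cookie*>(cookie)->watcher;
        }
        static void onUnlinked(void* cookie) {
            delete static_cast<Cookie*>(cookie);  // last use of the cookie
        }

        AIBinder* mBinder = nullptr;
        Cookie* mCookie = nullptr;
        ::ndk::ScopedAIBinder_DeathRecipient mRecipient;  // freed with this object
    };
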
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index aa88ac6..637525d 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -204,7 +204,6 @@
     bool mSupportsMultipleSecureCodecs;
     bool mSupportsSecureWithNonSecureCodec;
     int32_t mCpuBoostCount;
-    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
     struct ProcessInfoOverride {
         std::shared_ptr<DeathNotifier> deathNotifier = nullptr;
         std::shared_ptr<IResourceManagerClient> client;
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index a46d87a..bbbc737 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -27,21 +27,18 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_fuzz {
-    name: "mediaresourcemanager_fuzzer",
-    srcs: [
-        "mediaresourcemanager_fuzzer.cpp",
+cc_defaults {
+    name: "mediaresourcemanager_fuzzer_defaults",
+    defaults: [
+        "service_fuzzer_defaults",
     ],
     static_libs: [
         "liblog",
         "libresourcemanagerservice",
     ],
     shared_libs: [
-        "libbinder",
-        "libbinder_ndk",
         "libmedia",
         "libmediautils",
-        "libutils",
         "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
@@ -62,3 +59,39 @@
         fuzzed_code_usage: "shipped",
     },
 }
+
+cc_fuzz {
+    name: "mediaresourcemanager_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    srcs: [
+        "mediaresourcemanager_fuzzer.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "resourcemanager_service_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    srcs: [
+        "resourcemanager_service_fuzzer.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "resourceobserver_service_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    static_libs: [
+        "resourceobserver_aidl_interface-V1-ndk",
+    ],
+    srcs: [
+        "resourceobserver_service_fuzzer.cpp",
+    ],
+    fuzz_config: {
+        triage_assignee: "waghpawan@google.com",
+    },
+}
diff --git a/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
new file mode 100644
index 0000000..ca10d20
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_interface_utils.h>
+
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "ResourceManagerService.h"
+
+using android::fuzzService;
+using android::ResourceManagerService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = SharedRefBase::make<ResourceManagerService>();
+    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
new file mode 100644
index 0000000..e69368d
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_interface_utils.h>
+
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "ResourceObserverService.h"
+
+using android::fuzzService;
+using android::ResourceObserverService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = SharedRefBase::make<ResourceObserverService>();
+    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 7f228c7..2ef6fe5 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -45,6 +45,8 @@
 #define OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS  (3 * AAUDIO_NANOS_PER_MILLISECOND)
 #define INPUT_ESTIMATED_HARDWARE_OFFSET_NANOS   (-1 * AAUDIO_NANOS_PER_MILLISECOND)
 
+#define AAUDIO_MAX_OPEN_ATTEMPTS    10
+
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
@@ -77,14 +79,23 @@
         {AUDIO_FORMAT_PCM_24_BIT_PACKED, AUDIO_FORMAT_PCM_16_BIT}
 };
 
-audio_format_t getNextFormatToTry(audio_format_t curFormat, audio_format_t returnedFromAPM) {
-    if (returnedFromAPM != AUDIO_FORMAT_DEFAULT) {
-        return returnedFromAPM;
-    }
+audio_format_t getNextFormatToTry(audio_format_t curFormat) {
     const auto it = NEXT_FORMAT_TO_TRY.find(curFormat);
-    return it != NEXT_FORMAT_TO_TRY.end() ? it->second : AUDIO_FORMAT_DEFAULT;
+    return it != NEXT_FORMAT_TO_TRY.end() ? it->second : curFormat;
 }
 
+struct configComp {
+    bool operator() (const audio_config_base_t& lhs, const audio_config_base_t& rhs) const {
+        if (lhs.sample_rate != rhs.sample_rate) {
+            return lhs.sample_rate < rhs.sample_rate;
+        } else if (lhs.channel_mask != rhs.channel_mask) {
+            return lhs.channel_mask < rhs.channel_mask;
+        } else {
+            return lhs.format < rhs.format;
+        }
+    }
+};
+
 } // namespace
 
 aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
@@ -101,60 +112,66 @@
         legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
 
     audio_format_t audioFormat = getFormat();
-    std::set<audio_format_t> formatsTried;
-    while (true) {
-        if (formatsTried.find(audioFormat) != formatsTried.end()) {
+    int32_t sampleRate = getSampleRate();
+    if (sampleRate == AAUDIO_UNSPECIFIED) {
+        sampleRate = AAUDIO_SAMPLE_RATE_DEFAULT;
+    }
+
+    const aaudio_direction_t direction = getDirection();
+    audio_config_base_t config;
+    config.format = audioFormat;
+    config.sample_rate = sampleRate;
+    config.channel_mask = AAudio_getChannelMaskForOpen(
+            getChannelMask(), getSamplesPerFrame(), direction == AAUDIO_DIRECTION_INPUT);
+
+    std::set<audio_config_base_t, configComp> configsTried;
+    int32_t numberOfAttempts = 0;
+    while (numberOfAttempts < AAUDIO_MAX_OPEN_ATTEMPTS) {
+        if (configsTried.find(config) != configsTried.end()) {
             // APM returned a config that has already been tried.
-            ALOGW("Have already tried to open with format=%#x, but failed before", audioFormat);
+            ALOGW("Have already tried to open with format=%#x and sr=%d, but failed before",
+                  config.format, config.sample_rate);
             break;
         }
-        formatsTried.insert(audioFormat);
+        configsTried.insert(config);
 
-        audio_format_t nextFormatToTry = AUDIO_FORMAT_DEFAULT;
-        result = openWithFormat(audioFormat, &nextFormatToTry);
+        audio_config_base_t previousConfig = config;
+        result = openWithConfig(&config);
         if (result != AAUDIO_ERROR_UNAVAILABLE) {
             // Return if the open succeeded, or if it failed with an error other
             // than AAUDIO_ERROR_UNAVAILABLE.
-            ALOGI("Opened format=%#x with result=%d", audioFormat, result);
+            ALOGI("Opened format=%#x sr=%d, with result=%d", previousConfig.format,
+                    previousConfig.sample_rate, result);
             break;
         }
 
-        nextFormatToTry = getNextFormatToTry(audioFormat, nextFormatToTry);
-        ALOGD("%s() %#x failed, perhaps due to format. Try again with %#x",
-              __func__, audioFormat, nextFormatToTry);
-        audioFormat = nextFormatToTry;
-        if (audioFormat == AUDIO_FORMAT_DEFAULT) {
-            // Nothing else to try
-            break;
+        // Try other formats if the config from APM is the same as our current config.
+        // Some HALs may report their format support incorrectly.
+        if ((previousConfig.format == config.format) &&
+                (previousConfig.sample_rate == config.sample_rate)) {
+            config.format = getNextFormatToTry(config.format);
         }
+
+        ALOGD("%s() %#x %d failed, perhaps due to format or sample rate. Try again with %#x %d",
+                __func__, previousConfig.format, previousConfig.sample_rate, config.format,
+                config.sample_rate);
+        numberOfAttempts++;
     }
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(
-        audio_format_t audioFormat, audio_format_t* nextFormatToTry) {
+aaudio_result_t AAudioServiceEndpointMMAP::openWithConfig(
+        audio_config_base_t* config) {
     aaudio_result_t result = AAUDIO_OK;
-    audio_config_base_t config;
+    audio_config_base_t currentConfig = *config;
     audio_port_handle_t deviceId;
 
     const audio_attributes_t attributes = getAudioAttributesFrom(this);
 
     deviceId = mRequestedDeviceId;
 
-    // Fill in config
-    config.format = audioFormat;
-
-    int32_t aaudioSampleRate = getSampleRate();
-    if (aaudioSampleRate == AAUDIO_UNSPECIFIED) {
-        aaudioSampleRate = AAUDIO_SAMPLE_RATE_DEFAULT;
-    }
-    config.sample_rate = aaudioSampleRate;
-
     const aaudio_direction_t direction = getDirection();
 
-    config.channel_mask = AAudio_getChannelMaskForOpen(
-            getChannelMask(), getSamplesPerFrame(), direction == AAUDIO_DIRECTION_INPUT);
-
     if (direction == AAUDIO_DIRECTION_OUTPUT) {
         mHardwareTimeOffsetNanos = OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS; // frames at DAC later
 
@@ -177,11 +194,11 @@
     // Open HAL stream. Set mMmapStream
     ALOGD("%s trying to open MMAP stream with format=%#x, "
           "sample_rate=%u, channel_mask=%#x, device=%d",
-          __func__, config.format, config.sample_rate,
-          config.channel_mask, deviceId);
+          __func__, config->format, config->sample_rate,
+          config->channel_mask, deviceId);
     const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
                                                                 &attributes,
-                                                                &config,
+                                                                config,
                                                                 mMmapClient,
                                                                 &deviceId,
                                                                 &sessionId,
@@ -195,9 +212,9 @@
         // not match the hardware.
         ALOGD("%s() - openMmapStream() returned status=%d, suggested format=%#x, sample_rate=%u, "
               "channel_mask=%#x",
-              __func__, status, config.format, config.sample_rate, config.channel_mask);
-        *nextFormatToTry = config.format != audioFormat ? config.format
-                                                        : *nextFormatToTry;
+              __func__, status, config->format, config->sample_rate, config->channel_mask);
+        // Keep the channel mask of the current config
+        config->channel_mask = currentConfig.channel_mask;
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
@@ -217,7 +234,7 @@
     setSessionId(actualSessionId);
 
     ALOGD("%s(format = 0x%X) deviceId = %d, sessionId = %d",
-          __func__, audioFormat, getDeviceId(), getSessionId());
+          __func__, config->format, getDeviceId(), getSessionId());
 
     // Create MMAP/NOIRQ buffer.
     result = createMmapBuffer();
@@ -227,11 +244,11 @@
 
     // Get information about the stream and pass it back to the caller.
     setChannelMask(AAudioConvert_androidToAAudioChannelMask(
-            config.channel_mask, getDirection() == AAUDIO_DIRECTION_INPUT,
-            AAudio_isChannelIndexMask(config.channel_mask)));
+            config->channel_mask, getDirection() == AAUDIO_DIRECTION_INPUT,
+            AAudio_isChannelIndexMask(config->channel_mask)));
 
-    setFormat(config.format);
-    setSampleRate(config.sample_rate);
+    setFormat(config->format);
+    setSampleRate(config->sample_rate);
     setHardwareSampleRate(getSampleRate());
     setHardwareFormat(getFormat());
     setHardwareSamplesPerFrame(AAudioConvert_channelMaskToCount(getChannelMask()));
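The open() rewrite above retries with full (format, sample rate, channel mask) configurations instead of formats only: every attempted audio_config_base_t is remembered in a std::set keyed by the configComp strict-weak ordering, and the loop is additionally capped at AAUDIO_MAX_OPEN_ATTEMPTS. A standalone sketch of the same deduplication idea, using std::tie and a stand-in struct for audio_config_base_t (hypothetical names, placeholder open logic):

    #include <cstdint>
    #include <iostream>
    #include <set>
    #include <tuple>

    // Stand-in for audio_config_base_t; only the fields the comparator uses.
    struct ConfigSketch {
        uint32_t sample_rate;
        uint32_t channel_mask;
        int32_t  format;
    };

    // Same ordering as configComp, expressed with std::tie.
    struct ConfigLess {
        bool operator()(const ConfigSketch& lhs, const ConfigSketch& rhs) const {
            return std::tie(lhs.sample_rate, lhs.channel_mask, lhs.format)
                 < std::tie(rhs.sample_rate, rhs.channel_mask, rhs.format);
        }
    };

    int main() {
        std::set<ConfigSketch, ConfigLess> configsTried;
        ConfigSketch config{48000, 0x3, 1};

        constexpr int kMaxAttempts = 10;  // mirrors AAUDIO_MAX_OPEN_ATTEMPTS
        for (int attempt = 0; attempt < kMaxAttempts; ++attempt) {
            if (!configsTried.insert(config).second) {
                std::cout << "config already tried, giving up\n";
                break;
            }
            // ... attempt to open here; on "unavailable" the callee may suggest
            // a new config, otherwise fall back to the next format to try.
            config.format += 1;  // placeholder for getNextFormatToTry()
        }
        return 0;
    }
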
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 38cf0ba..f19005c 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -97,7 +97,7 @@
 
 private:
 
-    aaudio_result_t openWithFormat(audio_format_t audioFormat, audio_format_t* nextFormatToTry);
+    aaudio_result_t openWithConfig(audio_config_base_t* config);
 
     aaudio_result_t createMmapBuffer();
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index bc7ccde..8f51ce4 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -220,18 +220,6 @@
         return mSuspended;
     }
 
-    bool isCloseNeeded() const {
-        return mCloseNeeded.load();
-    }
-
-    /**
-     * Mark this stream as needing to be closed.
-     * Once marked for closing, it cannot be unmarked.
-     */
-    void markCloseNeeded() {
-        mCloseNeeded.store(true);
-    }
-
     virtual const char *getTypeText() const { return "Base"; }
 
 protected:
@@ -419,12 +407,8 @@
     aaudio_handle_t         mHandle = -1;
     bool                    mFlowing = false;
 
-    // This indicates that a stream that is being referenced by a binder call
-    // and needs to closed.
-    std::atomic<bool>       mCloseNeeded{false}; // TODO remove
-
     // This indicates that a running stream should not be processed because of an error,
-    // for example a full message queue. Note that this atomic is unrelated to mCloseNeeded.
+    // for example a full message queue.
     std::atomic<bool>       mSuspended{false};
 
     bool                    mDisconnected GUARDED_BY(mLock) {false};
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index 549fa59..502d773 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -75,7 +75,9 @@
 
 aaudio_result_t AAudioThread::stop() {
     if (!mHasThread) {
-        ALOGE("stop() but no thread running");
+        // There can be cases where the thread was created but never started.
+        // Log this as a warning to draw attention, but it is not a serious error.
+        ALOGW("stop() but no thread running");
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index c5080a4..3521979 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -78,12 +78,38 @@
 
 ]
 
+cc_defaults {
+    name: "libaaudioservice_dependencies",
 
-cc_library {
+    shared_libs: [
+        "libaaudio_internal",
+        "libaudioclient",
+        "libaudioutils",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
+        "libbase",
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+        "aaudio-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "packagemanager_aidl-cpp",
+    ],
+
+    static_libs: [
+        "libaudioflinger",
+    ]
+}
+
+cc_library_static {
 
     name: "libaaudioservice",
 
     defaults: [
+        "libaaudioservice_dependencies",
         "latest_android_media_audio_common_types_cpp_shared",
     ],
 
@@ -116,25 +142,6 @@
         "-Werror",
     ],
 
-    shared_libs: [
-        "libaaudio_internal",
-        "libaudioclient",
-        "libaudioflinger",
-        "libaudioutils",
-        "libmedia_helper",
-        "libmediametrics",
-        "libmediautils",
-        "libbase",
-        "libbinder",
-        "libcutils",
-        "liblog",
-        "libutils",
-        "aaudio-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
-        "packagemanager_aidl-cpp",
-    ],
-
     export_shared_lib_headers: [
         "libaaudio_internal",
         "framework-permission-aidl-cpp",
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index f047065..f5c2e6c 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -403,13 +403,6 @@
 
     request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
 
-    request.getConfiguration().setHardwareSampleRate(fdp.ConsumeIntegral<int32_t>());
-    request.getConfiguration().setHardwareSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
-    request.getConfiguration().setHardwareFormat((audio_format_t)(
-        fdp.ConsumeBool()
-            ? fdp.ConsumeIntegral<int32_t>()
-            : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
-
     auto streamHandleInfo = mClient->openStream(request, configurationOutput);
     if (streamHandleInfo.getHandle() < 0) {
         // invalid request, stream not opened.
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index ea5139d..e29d520 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -86,6 +86,7 @@
         "android.hardware.tv.tuner@1.1",
         "android.hardware.tv.tuner-V2-ndk",
         "libbase",
+        "libcutils",
         "libbinder",
         "libfmq",
         "libhidlbase",
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
index 90f1731..6dee324 100644
--- a/services/tuner/main_tunerservice.cpp
+++ b/services/tuner/main_tunerservice.cpp
@@ -17,6 +17,7 @@
 #include <android-base/logging.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
+#include <cutils/properties.h>
 #include <utils/Log.h>
 #include <hidl/HidlTransportSupport.h>
 
@@ -31,6 +32,12 @@
 int main() {
     ALOGD("Tuner service starting");
 
+    if (!property_get_bool("tuner.server.enable", false)
+        && !property_get_bool("ro.tuner.lazyhal", false)) {
+        ALOGD("tuner is not enabled, terminating");
+        return 0;
+    }
+
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm = defaultServiceManager();
     hardware::configureRpcThreadpool(16, true);
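The new gate in main() exits early unless one of the two system properties is set. property_get_bool() (declared in <cutils/properties.h>) returns the supplied default when the property is missing, so on devices that define neither property the tuner service now terminates immediately instead of registering. A small sketch of the same check in isolation (illustrative only, not part of this change):

    #include <cutils/properties.h>
    #include <stdio.h>

    int main() {
        const bool enabled = property_get_bool("tuner.server.enable", false)
                || property_get_bool("ro.tuner.lazyhal", false);
        printf("tuner service %s\n", enabled ? "enabled" : "disabled");
        return enabled ? 0 : 1;
    }
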