[automerger skipped] Merge changes from topic "cherrypicker-L12700000961523212:N62700001381640113" into udc-dev-plus-aosp am: cf633f54c7 am: 4a3a0cf596 am: d167f10b8f -s ours

am skip reason: Merged-In If0468444741a076321dbf3666628dc89833f2dc9 with SHA-1 0f5dbbd69e is already in history

Original change: https://googleplex-android-review.googlesource.com/c/platform/frameworks/av/+/23783757

Change-Id: I00ccc69abf8c589157bd45b6e9444c2ab3c24a3c
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 9ae4607..ef40f6c 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -103,7 +103,6 @@
 
 namespace {
     sp<::android::hardware::ICameraService> gCameraService;
-    const int                 kCameraServicePollDelay = 500000; // 0.5s
     const char*               kCameraServiceName      = "media.camera";
 
     Mutex                     gLock;
@@ -141,14 +140,10 @@
 
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16(kCameraServiceName));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("CameraService not published, waiting...");
-            usleep(kCameraServicePollDelay);
-        } while(true);
+        binder = sm->waitForService(String16(kCameraServiceName));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (gDeathNotifier == NULL) {
             gDeathNotifier = new DeathNotifier();
         }
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 8472562..13b705c 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -26,12 +26,15 @@
 
     srcs: ["main_cameraserver.cpp"],
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
+
     header_libs: [
         "libmedia_headers",
     ],
 
     shared_libs: [
-        "libcameraservice",
         "liblog",
         "libutils",
         "libui",
@@ -40,15 +43,13 @@
         "libbinder_ndk",
         "libhidlbase",
         "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
     ],
+    static_libs: [
+        "libcameraservice",
+    ],
     compile_multilib: "first",
     cflags: [
         "-Wall",
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index bfd02b3..866dc72 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -182,6 +182,7 @@
     shared_libs: [
         "libcamera2ndk_vendor",
         "libcamera_metadata",
+        "libhidlbase",
         "libmediandk",
         "libnativewindow",
         "libutils",
@@ -191,6 +192,7 @@
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
+        "android.hidl.token@1.0",
     ],
     cflags: [
         "-D__ANDROID_VNDK__",
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 7f6ea9d..74c6cad 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -31,10 +31,13 @@
 #include <stdio.h>
 
 #include <android/log.h>
+#include <android/hidl/manager/1.2/IServiceManager.h>
+#include <android/hidl/token/1.0/ITokenManager.h>
 #include <camera/NdkCameraError.h>
 #include <camera/NdkCameraManager.h>
 #include <camera/NdkCameraDevice.h>
 #include <camera/NdkCameraCaptureSession.h>
+#include <hidl/ServiceManagement.h>
 #include <media/NdkImage.h>
 #include <media/NdkImageReader.h>
 #include <cutils/native_handle.h>
@@ -50,6 +53,8 @@
 static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;
 
 using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
+using android::hidl::manager::V1_0::IServiceManager;
+using android::hidl::token::V1_0::ITokenManager;
 using ConfiguredWindows = std::set<const native_handle_t *>;
 
 class CameraHelper {
@@ -981,11 +986,19 @@
 
 
 TEST_F(AImageReaderVendorTest, CreateWindowNativeHandle) {
+    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
+    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
+        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
+    }
     testBasicTakePictures(/*prepareSurfaces*/ false);
     testBasicTakePictures(/*prepareSurfaces*/ true);
 }
 
 TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
+    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
+    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
+        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
+    }
     for (auto & v2 : {true, false}) {
         testLogicalCameraPhysicalStream(false/*usePhysicalSettings*/, v2);
         testLogicalCameraPhysicalStream(true/*usePhysicalSettings*/, v2);
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 828d861..2030dc7 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -25,21 +25,31 @@
         "libmediametrics_headers",
     ],
 
-    shared_libs: [
-        "packagemanager_aidl-cpp",
+    defaults: [
+        "libaaudioservice_dependencies",
+        "libaudioflinger_dependencies",
+        "libaudiopolicyservice_dependencies",
+        "latest_android_media_audio_common_types_cpp_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+    ],
+
+    static_libs: [
         "libaaudioservice",
-        "libaudioclient",
         "libaudioflinger",
         "libaudiopolicyservice",
+        "libmedialogservice",
+        "libnbaio",
+    ],
+
+    shared_libs: [
+        "libaudioclient",
         "libaudioprocessing",
         "libbinder",
         "libcutils",
         "libhidlbase",
         "liblog",
         "libmedia",
-        "libmedialogservice",
         "libmediautils",
-        "libnbaio",
         "libnblog",
         "libpowermanager",
         "libutils",
@@ -59,9 +69,9 @@
         "frameworks/av/services/audiopolicy/engine/interface",
         "frameworks/av/services/audiopolicy/service",
         "frameworks/av/services/medialog",
+        "frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
 
-        // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-        "frameworks/av/services/oboeservice",
+
     ],
 
     init_rc: ["audioserver.rc"],
diff --git a/media/codec2/components/dav1d/Android.bp b/media/codec2/components/dav1d/Android.bp
new file mode 100644
index 0000000..f7850ad
--- /dev/null
+++ b/media/codec2/components/dav1d/Android.bp
@@ -0,0 +1,37 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libcodec2_soft_av1dec_dav1d",
+    // TODO: b/277797541 - enable once ready
+    enabled: false,
+
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_all-defaults",
+    ],
+
+    cflags: [
+        "-DCODECNAME=\"c2.android.dav1d-av1.decoder\"",
+        "-Wno-unused-variable",
+    ],
+
+    srcs: ["C2SoftDav1dDec.cpp"],
+    static_libs: [
+        "libyuv_static",
+        "libdav1d_8bit",
+        "libdav1d_16bit",
+    ],
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
new file mode 100644
index 0000000..b0cef41
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -0,0 +1,1493 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftDav1dDec"
+#include <android-base/properties.h>
+#include <cutils/properties.h>
+#include <thread>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include <libyuv.h>
+#include <log/log.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include "C2SoftDav1dDec.h"
+
+// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
+#if LIBYUV_VERSION >= 1780
+#include <algorithm>
+#define HAVE_LIBYUV_I410_I210_TO_AB30 1
+#else
+#define HAVE_LIBYUV_I410_I210_TO_AB30 0
+#endif
+
+namespace android {
+
+// Flag to enable dumping the bitstream and the decoded pictures to files.
+static const bool ENABLE_DUMPING_FILES_DEFAULT = false;
+static const char ENABLE_DUMPING_FILES_PROPERTY[] = "debug.dav1d.enabledumping";
+
+// The number of frames to dump to a file
+static const int NUM_FRAMES_TO_DUMP_DEFAULT = INT_MAX;
+static const char NUM_FRAMES_TO_DUMP_PROPERTY[] = "debug.dav1d.numframestodump";
+
+// The number of threads used for the dav1d decoder.
+static const int NUM_THREADS_DAV1D_DEFAULT = 0;
+static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";
+
+// CODECNAME is set and passed in as a compile flag from Android.bp
+constexpr char COMPONENT_NAME[] = CODECNAME;
+
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+
+class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
+
+        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                             .withConstValue(new C2ComponentAttributesSetting(
+                                     C2Component::ATTRIB_IS_TEMPORAL))
+                             .build());
+
+        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096),
+                                     C2F(mSize, height).inRange(2, 4096),
+                             })
+                             .withSetter(SizeSetter)
+                             .build());
+
+        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                             .withDefault(new C2StreamProfileLevelInfo::input(
+                                     0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
+                             .withFields({C2F(mProfileLevel, profile)
+                                                  .oneOf({C2Config::PROFILE_AV1_0,
+                                                          C2Config::PROFILE_AV1_1}),
+                                          C2F(mProfileLevel, level)
+                                                  .oneOf({
+                                                          C2Config::LEVEL_AV1_2,
+                                                          C2Config::LEVEL_AV1_2_1,
+                                                          C2Config::LEVEL_AV1_2_2,
+                                                          C2Config::LEVEL_AV1_2_3,
+                                                          C2Config::LEVEL_AV1_3,
+                                                          C2Config::LEVEL_AV1_3_1,
+                                                          C2Config::LEVEL_AV1_3_2,
+                                                          C2Config::LEVEL_AV1_3_3,
+                                                          C2Config::LEVEL_AV1_4,
+                                                          C2Config::LEVEL_AV1_4_1,
+                                                          C2Config::LEVEL_AV1_4_2,
+                                                          C2Config::LEVEL_AV1_4_3,
+                                                          C2Config::LEVEL_AV1_5,
+                                                          C2Config::LEVEL_AV1_5_1,
+                                                          C2Config::LEVEL_AV1_5_2,
+                                                          C2Config::LEVEL_AV1_5_3,
+                                                  })})
+                             .withSetter(ProfileLevelSetter, mSize)
+                             .build());
+
+        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoInput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoInput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoInputSetter)
+                             .build());
+
+        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoOutput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoOutput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoOutputSetter)
+                             .build());
+
+        // default static info
+        C2HdrStaticMetadataStruct defaultStaticInfo{};
+        helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+        addParameter(
+                DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+                        .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
+                        .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
+                                     C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
+                                     C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
+                                     C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
+                        .withSetter(HdrStaticInfoSetter)
+                        .build());
+
+        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+                             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 2048, 2),
+                                     C2F(mSize, height).inRange(2, 2048, 2),
+                             })
+                             .withSetter(MaxPictureSizeSetter, mSize)
+                             .build());
+
+        addParameter(
+                DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                        .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
+                        .withFields({
+                                C2F(mMaxInputSize, value).any(),
+                        })
+                        .calculatedAs(MaxInputSizeSetter, mMaxSize)
+                        .build());
+
+        C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
+        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+                C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+                {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+                             .withConstValue(defaultColorInfo)
+                             .build());
+
+        addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsTuning::output(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mDefaultColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mDefaultColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mDefaultColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mDefaultColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(DefaultColorAspectsSetter)
+                             .build());
+
+        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::input(
+                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mCodedColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mCodedColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mCodedColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mCodedColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(CodedColorAspectsSetter)
+                             .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                        .withDefault(new C2StreamColorAspectsInfo::output(
+                                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                        .withFields(
+                                {C2F(mColorAspects, range)
+                                         .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                                 C2F(mColorAspects, primaries)
+                                         .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                  C2Color::PRIMARIES_OTHER),
+                                 C2F(mColorAspects, transfer)
+                                         .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                  C2Color::TRANSFER_OTHER),
+                                 C2F(mColorAspects, matrix)
+                                         .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                  C2Color::MATRIX_OTHER)})
+                        .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+                        .build());
+
+        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        // If the surface color format isn't added to the supported formats, there is no way to
+        // know when the color format is configured to surface. This is necessary to be able to
+        // choose the 10-bit format while decoding 10-bit clips in surface mode.
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        // TODO: support more formats?
+        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                             .withDefault(new C2StreamPixelFormatInfo::output(
+                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                             .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                             .build());
+    }
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::output>& me) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+            me.set().width = oldMe.v.width;
+        }
+        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+            me.set().height = oldMe.v.height;
+        }
+        return res;
+    }
+
+    static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
+                                    const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        // TODO: get max width/height from the size's field helpers vs.
+        // hardcoding
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+        return C2R::Ok();
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
+                                  const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
+        (void)mayBlock;
+        // assume compression ratio of 2, but enforce a floor
+        me.set().value =
+                c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
+                       kMinInputBufferSize);
+        return C2R::Ok();
+    }
+
+    static C2R DefaultColorAspectsSetter(bool mayBlock,
+                                         C2P<C2StreamColorAspectsTuning::output>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                  const C2P<C2StreamColorAspectsTuning::output>& def,
+                                  const C2P<C2StreamColorAspectsInfo::input>& coded) {
+        (void)mayBlock;
+        // take default values for all unspecified fields, and coded values for specified ones
+        me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+        me.set().primaries =
+                coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
+        me.set().transfer =
+                coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
+        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+        return C2R::Ok();
+    }
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
+                                  const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        (void)size;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+        return mDefaultColorAspects;
+    }
+
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }
+
+    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    // unsafe getters
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
+    static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
+        (void)mayBlock;
+        if (me.v.mastering.red.x > 1) {
+            me.set().mastering.red.x = 1;
+        }
+        if (me.v.mastering.red.y > 1) {
+            me.set().mastering.red.y = 1;
+        }
+        if (me.v.mastering.green.x > 1) {
+            me.set().mastering.green.x = 1;
+        }
+        if (me.v.mastering.green.y > 1) {
+            me.set().mastering.green.y = 1;
+        }
+        if (me.v.mastering.blue.x > 1) {
+            me.set().mastering.blue.x = 1;
+        }
+        if (me.v.mastering.blue.y > 1) {
+            me.set().mastering.blue.y = 1;
+        }
+        if (me.v.mastering.white.x > 1) {
+            me.set().mastering.white.x = 1;
+        }
+        if (me.v.mastering.white.y > 1) {
+            me.set().mastering.white.y = 1;
+        }
+        if (me.v.mastering.maxLuminance > 65535.0) {
+            me.set().mastering.maxLuminance = 65535.0;
+        }
+        if (me.v.mastering.minLuminance > 6.5535) {
+            me.set().mastering.minLuminance = 6.5535;
+        }
+        if (me.v.maxCll > 65535.0) {
+            me.set().maxCll = 65535.0;
+        }
+        if (me.v.maxFall > 65535.0) {
+            me.set().maxFall = 65535.0;
+        }
+        return C2R::Ok();
+    }
+
+  private:
+    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+    std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+};
+
+C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
+                               const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl) {
+    mTimeStart = mTimeEnd = systemTime();
+}
+
+C2SoftDav1dDec::~C2SoftDav1dDec() {
+    onRelease();
+}
+
+c2_status_t C2SoftDav1dDec::onInit() {
+    return initDecoder() ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftDav1dDec::onStop() {
+    // TODO: b/277797541 - investigate if the decoder needs to be flushed.
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    return C2_OK;
+}
+
+void C2SoftDav1dDec::onReset() {
+    (void)onStop();
+    c2_status_t err = onFlush_sm();
+    if (err != C2_OK) {
+        ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
+        destroyDecoder();
+        if (!initDecoder()) {
+            ALOGE("Hard reset failed.");
+        }
+    }
+}
+
+void C2SoftDav1dDec::flushDav1d() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+
+        while (mDecodedPictures.size() > 0) {
+            p = mDecodedPictures.front();
+            mDecodedPictures.pop_front();
+
+            dav1d_picture_unref(&p);
+        }
+
+        int res = 0;
+        while (true) {
+            memset(&p, 0, sizeof(p));
+
+            if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
+                if (res != DAV1D_ERR(EAGAIN)) {
+                    ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
+                    break;
+                } else {
+                    res = 0;
+                    break;
+                }
+            } else {
+                dav1d_picture_unref(&p);
+            }
+        }
+
+        dav1d_flush(mDav1dCtx);
+    }
+}
+
+void C2SoftDav1dDec::onRelease() {
+    destroyDecoder();
+}
+
+c2_status_t C2SoftDav1dDec::onFlush_sm() {
+    flushDav1d();
+
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+
+    return C2_OK;
+}
+
+static int GetCPUCoreCount() {
+    int cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+    // _SC_NPROC_ONLN must be defined...
+    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+    CHECK(cpuCoreCount >= 1);
+    ALOGV("Number of CPU cores: %d", cpuCoreCount);
+    return cpuCoreCount;
+}
+
+bool C2SoftDav1dDec::initDecoder() {
+    nsecs_t now = systemTime();
+#ifdef FILE_DUMP_ENABLE
+    snprintf(mInDataFileName, 256, "%s_%" PRId64 "d.%s", DUMP_FILE_PATH, now, INPUT_DATA_DUMP_EXT);
+    snprintf(mInSizeFileName, 256, "%s_%" PRId64 "d.%s", DUMP_FILE_PATH, now, INPUT_SIZE_DUMP_EXT);
+    snprintf(mDav1dOutYuvFileName, 256, "%s_%" PRId64 "dx.%s", DUMP_FILE_PATH, now,
+             OUTPUT_YUV_DUMP_EXT);
+
+    bool enableDumping = android::base::GetBoolProperty(ENABLE_DUMPING_FILES_PROPERTY,
+                                                        ENABLE_DUMPING_FILES_DEFAULT);
+
+    num_frames_to_dump =
+            android::base::GetIntProperty(NUM_FRAMES_TO_DUMP_PROPERTY, NUM_FRAMES_TO_DUMP_DEFAULT);
+
+    if (enableDumping) {
+        ALOGD("enableDumping = %d, num_frames_to_dump = %d", enableDumping, num_frames_to_dump);
+
+        mInDataFile = fopen(mInDataFileName, "wb");
+        if (mInDataFile == nullptr) {
+            ALOGD("Could not open file %s", mInDataFileName);
+        }
+
+        mInSizeFile = fopen(mInSizeFileName, "wb");
+        if (mInSizeFile == nullptr) {
+            ALOGD("Could not open file %s", mInSizeFileName);
+        }
+
+        mDav1dOutYuvFile = fopen(mDav1dOutYuvFileName, "wb");
+        if (mDav1dOutYuvFile == nullptr) {
+            ALOGD("Could not open file %s", mDav1dOutYuvFileName);
+        }
+    }
+#endif
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        mPixelFormatInfo = mIntf->getPixelFormat_l();
+    }
+
+    const char* version = dav1d_version();
+
+    Dav1dSettings lib_settings;
+    dav1d_default_settings(&lib_settings);
+    int cpu_count = GetCPUCoreCount();
+    lib_settings.n_threads = std::max(cpu_count / 2, 1);  // use up to half the cores by default.
+
+    int32_t numThreads =
+            android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
+    if (numThreads > 0) lib_settings.n_threads = numThreads;
+
+    int res = 0;
+    if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
+        ALOGE("dav1d_open failed. status: %d.", res);
+        return false;
+    } else {
+        ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
+    }
+
+    return true;
+}
+
+void C2SoftDav1dDec::destroyDecoder() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+        while (mDecodedPictures.size() > 0) {
+            memset(&p, 0, sizeof(p));
+            p = mDecodedPictures.front();
+            mDecodedPictures.pop_front();
+
+            dav1d_picture_unref(&p);
+        }
+
+        dav1d_close(&mDav1dCtx);
+        mDav1dCtx = nullptr;
+        mOutputBufferIndex = 0;
+        mInputBufferIndex = 0;
+    }
+#ifdef FILE_DUMP_ENABLE
+    if (mInDataFile != nullptr) {
+        fclose(mInDataFile);
+        mInDataFile = nullptr;
+    }
+
+    if (mInSizeFile != nullptr) {
+        fclose(mInSizeFile);
+        mInSizeFile = nullptr;
+    }
+
+    if (mDav1dOutYuvFile != nullptr) {
+        fclose(mDav1dOutYuvFile);
+        mDav1dOutYuvFile = nullptr;
+    }
+#endif
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+    uint32_t flags = 0;
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+        ALOGV("signalling end_of_stream.");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
+void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                                const std::shared_ptr<C2GraphicBlock>& block) {
+    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        buffer->setInfo(mIntf->getColorAspects_l());
+    }
+    auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
+        uint32_t flags = 0;
+        if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
+            (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
+            flags |= C2FrameData::FLAG_END_OF_STREAM;
+            ALOGV("signalling end_of_stream.");
+        }
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+        fillWork(work);
+    } else {
+        finish(index, fillWork);
+    }
+}
+
+void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
+                             const std::shared_ptr<C2BlockPool>& pool) {
+    work->result = C2_OK;
+    work->workletsProcessed = 0u;
+    work->worklets.front()->output.configUpdate.clear();
+    work->worklets.front()->output.flags = work->input.flags;
+    if (mSignalledError || mSignalledOutputEos) {
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+    size_t inOffset = 0u;
+    size_t inSize = 0u;
+    C2ReadView rView = mDummyReadView;
+    if (!work->input.buffers.empty()) {
+        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+        inSize = rView.capacity();
+        if (inSize && rView.error()) {
+            ALOGE("read view map failed %d", rView.error());
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    }
+
+    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
+    if (inSize) {
+        mInputBufferIndex = in_frameIndex;
+
+        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
+
+        mTimeStart = systemTime();
+        nsecs_t delay = mTimeStart - mTimeEnd;
+
+        // Send the bitstream data (inputBuffer) to dav1d.
+        if (mDav1dCtx) {
+            int i_ret = 0;
+
+            Dav1dSequenceHeader seq;
+            int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
+            if (res == 0) {
+                ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
+                      seq.max_height, (long)in_frameIndex);
+            }
+
+            // insert OBU TD if it is not present.
+            // TODO: b/286852962
+            uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
+            Dav1dData data;
+
+            uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
+                                                      : dav1d_data_create(&data, inSize + 2);
+            if (ptr == nullptr) {
+                ALOGE("dav1d_data_create failed!");
+                i_ret = -1;
+
+            } else {
+                data.m.timestamp = in_frameIndex;
+
+                int new_Size;
+                if (obu_type != DAV1D_OBU_TD) {
+                    new_Size = (int)(inSize + 2);
+
+                    // OBU TD
+                    ptr[0] = 0x12;
+                    ptr[1] = 0;
+
+                    memcpy(ptr + 2, bitstream, inSize);
+                } else {
+                    new_Size = (int)(inSize);
+                    // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
+                    // avoid memcopy operations.
+                    memcpy(ptr, bitstream, new_Size);
+                }
+
+                // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
+                //       "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
+                //       inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
+                //       ptr[3], ptr[4]);
+
+                // Dump the bitstream data (inputBuffer) if dumping is enabled.
+#ifdef FILE_DUMP_ENABLE
+                if (mInDataFile) {
+                    int ret = fwrite(ptr, 1, new_Size, mInDataFile);
+
+                    if (ret != new_Size) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mInDataFileName,
+                              new_Size, ret);
+                    }
+                }
+
+                // Dump the size per inputBuffer if dumping is enabled.
+                if (mInSizeFile) {
+                    int ret = fwrite(&new_Size, 1, 4, mInSizeFile);
+
+                    if (ret != 4) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mInSizeFileName, 4,
+                              ret);
+                    }
+                }
+#endif
+
+                bool b_draining = false;
+                int res;
+
+                do {
+                    res = dav1d_send_data(mDav1dCtx, &data);
+                    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
+                        ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
+                        /* bitstream decoding errors (typically DAV1D_ERR(EINVAL)) are assumed
+                         * to be recoverable. Other errors returned from this function are
+                         * either unexpected, or considered critical failures.
+                         */
+                        i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
+                        break;
+                    }
+
+                    bool b_output_error = false;
+
+                    do {
+                        Dav1dPicture img;
+                        memset(&img, 0, sizeof(img));
+
+                        res = dav1d_get_picture(mDav1dCtx, &img);
+                        if (res == 0) {
+                            mDecodedPictures.push_back(img);
+
+                            if (!end_of_stream) break;
+                        } else if (res == DAV1D_ERR(EAGAIN)) {
+                            /* the decoder needs more data to be able to output something.
+                             * if there is more data pending, continue the loop below or
+                             * otherwise break */
+                            if (data.sz != 0) res = 0;
+                            break;
+                        } else {
+                            ALOGE("warning! Decoder error %d!", res);
+                            b_output_error = true;
+                            break;
+                        }
+                    } while (res == 0);
+
+                    if (b_output_error) break;
+
+                    /* on drain, we must ignore the 1st EAGAIN */
+                    if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
+                        (end_of_stream)) {
+                        b_draining = true;
+                        res = 0;
+                    }
+                } while (res == 0 && ((data.sz != 0) || b_draining));
+
+                if (data.sz > 0) {
+                    ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
+                    dav1d_data_unref(&data);
+                }
+            }
+
+            mTimeEnd = systemTime();
+            nsecs_t decodeTime = mTimeEnd - mTimeStart;
+            // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
+
+            if (i_ret != 0) {
+                ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
+                work->result = C2_CORRUPTED;
+                work->workletsProcessed = 1u;
+                mSignalledError = true;
+                return;
+            }
+        }
+    }
+
+    (void)outputBuffer(pool, work);
+
+    if (end_of_stream) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        mSignalledOutputEos = true;
+    } else if (!inSize) {
+        fillEmptyWork(work);
+    }
+}
+
+void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
+                                        const std::unique_ptr<C2Work>& work) {
+    C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
+    bool infoPresent = false;
+
+    if (picture != nullptr) {
+        if (picture->mastering_display != nullptr) {
+            hdrStaticMetadataInfo.mastering.red.x =
+                    picture->mastering_display->primaries[0][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.red.y =
+                    picture->mastering_display->primaries[0][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.green.x =
+                    picture->mastering_display->primaries[1][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.green.y =
+                    picture->mastering_display->primaries[1][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.blue.x =
+                    picture->mastering_display->primaries[2][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.blue.y =
+                    picture->mastering_display->primaries[2][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.white.x =
+                    picture->mastering_display->white_point[0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.white.y =
+                    picture->mastering_display->white_point[1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.maxLuminance =
+                    picture->mastering_display->max_luminance / 256.0;
+            hdrStaticMetadataInfo.mastering.minLuminance =
+                    picture->mastering_display->min_luminance / 16384.0;
+
+            infoPresent = true;
+        }
+
+        if (picture->content_light != nullptr) {
+            hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
+            hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
+            infoPresent = true;
+        }
+    }
+
+    // if (infoPresent) {
+    //   ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
+    //   mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
+    //   mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
+    //   hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
+    //   hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
+    //   hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
+    //   hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
+    //   hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
+    //   hdrStaticMetadataInfo.maxCll,
+    //   hdrStaticMetadataInfo.maxFall,
+    //   mOutputBufferIndex);
+    // }
+
+    // config if static info has changed
+    if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
+        mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
+        work->worklets.front()->output.configUpdate.push_back(
+                C2Param::Copy(mHdrStaticMetadataInfo));
+    }
+}
+
+void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (picture != nullptr) {
+        if (picture->itut_t35 != nullptr) {
+            std::vector<uint8_t> payload;
+            size_t payloadSize = picture->itut_t35->payload_size;
+            if (payloadSize > 0) {
+                payload.push_back(picture->itut_t35->country_code);
+                if (picture->itut_t35->country_code == 0xFF) {
+                    payload.push_back(picture->itut_t35->country_code_extension_byte);
+                }
+                payload.insert(payload.end(), picture->itut_t35->payload,
+                               picture->itut_t35->payload + picture->itut_t35->payload_size);
+            }
+
+            std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
+                    C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
+            if (!hdr10PlusInfo) {
+                ALOGE("Hdr10PlusInfo allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
+
+            // ALOGD("Received a hdr10PlusInfo from picture->itut_t35
+            // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
+            // picture->itut_t35->payload_size,
+            // picture->itut_t35->country_code,
+            // mOutputBufferIndex);
+
+            // config if hdr10Plus info has changed
+            if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
+                mHdr10PlusInfo = std::move(hdr10PlusInfo);
+                work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
+            }
+        }
+    }
+}
+
+void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
+    VuiColorAspects vuiColorAspects;
+
+    if (picture) {
+        vuiColorAspects.primaries = picture->seq_hdr->pri;
+        vuiColorAspects.transfer = picture->seq_hdr->trc;
+        vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
+        vuiColorAspects.fullRange = picture->seq_hdr->color_range;
+
+        // ALOGD("Received a vuiColorAspects from dav1d
+        //       (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
+        //               at mOutputBufferIndex = % d,
+        //       out_frameIndex = % ld.",
+        //                          vuiColorAspects.primaries,
+        //       vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
+        //       mOutputBufferIndex, picture->m.timestamp);
+    }
+
+    // convert vui aspects to C2 values if changed
+    if (!(vuiColorAspects == mBitstreamColorAspects)) {
+        mBitstreamColorAspects = vuiColorAspects;
+        ColorAspects sfAspects;
+        C2StreamColorAspectsInfo::input codedAspects = {0u};
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+                vuiColorAspects.fullRange, sfAspects);
+        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+        }
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+    }
+}
+
+void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
+    mSignalledError = true;
+    work->result = error;
+    work->workletsProcessed = 1u;
+}
+
+bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
+    if (size > mTmpFrameBufferSize) {
+        mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
+        if (mTmpFrameBuffer == nullptr) {
+            mTmpFrameBufferSize = 0;
+            return false;
+        }
+        mTmpFrameBufferSize = size;
+    }
+    return true;
+}
+
+#ifdef FILE_DUMP_ENABLE
+void C2SoftDav1dDec::writeDav1dOutYuvFile(const Dav1dPicture& p) {
+    if (mDav1dOutYuvFile != NULL) {
+        uint8_t* ptr;
+        const int hbd = p.p.bpc > 8;
+
+        ptr = (uint8_t*)p.data[0];
+        for (int y = 0; y < p.p.h; y++) {
+            int iSize = p.p.w << hbd;
+            int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
+            if (ret != iSize) {
+                ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName, iSize,
+                      ret);
+                break;
+            }
+
+            ptr += p.stride[0];
+        }
+
+        if (p.p.layout != DAV1D_PIXEL_LAYOUT_I400) {
+            // u/v
+            const int ss_ver = p.p.layout == DAV1D_PIXEL_LAYOUT_I420;
+            const int ss_hor = p.p.layout != DAV1D_PIXEL_LAYOUT_I444;
+            const int cw = (p.p.w + ss_hor) >> ss_hor;
+            const int ch = (p.p.h + ss_ver) >> ss_ver;
+            for (int pl = 1; pl <= 2; pl++) {
+                ptr = (uint8_t*)p.data[pl];
+                for (int y = 0; y < ch; y++) {
+                    int iSize = cw << hbd;
+                    int ret = fwrite(ptr, 1, cw << hbd, mDav1dOutYuvFile);
+                    if (ret != iSize) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName,
+                              iSize, ret);
+                        break;
+                    }
+                    ptr += p.stride[1];
+                }
+            }
+        }
+    }
+}
+#endif
+
+bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                                  const std::unique_ptr<C2Work>& work) {
+    if (!(work && pool)) return false;
+    if (mDav1dCtx == nullptr) return false;
+
+    // Get a decoded picture from dav1d.
+    Dav1dPicture img;
+    memset(&img, 0, sizeof(img));
+
+    int res = 0;
+    if (mDecodedPictures.size() > 0) {
+        img = mDecodedPictures.front();
+        mDecodedPictures.pop_front();
+        // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
+        // outputBuffer.",img.m.timestamp,img.m.timestamp);
+    } else {
+        res = dav1d_get_picture(mDav1dCtx, &img);
+        if (res == 0) {
+            // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
+            // outputBuffer.",img.m.timestamp,img.m.timestamp);
+        } else {
+            ALOGE("failed to get a picture from dav1d for outputBuffer.");
+        }
+    }
+
+    if (res == DAV1D_ERR(EAGAIN)) {
+        ALOGD("Not enough data to output a picture.");
+        return false;
+    }
+    if (res != 0) {
+        ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
+        return false;
+    }
+
+    const int width = img.p.w;
+    const int height = img.p.h;
+    if (width != mWidth || height != mHeight) {
+        mWidth = width;
+        mHeight = height;
+
+        C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
+        } else {
+            ALOGE("Config update size failed");
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
+            return false;
+        }
+    }
+
+    getVuiParams(&img);
+    getHDRStaticParams(&img, work);
+    getHDR10PlusInfoData(&img, work);
+
+    // out_frameIndex that the decoded picture returns from dav1d.
+    int64_t out_frameIndex = img.m.timestamp;
+
+#if LIBYUV_VERSION < 1779
+    if (!(img.p.layout == DAV1D_PIXEL_LAYOUT_I400 || img.p.layout == DAV1D_PIXEL_LAYOUT_I420)) {
+        ALOGE("image_format %d not supported", img.p.layout);
+        mSignalledError = true;
+        work->workletsProcessed = 1u;
+        work->result = C2_CORRUPTED;
+        return false;
+    }
+#endif
+
+    const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;
+
+    int bitdepth = img.p.bpc;
+
+    std::shared_ptr<C2GraphicBlock> block;
+    uint32_t format = HAL_PIXEL_FORMAT_YV12;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
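+    // A 10-bit output format is only chosen when the stream is 10-bit and the client has not
+    // requested the flexible 8-bit YUV format (YCBCR_420_888).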
+    if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+        IntfImpl::Lock lock = mIntf->lock();
+        codedColorAspects = mIntf->getColorAspects_l();
+        bool allowRGBA1010102 = false;
+        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+            allowRGBA1010102 = true;
+        }
+        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+#if !HAVE_LIBYUV_I410_I210_TO_AB30
+        if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
+            (img.p.layout != DAV1D_PIXEL_LAYOUT_I420)) {
+            ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+            mSignalledError = true;
+            work->result = C2_OMITTED;
+            work->workletsProcessed = 1u;
+            return false;
+        }
+#endif
+    }
+
+    if (mHalPixelFormat != format) {
+        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
+        } else {
+            ALOGE("Config update pixelFormat failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return false;
+        }
+        mHalPixelFormat = format;
+    }
+
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+    // We always create a graphic block that is width aligned to 16 and height
+    // aligned to 2. We set the correct "crop" value of the image in the call to
+    // createGraphicBuffer() by setting the correct image dimensions.
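+    // For example, an 854x480 stream is allocated as an 864x480 block and cropped back to
+    // 854x480 through the image dimensions.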
+    c2_status_t err =
+            pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
+
+    if (err != C2_OK) {
+        ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+        work->result = err;
+        return false;
+    }
+
+    C2GraphicView wView = block->map().get();
+
+    if (wView.error()) {
+        ALOGE("graphic view map failed %d", wView.error());
+        work->result = C2_CORRUPTED;
+        return false;
+    }
+
+    // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
+    //       block->height(), mWidth, mHeight, (int)out_frameIndex);
+
+    mOutputBufferIndex = out_frameIndex;
+
+    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
+
+    C2PlanarLayout layout = wView.layout();
+    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    if (bitdepth == 10) {
+        // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
+        // decompression to avoid color conversion.
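+        // dav1d reports strides in bytes; halve them to get per-sample strides for the
+        // 16-bit source planes.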
+        const uint16_t* srcY = (const uint16_t*)img.data[0];
+        const uint16_t* srcU = (const uint16_t*)img.data[1];
+        const uint16_t* srcV = (const uint16_t*)img.data[2];
+        size_t srcYStride = img.stride[0] / 2;
+        size_t srcUStride = img.stride[1] / 2;
+        size_t srcVStride = img.stride[1] / 2;
+
+        if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+            bool processed = false;
+#if HAVE_LIBYUV_I410_I210_TO_AB30
+            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                                         dstYStride, &libyuv::kYuvV2020Constants, mWidth, mHeight);
+                processed = true;
+            } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
+                libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                                         dstYStride, &libyuv::kYuvV2020Constants, mWidth, mHeight);
+                processed = true;
+            }
+#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
+            if (!processed) {
+                if (isMonochrome) {
+                    const size_t tmpSize = mWidth;
+                    const bool needFill = tmpSize > mTmpFrameBufferSize;
+                    if (!allocTmpFrameBuffer(tmpSize)) {
+                        ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                        setError(work, C2_NO_MEMORY);
+                        return false;
+                    }
+                    srcU = srcV = mTmpFrameBuffer.get();
+                    srcUStride = srcVStride = 0;
+                    if (needFill) {
+                        std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
+                    }
+                }
+                convertYUV420Planar16ToY410OrRGBA1010102(
+                        (uint32_t*)dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                        dstYStride / sizeof(uint32_t), mWidth, mHeight,
+                        std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+            }
+        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
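+            // The destination strides come from the graphic view in bytes; convert them to
+            // 16-bit sample units for the conversion routines below.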
+            dstYStride /= 2;
+            dstUStride /= 2;
+            dstVStride /= 2;
+#if LIBYUV_VERSION >= 1779
+            if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
+                (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
+                // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010
+                // and libyuv::I210ToP010 when they are available. Note it may be safe to alias dstY
+                // in I010ToP010, but the libyuv API doesn't make any guarantees.
+                const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                uint16_t* const tmpY = mTmpFrameBuffer.get();
+                uint16_t* const tmpU = tmpY + dstYStride * mHeight;
+                uint16_t* const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+                if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                    libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                                       dstYStride, tmpU, dstUStride, tmpV, dstUStride, mWidth,
+                                       mHeight);
+                } else {
+                    libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                                       dstYStride, tmpU, dstUStride, tmpV, dstUStride, mWidth,
+                                       mHeight);
+                }
+                libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
+                                   (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride, mWidth,
+                                   mHeight);
+            } else {
+                convertYUV420Planar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+                                            srcYStride, srcUStride, srcVStride, dstYStride,
+                                            dstUStride, mWidth, mHeight, isMonochrome);
+            }
+#else   // LIBYUV_VERSION < 1779
+            convertYUV420Planar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+                                        srcYStride, srcUStride, srcVStride, dstYStride, dstUStride,
+                                        mWidth, mHeight, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+        } else {
+#if LIBYUV_VERSION >= 1779
+            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420
+                // when it's available.
+                const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                uint16_t* const tmpY = mTmpFrameBuffer.get();
+                uint16_t* const tmpU = tmpY + dstYStride * mHeight;
+                uint16_t* const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+                libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                                   dstYStride, tmpU, dstUStride, tmpV, dstVStride, mWidth, mHeight);
+                libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride, dstY,
+                                   dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+            } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
+                libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                                   dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+            } else {
+                convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                            srcUStride, srcVStride, dstYStride, dstUStride, mWidth,
+                                            mHeight, isMonochrome);
+            }
+#else   // LIBYUV_VERSION < 1779
+            convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                        srcVStride, dstYStride, dstUStride, mWidth, mHeight,
+                                        isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+        }
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        FILE* fp_out = mDav1dOutYuvFile;
+
+        // if(mOutputBufferIndex % 100 == 0)
+        ALOGV("output a 10bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        if (fp_out && mOutputBufferIndex <= num_frames_to_dump) {
+            for (int i = 0; i < mHeight; i++) {
+                int ret = fwrite((uint8_t*)srcY + i * srcYStride * 2, 1, mWidth * 2, fp_out);
+                if (ret != mWidth * 2) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth * 2, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcU + i * srcUStride * 2, 1, mWidth, fp_out);
+                if (ret != mWidth) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcV + i * srcVStride * 2, 1, mWidth, fp_out);
+                if (ret != mWidth) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
+                    break;
+                }
+            }
+        }
+#endif
+    } else {
+        const uint8_t* srcY = (const uint8_t*)img.data[0];
+        const uint8_t* srcU = (const uint8_t*)img.data[1];
+        const uint8_t* srcV = (const uint8_t*)img.data[2];
+
+        size_t srcYStride = img.stride[0];
+        size_t srcUStride = img.stride[1];
+        size_t srcVStride = img.stride[1];
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        FILE* fp_out = mDav1dOutYuvFile;
+        // if(mOutputBufferIndex % 100 == 0)
+        ALOGV("output a 8bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        if (fp_out && mOutputBufferIndex <= num_frames_to_dump) {
+            for (int i = 0; i < mHeight; i++) {
+                int ret = fwrite((uint8_t*)srcY + i * srcYStride, 1, mWidth, fp_out);
+                if (ret != mWidth) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcU + i * srcUStride, 1, mWidth / 2, fp_out);
+                if (ret != mWidth / 2) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth / 2, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcV + i * srcVStride, 1, mWidth / 2, fp_out);
+                if (ret != mWidth / 2) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth / 2, ret);
+                    break;
+                }
+            }
+        }
+#endif
+        if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+            libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                               dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+        } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
+            libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                               dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+        } else {
+            convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                       srcVStride, dstYStride, dstUStride, dstVStride, mWidth,
+                                       mHeight, isMonochrome);
+        }
+    }
+
+    dav1d_picture_unref(&img);
+
+    finishWork(out_frameIndex, work, std::move(block));
+    block = nullptr;
+    return true;
+}
+
+c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
+                                          const std::shared_ptr<C2BlockPool>& pool,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
+    while (outputBuffer(pool, work)) {
+    }
+
+    if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
+        fillEmptyWork(work);
+    }
+
+    return C2_OK;
+}
+
+c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftDav1dFactory : public C2ComponentFactory {
+  public:
+    C2SoftDav1dFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftDav1dDec(COMPONENT_NAME, id,
+                                   std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftDav1dFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftDav1dFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
new file mode 100644
index 0000000..5201456
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_DAV1D_DEC_H_
+#define ANDROID_C2_SOFT_DAV1D_DEC_H_
+
+#include <inttypes.h>
+
+#include <memory>
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <C2Config.h>
+#include <SimpleC2Component.h>
+
+#include <dav1d/dav1d.h>
+#include <deque>
+
+//#define FILE_DUMP_ENABLE 1
+#define DUMP_FILE_PATH "/data/local/tmp/dump"
+#define INPUT_DATA_DUMP_EXT "av1"
+#define INPUT_SIZE_DUMP_EXT "size"
+#define OUTPUT_YUV_DUMP_EXT "yuv"
+
+namespace android {
+
+struct C2SoftDav1dDec : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftDav1dDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    ~C2SoftDav1dDec();
+
+    // Begin SimpleC2Component overrides.
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+    // End SimpleC2Component overrides.
+
+  private:
+    std::shared_ptr<IntfImpl> mIntf;
+
+    int mInputBufferIndex = 0;
+    int mOutputBufferIndex = 0;
+
+    Dav1dContext* mDav1dCtx = nullptr;
+    std::deque<Dav1dPicture> mDecodedPictures;
+
+    // configurations used by component in process
+    // (TODO: keep this in intf but make them internal only)
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+    uint32_t mHalPixelFormat;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    bool mSignalledOutputEos;
+    bool mSignalledError;
+    // Used during 10-bit I444/I422 to 10-bit P010 & 8-bit I420 conversions.
+    std::unique_ptr<uint16_t[]> mTmpFrameBuffer;
+    size_t mTmpFrameBufferSize = 0;
+
+    C2StreamHdrStaticMetadataInfo::output mHdrStaticMetadataInfo;
+    std::unique_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfo = nullptr;
+
+    // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+    // converting them to C2 values for each frame
+    struct VuiColorAspects {
+        uint8_t primaries;
+        uint8_t transfer;
+        uint8_t coeffs;
+        uint8_t fullRange;
+
+        // default color aspects
+        VuiColorAspects()
+            : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+              transfer(C2Color::TRANSFER_UNSPECIFIED),
+              coeffs(C2Color::MATRIX_UNSPECIFIED),
+              fullRange(C2Color::RANGE_UNSPECIFIED) {}
+
+        bool operator==(const VuiColorAspects& o) {
+            return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs &&
+                   fullRange == o.fullRange;
+        }
+    } mBitstreamColorAspects;
+
+    nsecs_t mTimeStart = 0;  // Time at the start of decode()
+    nsecs_t mTimeEnd = 0;    // Time at the end of decode()
+
+    bool initDecoder();
+    void getHDRStaticParams(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getHDR10PlusInfoData(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getVuiParams(Dav1dPicture* picture);
+    void destroyDecoder();
+    void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2GraphicBlock>& block);
+    // Sets |work->result| to |error| and sets mSignalledError.
+    void setError(const std::unique_ptr<C2Work>& work, c2_status_t error);
+    bool allocTmpFrameBuffer(size_t size);
+    bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                      const std::unique_ptr<C2Work>& work);
+
+    c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
+
+    void flushDav1d();
+
+#ifdef FILE_DUMP_ENABLE
+    char mInDataFileName[256];
+    char mInSizeFileName[256];
+    char mDav1dOutYuvFileName[256];
+
+    FILE* mInDataFile = nullptr;
+    FILE* mInSizeFile = nullptr;
+    FILE* mDav1dOutYuvFile = nullptr;
+
+    void writeDav1dOutYuvFile(const Dav1dPicture& p);
+
+    int num_frames_to_dump = 0;
+#endif
+
+    C2_DO_NOT_COPY(C2SoftDav1dDec);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_DAV1D_DEC_H_
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 3e4247b..5f5f05d 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -752,6 +752,19 @@
     return true;
 }
 
+bool C2SoftGav1Dec::fillMonochromeRow(int value) {
+    const size_t tmpSize = mWidth;
+    const bool needFill = tmpSize > mTmpFrameBufferSize;
+    if (!allocTmpFrameBuffer(tmpSize)) {
+        ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+        return false;
+    }
+    if (needFill) {
+        std::fill_n(mTmpFrameBuffer.get(), tmpSize, value);
+    }
+    return true;
+}
+
 bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                  const std::unique_ptr<C2Work> &work) {
   if (!(work && pool)) return false;
@@ -773,6 +786,16 @@
     return false;
   }
 
+#if LIBYUV_VERSION < 1871
+  if (buffer->bitdepth > 10) {
+    ALOGE("bitdepth %d is not supported", buffer->bitdepth);
+    mSignalledError = true;
+    work->workletsProcessed = 1u;
+    work->result = C2_CORRUPTED;
+    return false;
+  }
+#endif
+
   const int width = buffer->displayed_width[0];
   const int height = buffer->displayed_height[0];
   if (width != mWidth || height != mHeight) {
@@ -816,7 +839,7 @@
   std::shared_ptr<C2GraphicBlock> block;
   uint32_t format = HAL_PIXEL_FORMAT_YV12;
   std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
-  if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+  if (buffer->bitdepth >= 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
     IntfImpl::Lock lock = mIntf->lock();
     codedColorAspects = mIntf->getColorAspects_l();
     bool allowRGBA1010102 = false;
@@ -828,8 +851,9 @@
     format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
 #if !HAVE_LIBYUV_I410_I210_TO_AB30
     if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
-        (buffer->image_format != libgav1::kImageFormatYuv420)) {
-        ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+        (buffer->image_format != libgav1::kImageFormatYuv420) &&
+        (buffer->bitdepth == 10)) {
+        ALOGE("Only YUV420 output is supported for 10-bit when targeting RGBA_1010102");
       mSignalledError = true;
       work->result = C2_OMITTED;
       work->workletsProcessed = 1u;
@@ -837,6 +861,18 @@
     }
 #endif
   }
+  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_RGBA_1010102 &&
+      (buffer->image_format == libgav1::kImageFormatYuv422 ||
+       buffer->image_format == libgav1::kImageFormatYuv444)) {
+      // There are no 12-bit color conversion functions from YUV422/YUV444 to
+      // RGBA_1010102. Use 8-bit YV12 in this case.
+      format = HAL_PIXEL_FORMAT_YV12;
+  }
+  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+      // There are no 12-bit color conversion functions to P010. Use 8-bit YV12
+      // in this case.
+      format = HAL_PIXEL_FORMAT_YV12;
+  }
 
   if (mHalPixelFormat != format) {
     C2StreamPixelFormatInfo::output pixelFormat(0u, format);
@@ -890,7 +926,41 @@
   size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
   size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
-  if (buffer->bitdepth == 10) {
+  if (buffer->bitdepth == 12) {
+#if LIBYUV_VERSION >= 1871
+      const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
+      const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
+      const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
+      size_t srcYStride = buffer->stride[0] / 2;
+      size_t srcUStride = buffer->stride[1] / 2;
+      size_t srcVStride = buffer->stride[2] / 2;
+      if (isMonochrome) {
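+          // 2048 is the mid-level (neutral) chroma value for 12-bit samples, i.e. 1 << 11.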
+          if (!fillMonochromeRow(2048)) {
+              setError(work, C2_NO_MEMORY);
+              return false;
+          }
+          srcU = srcV = mTmpFrameBuffer.get();
+          srcUStride = srcVStride = 0;
+      }
+      if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+          libyuv::I012ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                   dstY, dstYStride, &libyuv::kYuvV2020Constants,
+                                   mWidth, mHeight);
+      } else if (isMonochrome || buffer->image_format == libgav1::kImageFormatYuv420) {
+          libyuv::I012ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      } else if (buffer->image_format == libgav1::kImageFormatYuv444) {
+          libyuv::I412ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      } else {
+          libyuv::I212ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      }
+#endif  // LIBYUV_VERSION >= 1871
+  } else if (buffer->bitdepth == 10) {
     const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
     const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
     const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
@@ -915,18 +985,12 @@
 #endif  // HAVE_LIBYUV_I410_I210_TO_AB30
         if (!processed) {
             if (isMonochrome) {
-                const size_t tmpSize = mWidth;
-                const bool needFill = tmpSize > mTmpFrameBufferSize;
-                if (!allocTmpFrameBuffer(tmpSize)) {
-                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                if (!fillMonochromeRow(512)) {
                     setError(work, C2_NO_MEMORY);
                     return false;
                 }
                 srcU = srcV = mTmpFrameBuffer.get();
                 srcUStride = srcVStride = 0;
-                if (needFill) {
-                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
-                }
             }
             convertYUV420Planar16ToY410OrRGBA1010102(
                     (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index c3b27ea..0e09fcc 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -105,6 +105,7 @@
   // Sets |work->result| and mSignalledError. Returns false.
   void setError(const std::unique_ptr<C2Work> &work, c2_status_t error);
   bool allocTmpFrameBuffer(size_t size);
+  bool fillMonochromeRow(int value);
   bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
                     const std::unique_ptr<C2Work>& work);
   c2_status_t drainInternal(uint32_t drainMode,
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index f272499..0803dc3 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -23,6 +23,7 @@
 #include <aidl/android/hardware/graphics/common/PlaneLayoutComponentType.h>
 #include <android/hardware/graphics/common/1.2/types.h>
 #include <cutils/native_handle.h>
+#include <drm/drm_fourcc.h>
 #include <gralloctypes/Gralloc4.h>
 #include <hardware/gralloc.h>
 #include <ui/GraphicBufferAllocator.h>
@@ -478,7 +479,25 @@
     // 'NATIVE' on Android means LITTLE_ENDIAN
     constexpr C2PlaneInfo::endianness_t kEndianness = C2PlaneInfo::NATIVE;
 
-    switch (mFormat) {
+    // Try to resolve IMPLEMENTATION_DEFINED format to accurate format if
+    // possible.
+    uint32_t format = mFormat;
+    uint32_t fourCc;
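+    // getPixelFormatFourCC() returns OK (0) on success, so the switch below only runs
+    // when a fourcc was actually resolved for the buffer.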
+    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
+        !GraphicBufferMapper::get().getPixelFormatFourCC(mBuffer, &fourCc)) {
+        switch (fourCc)  {
+            case DRM_FORMAT_XBGR8888:
+                 format = static_cast<uint32_t>(PixelFormat4::RGBX_8888);
+                 break;
+            case DRM_FORMAT_ABGR8888:
+                 format = static_cast<uint32_t>(PixelFormat4::RGBA_8888);
+                 break;
+            default:
+                 break;
+        }
+    }
+
+    switch (format) {
         case static_cast<uint32_t>(PixelFormat4::RGBA_1010102): {
             // TRICKY: this is used for media as YUV444 in the case when it is queued directly to a
             // Surface. In all other cases it is RGBA. We don't know which case it is here, so
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 84c715f..93ac966 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -132,10 +132,6 @@
 
     request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
 
-    request.getConfiguration().setHardwareSamplesPerFrame(builder.getHardwareSamplesPerFrame());
-    request.getConfiguration().setHardwareSampleRate(builder.getHardwareSampleRate());
-    request.getConfiguration().setHardwareFormat(builder.getHardwareFormat());
-
     mDeviceChannelCount = getSamplesPerFrame(); // Assume it will be the same. Update if not.
 
     mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
index a3ce58c..611ddcd 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
@@ -135,10 +135,9 @@
     int coefficientIndex = 0;
     double phase = 0.0; // ranges from 0.0 to 1.0, fraction between samples
     // Stretch the sinc function for low pass filtering.
-    const float cutoffScaler = normalizedCutoff *
-            ((outputRate < inputRate)
-             ? ((float)outputRate / inputRate)
-             : ((float)inputRate / outputRate));
+    const float cutoffScaler = (outputRate < inputRate)
+             ? (normalizedCutoff * (float)outputRate / inputRate)
+             : 1.0f; // Do not filter when upsampling.
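+    // When upsampling there is no signal content above the input Nyquist rate, so the sinc
+    // cutoff is left unscaled rather than narrowed further.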
     const int numTapsHalf = getNumTaps() / 2; // numTaps must be even.
     const float numTapsHalfInverse = 1.0f / numTapsHalf;
     for (int i = 0; i < numRows; i++) {
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
index 717f3fd..9e47335 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
@@ -111,6 +111,9 @@
          * Set lower to reduce aliasing.
          * Default is 0.70.
          *
+         * Note that this value is ignored when upsampling, which is when
+         * the outputRate is higher than the inputRate.
+         *
          * @param normalizedCutoff anti-aliasing filter cutoff
          * @return address of this builder for chaining calls
          */
@@ -227,6 +230,10 @@
 
     /**
      * Generate the filter coefficients in optimal order.
+     *
+     * Note that normalizedCutoff is ignored when upsampling, which is when
+     * the outputRate is higher than the inputRate.
+     *
      * @param inputRate sample rate of the input stream
      * @param outputRate  sample rate of the output stream
      * @param numRows number of rows in the array that contain a set of tap coefficients
diff --git a/media/libaaudio/tests/test_resampler.cpp b/media/libaaudio/tests/test_resampler.cpp
index 1e4f59c..13e4a20 100644
--- a/media/libaaudio/tests/test_resampler.cpp
+++ b/media/libaaudio/tests/test_resampler.cpp
@@ -101,14 +101,20 @@
         }
     }
 
+    // Flush out remaining frames from the flowgraph
+    while (!mcResampler->isWriteNeeded()) {
+        mcResampler->readNextFrame(output);
+        output++;
+        numRead++;
+    }
+
     ASSERT_LE(numRead, kNumOutputSamples);
     // Some frames are lost priming the FIR filter.
-    const int kMaxAlgorithmicFrameLoss = 16;
+    const int kMaxAlgorithmicFrameLoss = 5;
     EXPECT_GT(numRead, kNumOutputSamples - kMaxAlgorithmicFrameLoss);
 
     int sinkZeroCrossingCount = countZeroCrossingsWithHysteresis(outputBuffer.get(), numRead);
-    // Some cycles may get chopped off at the end.
-    const int kMaxZeroCrossingDelta = 3;
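+    // Allow the zero-crossing counts to differ by up to half the upsampling ratio
+    // (at least 1) to absorb edge effects at the ends of the buffers.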
+    const int kMaxZeroCrossingDelta = std::max(sinkRate / sourceRate / 2, 1);
     EXPECT_LE(abs(sourceZeroCrossingCount - sinkZeroCrossingCount), kMaxZeroCrossingDelta);
 
     // Detect glitches by looking for spikes in the second derivative.
@@ -136,8 +142,7 @@
 
 
 TEST(test_resampler, resampler_scan_all) {
-    // TODO Add 64000, 88200, 96000 when they work. Failing now.
-    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000};
+    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
     const MultiChannelResampler::Quality qualities[] =
     {
         MultiChannelResampler::Quality::Fastest,
@@ -193,10 +198,9 @@
     checkResampler(11025, 44100, MultiChannelResampler::Quality::Best);
 }
 
-// TODO This fails because the output is very low.
-//TEST(test_resampler, resampler_11025_88200_best) {
-//    checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
-//}
+TEST(test_resampler, resampler_11025_88200_best) {
+    checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
+}
 
 TEST(test_resampler, resampler_16000_48000_best) {
     checkResampler(16000, 48000, MultiChannelResampler::Quality::Best);
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 7cec2e8..871318f 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -132,12 +132,10 @@
                 binder = gAudioFlingerBinder;
             } else {
                 sp<IServiceManager> sm = defaultServiceManager();
-                do {
-                    binder = sm->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
-                    if (binder != nullptr) break;
-                    ALOGW("AudioFlinger not published, waiting...");
-                    usleep(500000); // 0.5 s
-                } while (true);
+                binder = sm->waitForService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+                if (binder == nullptr) {
+                    return nullptr;
+                }
             }
             binder->linkToDeath(gAudioFlingerClient);
             const auto afs = interface_cast<media::IAudioFlingerService>(binder);
@@ -870,14 +868,10 @@
         Mutex::Autolock _l(gLockAPS);
         if (gAudioPolicyService == 0) {
             sp<IServiceManager> sm = defaultServiceManager();
-            sp<IBinder> binder;
-            do {
-                binder = sm->getService(String16("media.audio_policy"));
-                if (binder != 0)
-                    break;
-                ALOGW("AudioPolicyService not published, waiting...");
-                usleep(500000); // 0.5 s
-            } while (true);
+            sp<IBinder> binder = sm->waitForService(String16("media.audio_policy"));
+            if (binder == nullptr) {
+                return nullptr;
+            }
             if (gAudioPolicyServiceClient == NULL) {
                 gAudioPolicyServiceClient = new AudioPolicyServiceClient();
             }
@@ -2093,8 +2087,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>
-            & aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<AudioFormatDescription> formatsAidl;
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
new file mode 100644
index 0000000..1ca3042
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_defaults {
+    name: "libaudioclient_aidl_fuzzer_defaults",
+    static_libs: [
+        "android.hardware.audio.common@7.0-enums",
+        "effect-aidl-cpp",
+        "liblog",
+        "libbinder_random_parcel",
+        "libbase",
+        "libcgrouprc",
+        "libcgrouprc_format",
+        "libcutils",
+        "libjsoncpp",
+        "libmediametricsservice",
+        "libmedia_helper",
+        "libprocessgroup",
+        "shared-file-region-aidl-cpp",
+        "libfakeservicemanager"
+    ],
+    shared_libs: [
+        "libaudioclient",
+        "libaudioflinger",
+        "libmediautils",
+        "libnblog",
+        "libaudioprocessing",
+        "libnbaio",
+        "libpowermanager",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
+        "android.hardware.audio.common-util",
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "capture_state_listener-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
+        "libaudiomanager",
+        "libaudiopolicy",
+        "libaudioutils",
+        "libdl",
+        "libutils",
+        "libxml2",
+        "mediametricsservice-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libvndksupport",
+        "libmediametrics",
+        "libbinder_ndk",
+        "libbinder",
+        "libfakeservicemanager",
+        "libactivitymanager_aidl",
+        "libheadtracking",
+        "libaudiopolicyservice",
+        "libsensorprivacy",
+        "libaudiopolicymanagerdefault",
+        "libaudiohal",
+        "libhidlbase",
+        "libpermission",
+        "libaudiohal@7.0",
+    ],
+    header_libs: [
+        "libaudiopolicymanager_interface_headers",
+        "libbinder_headers",
+        "libaudiofoundation_headers",
+        "libmedia_headers",
+        "libaudiohal_headers",
+        "libaudioflinger_headers",
+        "mediautils_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libaudioflinger",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
+}
+
+cc_fuzz {
+    name: "audioflinger_aidl_fuzzer",
+    srcs: ["audioflinger_aidl_fuzzer.cpp"],
+    defaults: ["libaudioclient_aidl_fuzzer_defaults"],
+}
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
new file mode 100644
index 0000000..fac5f53
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -0,0 +1,218 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <AudioFlinger.h>
+#include <ISchedulingPolicyService.h>
+#include <fakeservicemanager/FakeServiceManager.h>
+#include <android-base/logging.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_process.h>
+#include <android/media/IAudioPolicyService.h>
+#include <binder/IActivityManager.h>
+#include <binder/IPermissionController.h>
+#include <binder/IServiceManager.h>
+#include <binder/PermissionController.h>
+#include <fuzzbinder/libbinder_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/IAudioFlinger.h>
+#include <mediautils/SchedulingPolicyService.h>
+#include <sensorprivacy/SensorPrivacyManager.h>
+#include <service/AudioPolicyService.h>
+
+using namespace android;
+using namespace android::binder;
+using android::fuzzService;
+
+static sp<media::IAudioFlingerService> gAudioFlingerService;
+
+class FuzzerSchedulingPolicyService : public BnInterface<ISchedulingPolicyService> {
+    int32_t requestPriority(int32_t /*pid_t*/, int32_t /*tid*/, int32_t /*prio*/, bool /*isForApp*/,
+                            bool /*asynchronous*/) {
+        return 0;
+    }
+
+    int32_t requestCpusetBoost(bool /*enable*/, const sp<IBinder>& /*client*/) { return 0; }
+};
+
+class FuzzerPermissionController : public BnInterface<IPermissionController> {
+  public:
+    bool checkPermission(const String16& /*permission*/, int32_t /*pid*/, int32_t /*uid*/) {
+        return true;
+    }
+    int32_t noteOp(const String16& /*op*/, int32_t /*uid*/, const String16& /*packageName*/) {
+        return 0;
+    }
+    void getPackagesForUid(const uid_t /*uid*/, Vector<String16>& /*packages*/) {}
+    bool isRuntimePermission(const String16& /*permission*/) { return true; }
+    int32_t getPackageUid(const String16& /*package*/, int /*flags*/) { return 0; }
+};
+
+class FuzzerSensorPrivacyManager : public BnInterface<hardware::ISensorPrivacyManager> {
+  public:
+    Status supportsSensorToggle(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status addSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status addToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status removeSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status removeToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status isSensorPrivacyEnabled(bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status isCombinedToggleSensorPrivacyEnabled(int32_t /*sensor*/,
+                                                bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status isToggleSensorPrivacyEnabled(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                        bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setSensorPrivacy(bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setToggleSensorPrivacy(int32_t /*userId*/, int32_t /*source*/, int32_t /*sensor*/,
+                                  bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setToggleSensorPrivacyForProfileGroup(int32_t /*userId*/, int32_t /*source*/,
+                                                 int32_t /*sensor*/, bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+};
+
+class FuzzerActivityManager : public BnInterface<IActivityManager> {
+  public:
+    int32_t openContentUri(const String16& /*stringUri*/) override { return 0; }
+
+    status_t registerUidObserver(const sp<IUidObserver>& /*observer*/, const int32_t /*event*/,
+                                 const int32_t /*cutpoint*/,
+                                 const String16& /*callingPackage*/) override {
+        return OK;
+    }
+
+    status_t unregisterUidObserver(const sp<IUidObserver>& /*observer*/) override { return OK; }
+
+    bool isUidActive(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return true;
+    }
+
+    int32_t getUidProcessState(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return ActivityManager::PROCESS_STATE_UNKNOWN;
+    }
+
+    status_t checkPermission(const String16& /*permission*/, const pid_t /*pid*/,
+                             const uid_t /*uid*/, int32_t* /*outResult*/) override {
+        return NO_ERROR;
+    }
+
+    status_t registerUidObserverForUids(const sp<IUidObserver>& /*observer*/,
+                                        const int32_t /*event*/,
+                                        const int32_t /*cutpoint*/,
+                                        const String16& /*callingPackage*/,
+                                        const int32_t uids[],
+                                        size_t /*nUids*/,
+                                        /*out*/ sp<IBinder>& /*observerToken*/) {
+        (void)uids;
+        return OK;
+    }
+
+    status_t addUidToObserver(const sp<IBinder>& /*observerToken*/,
+                              const String16& /*callingPackage*/,
+                              int32_t /*uid*/) override {
+        return NO_ERROR;
+    }
+
+    status_t removeUidFromObserver(const sp<IBinder>& /*observerToken*/,
+                                   const String16& /*callingPackage*/,
+                                   int32_t /*uid*/) override {
+        return NO_ERROR;
+    }
+
+    status_t logFgsApiBegin(int32_t /*apiType*/, int32_t /*appUid*/,
+                            int32_t /*appPid*/) override {
+        return NO_ERROR;
+    }
+    status_t logFgsApiEnd(int32_t /*apiType*/, int32_t /*appUid*/,
+                          int32_t /*appPid*/) override {
+        return NO_ERROR;
+    }
+    status_t logFgsApiStateChanged(int32_t /*apiType*/, int32_t /*state*/,
+                                   int32_t /*appUid*/,
+                                   int32_t /*appPid*/) override {
+        return NO_ERROR;
+    }
+};
+
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+    /* Create a FakeServiceManager instance and add required services */
+    sp<FakeServiceManager> fakeServiceManager = new FakeServiceManager();
+    setDefaultServiceManager(fakeServiceManager);
+    ABinderProcess_setThreadPoolMaxThreadCount(0);
+    sp<FuzzerActivityManager> am = new FuzzerActivityManager();
+    fakeServiceManager->addService(String16("activity"), IInterface::asBinder(am));
+
+    sp<FuzzerSensorPrivacyManager> sensorPrivacyManager = new FuzzerSensorPrivacyManager();
+    fakeServiceManager->addService(String16("sensor_privacy"),
+                                   IInterface::asBinder(sensorPrivacyManager));
+    sp<FuzzerPermissionController> permissionController = new FuzzerPermissionController();
+    fakeServiceManager->addService(String16("permission"),
+                                   IInterface::asBinder(permissionController));
+
+    sp<FuzzerSchedulingPolicyService> schedulingService = new FuzzerSchedulingPolicyService();
+    fakeServiceManager->addService(String16("scheduling_policy"),
+                                   IInterface::asBinder(schedulingService));
+
+    const auto audioFlingerObj = sp<AudioFlinger>::make();
+    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlingerObj);
+
+    fakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                   IInterface::asBinder(afAdapter), false /* allowIsolated */,
+                                   IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+
+    const auto audioPolicyService = sp<AudioPolicyService>::make();
+    fakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+                                   false /* allowIsolated */,
+                                   IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+
+    sp<IBinder> binder =
+            fakeServiceManager->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+    gAudioFlingerService = interface_cast<media::IAudioFlingerService>(binder);
+    return 0;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    if (!gAudioFlingerService) {
+        return 0;
+    }
+
+    fuzzService(media::IAudioFlingerService::asBinder(gAudioFlingerService),
+                FuzzedDataProvider(data, size));
+
+    return 0;
+}
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 2ba1fc3..6834b7d 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -476,35 +476,37 @@
 }
 
 bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
-    if (heifColor == (HeifColorFormat)mOutputColor) {
-        return true;
-    }
-
+    android_pixel_format_t outputColor;
     switch(heifColor) {
         case kHeifColorFormat_RGB565:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGB_565;
+            outputColor = HAL_PIXEL_FORMAT_RGB_565;
             break;
         }
         case kHeifColorFormat_RGBA_8888:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGBA_8888;
+            outputColor = HAL_PIXEL_FORMAT_RGBA_8888;
             break;
         }
         case kHeifColorFormat_BGRA_8888:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_BGRA_8888;
+            outputColor = HAL_PIXEL_FORMAT_BGRA_8888;
             break;
         }
         case kHeifColorFormat_RGBA_1010102:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
+            outputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
             break;
         }
         default:
             ALOGE("Unsupported output color format %d", heifColor);
             return false;
     }
+    if (outputColor == mOutputColor) {
+        return true;
+    }
+
+    mOutputColor = outputColor;
 
     if (mFrameDecoded) {
         return reinit(nullptr);
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index c43ef66..f498453 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -38,16 +38,10 @@
     Mutex::Autolock _l(sServiceLock);
     if (sMediaPlayerService == 0) {
         sp<IServiceManager> sm = defaultServiceManager();
-        sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16("media.player"));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("Media player service not published, waiting...");
-            usleep(500000); // 0.5 s
-        } while (true);
-
+        sp<IBinder> binder = sm->waitForService(String16("media.player"));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (sDeathNotifier == NULL) {
             sDeathNotifier = new DeathNotifier();
         }
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 2ae76b3..40fd022 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -41,14 +41,10 @@
     if (sService == 0) {
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16("media.player"));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("MediaPlayerService not published, waiting...");
-            usleep(500000); // 0.5 s
-        } while (true);
+        binder = sm->waitForService(String16("media.player"));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (sDeathNotifier == NULL) {
             sDeathNotifier = new DeathNotifier();
         }
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 89e9806..434ae00 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -49,7 +49,7 @@
     defaults: ["libnbaio_mono_defaults"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libnbaio",
     defaults: ["libnbaio_mono_defaults"],
     srcs: [
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 1f0ee87..d8c356a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -31,7 +31,6 @@
 #include "include/SoftwareRenderer.h"
 
 #include <android/api-level.h>
-#include <android/binder_manager.h>
 #include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -74,7 +73,6 @@
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaCodecList.h>
-#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/OMXClient.h>
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
new file mode 100644
index 0000000..8e10b0c
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -0,0 +1,64 @@
+/*
+* Copyright (C) 2023 The Android Open Source Project
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at:
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+cc_defaults {
+    name: "libstagefright_rtsp_fuzzer_defaults",
+    shared_libs: [
+        "liblog",
+        "libmedia",
+        "libutils",
+        "libstagefright_foundation",
+    ],
+    static_libs: [
+        "libdatasource",
+        "libstagefright_rtsp",
+    ],
+    header_libs: [
+        "libstagefright_rtsp_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "sdploader_fuzzer",
+    srcs: [
+        "sdploader_fuzzer.cpp",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "rtp_writer_fuzzer",
+    srcs: [
+        "rtp_writer_fuzzer.cpp",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ],
+    shared_libs: [
+        "libandroid_net",
+        "libbase",
+        "libstagefright",
+        "libcutils",
+    ],
+}
diff --git a/media/libstagefright/rtsp/fuzzer/README.md b/media/libstagefright/rtsp/fuzzer/README.md
new file mode 100644
index 0000000..657fb48
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/README.md
@@ -0,0 +1,64 @@
+# Fuzzers for libstagefright_rtsp
+
+## Table of contents
++ [sdploader_fuzzer](#SDPLoader)
++ [rtp_writer_fuzzer](#ARTPWriter)
+
+# <a name="SDPLoader"></a> Fuzzer for SDPLoader
+
+SDPLoader supports the following parameters:
+1. Flag (parameter name: "flags")
+2. URL (parameter name: "url")
+3. Header (parameter name: "headers")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`flags`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`url`| `String` |Value obtained from FuzzedDataProvider|
+|`headers`| `String` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) sdploader_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/sdploader_fuzzer/sdploader_fuzzer
+```
+
+# <a name="ARTPWriter"></a> Fuzzer for ARTPWriter
+
+ARTPWriter supports the following parameters:
+1. File descriptor (parameter name: "fd")
+2. Local Ip (parameter name: "localIp")
+3. Local Port (parameter name: "localPort")
+4. Remote Ip (parameter name: "remoteIp")
+5. Remote Port (parameter name: "remotePort")
+6. Sequence No (parameter name: "seqNo")
+7. OpponentID (parameter name: "opponentID")
+8. Bit Rate (parameter name: "bitrate")
+9. MIME Type (parameter name: "mimeType")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`localIp`| `String` |Value obtained from FuzzedDataProvider|
+|`localPort`| `UINT16_MIN`  to  `UINT16_MAX` |Value obtained from FuzzedDataProvider|
+|`remoteIp`| `String` |Value obtained from FuzzedDataProvider|
+|`remotePort`| `UINT16_MIN`  to  `UINT16_MAX` |Value obtained from FuzzedDataProvider|
+|`seqNo`| `0`  to  `65536` |Value obtained from FuzzedDataProvider|
+|`opponentID`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`bitrate`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`mimeType`| 0. `MEDIA_MIMETYPE_VIDEO_AVC`<br> 1. `MEDIA_MIMETYPE_VIDEO_HEVC`<br> 2. `MEDIA_MIMETYPE_VIDEO_H263`<br> 3. `MEDIA_MIMETYPE_AUDIO_AMR_NB`<br> 4. `MEDIA_MIMETYPE_AUDIO_AMR_WB`|Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) rtp_writer_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/rtp_writer_fuzzer/rtp_writer_fuzzer
+```
diff --git a/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp
new file mode 100644
index 0000000..8d9f923
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/rtsp/ARTPWriter.h>
+
+constexpr int32_t kMinSize = 0;
+constexpr int32_t kMaxSize = 65536;
+constexpr int32_t kMaxTime = 1000;
+constexpr int32_t kMaxBytes = 128;
+constexpr int32_t kAMRNBFrameSizes[] = {13, 14, 16, 18, 20, 21, 27, 32};
+constexpr int32_t kAMRWBFrameSizes[] = {18, 24, 33, 37, 41, 47, 51, 59, 61};
+constexpr int32_t kAMRIndexOffset = 8;
+
+using namespace android;
+
+const char* kKeyMimeTypeArray[] = {MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_HEVC,
+                                   MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_AUDIO_AMR_NB,
+                                   MEDIA_MIMETYPE_AUDIO_AMR_WB};
+
+struct TestMediaSource : public MediaSource {
+  public:
+    TestMediaSource(FuzzedDataProvider& mFdp) : mTestMetaData(new MetaData) {
+        int32_t vectorSize = 0;
+        mAllowRead = mFdp.ConsumeBool();
+        mKeySps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyVps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyPps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyTime = mFdp.ConsumeIntegralInRange<int64_t>(kMinSize, kMaxTime);
+
+        mMimeType = mFdp.PickValueInArray(kKeyMimeTypeArray);
+        mTestMetaData->setCString(kKeyMIMEType, mMimeType);
+        if (mMimeType == MEDIA_MIMETYPE_AUDIO_AMR_NB) {
+            int32_t index =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, std::size(kAMRNBFrameSizes) - 1);
+            vectorSize = kAMRNBFrameSizes[index];
+            mData.push_back(kAMRIndexOffset * index);
+        } else if (mMimeType == MEDIA_MIMETYPE_AUDIO_AMR_WB) {
+            int32_t index =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, std::size(kAMRWBFrameSizes) - 1);
+            vectorSize = kAMRWBFrameSizes[index];
+            mData.push_back(kAMRIndexOffset * index);
+        } else if (mMimeType == MEDIA_MIMETYPE_VIDEO_H263) {
+            // Required format for H263 media data
+            mData.push_back(0);
+            mData.push_back(0);
+            vectorSize = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+        } else {
+            vectorSize = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+        }
+        for (size_t idx = mData.size(); idx < vectorSize; ++idx) {
+            mData.push_back(mFdp.ConsumeIntegral<uint8_t>());
+        }
+    }
+    virtual status_t start(MetaData* /*params*/) { return OK; }
+    virtual status_t stop() { return OK; }
+    virtual sp<MetaData> getFormat() { return mTestMetaData; }
+    virtual status_t read(MediaBufferBase** buffer, const ReadOptions* /*options*/) {
+        if (!mAllowRead) {
+            return -1;
+        }
+        *buffer = new MediaBuffer(mData.data() /*data*/, mData.size() /*size*/);
+        if (mKeySps) {
+            (*buffer)->meta_data().setInt32(kKeySps, mKeySps);
+        }
+        if (mKeyVps) {
+            (*buffer)->meta_data().setInt32(kKeyVps, mKeyVps);
+        }
+        if (mKeyPps) {
+            (*buffer)->meta_data().setInt32(kKeyPps, mKeyPps);
+        }
+        (*buffer)->meta_data().setInt64(kKeyTime, mKeyTime);
+        return OK;
+    }
+
+  private:
+    int32_t mKeySps;
+    int32_t mKeyVps;
+    int32_t mKeyPps;
+    int64_t mKeyTime;
+    bool mAllowRead;
+    const char* mMimeType;
+    sp<MetaData> mTestMetaData;
+    std::vector<uint8_t> mData;
+};
+
+class ARTPWriterFuzzer {
+  public:
+    ARTPWriterFuzzer(const uint8_t* data, size_t size)
+        : mDataSourceFd(memfd_create("InputFile", MFD_ALLOW_SEALING)), mFdp(data, size) {}
+    ~ARTPWriterFuzzer() { close(mDataSourceFd); }
+    void process();
+
+  private:
+    void createARTPWriter();
+    const int32_t mDataSourceFd;
+    FuzzedDataProvider mFdp;
+    sp<ARTPWriter> mArtpWriter;
+};
+
+void ARTPWriterFuzzer::createARTPWriter() {
+    String8 localIp = String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+    String8 remoteIp = String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+    mArtpWriter = sp<ARTPWriter>::make(
+            mDataSourceFd, localIp, mFdp.ConsumeIntegral<uint16_t>() /* localPort */, remoteIp,
+            mFdp.ConsumeIntegral<uint16_t>() /* remotePort */,
+            mFdp.ConsumeIntegralInRange<uint32_t>(kMinSize, kMaxSize) /* seqNo */);
+}
+
+void ARTPWriterFuzzer::process() {
+    if (mFdp.ConsumeBool()) {
+        mArtpWriter = sp<ARTPWriter>::make(mDataSourceFd);
+        if (mArtpWriter->getSequenceNum() > kMaxSize) {
+            createARTPWriter();
+        }
+    } else {
+        createARTPWriter();
+    }
+
+    mArtpWriter->addSource(sp<TestMediaSource>::make(mFdp) /* source */);
+
+    while (mFdp.remaining_bytes()) {
+        auto invokeRTPWriterFuzzer = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    sp<MetaData> metaData = sp<MetaData>::make();
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeySelfID, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyPayloadType, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpExtMap, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpCvoDegrees, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpDscp, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt64(kKeySocketNetwork, mFdp.ConsumeIntegral<int64_t>());
+                    }
+                    mArtpWriter->start(metaData.get() /*param*/);
+                },
+                [&]() {
+                    mArtpWriter->setTMMBNInfo(mFdp.ConsumeIntegral<uint32_t>() /* opponentID */,
+                                              mFdp.ConsumeIntegral<uint32_t>() /* bitrate */);
+                },
+                [&]() { mArtpWriter->stop(); },
+                [&]() {
+                    mArtpWriter->updateCVODegrees(mFdp.ConsumeIntegral<int32_t>() /* cvoDegrees */);
+                },
+                [&]() {
+                    mArtpWriter->updatePayloadType(
+                            mFdp.ConsumeIntegral<int32_t>() /* payloadType */);
+                },
+
+        });
+        invokeRTPWriterFuzzer();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    ARTPWriterFuzzer artpWriterFuzzer(data, size);
+    artpWriterFuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp
new file mode 100644
index 0000000..748e5b6
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <datasource/HTTPBase.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/rtsp/SDPLoader.h>
+
+using namespace android;
+
+constexpr int32_t kMinCapacity = 0;
+constexpr int32_t kMaxCapacity = 1000;
+constexpr int32_t kMaxStringLength = 20;
+constexpr int32_t kMaxBytes = 128;
+enum { kWhatLoad = 'load' };
+
+struct FuzzAHandler : public AHandler {
+  public:
+    FuzzAHandler(std::function<void()> signalEosFunction) : mSignalEosFunction(signalEosFunction) {}
+
+  protected:
+    void onMessageReceived(const sp<AMessage>& msg) override {
+        switch (msg->what()) {
+            case kWhatLoad: {
+                mSignalEosFunction();
+                break;
+            }
+        }
+        return;
+    }
+
+  private:
+    std::function<void()> mSignalEosFunction;
+};
+
+struct FuzzMediaHTTPConnection : public MediaHTTPConnection {
+  public:
+    FuzzMediaHTTPConnection(FuzzedDataProvider* fdp) : mFdp(fdp) {
+        mSize = mFdp->ConsumeIntegralInRange(kMinCapacity, kMaxCapacity);
+        mData = mFdp->ConsumeBytes<uint8_t>(mSize);
+        mSize = mData.size();
+    }
+    virtual bool connect(const char* /* uri */,
+                         const KeyedVector<String8, String8>* /* headers */) {
+        return mFdp->ConsumeBool();
+    }
+    virtual void disconnect() { return; }
+    virtual ssize_t readAt(off64_t offset, void* data, size_t size) {
+        if ((size + offset <= mData.size()) && (offset >= 0)) {
+            memcpy(data, mData.data() + offset, size);
+            return size;
+        }
+        return 0;
+    }
+    virtual off64_t getSize() { return mSize; }
+    virtual status_t getMIMEType(String8* /*mimeType*/) {return mFdp->ConsumeIntegral<status_t>();}
+    virtual status_t getUri(String8* /*uri*/) {return mFdp->ConsumeIntegral<status_t>();}
+
+  private:
+    FuzzedDataProvider* mFdp = nullptr;
+    std::vector<uint8_t> mData;
+    size_t mSize = 0;
+};
+
+struct FuzzMediaHTTPService : public MediaHTTPService {
+  public:
+    FuzzMediaHTTPService(FuzzedDataProvider* fdp) : mFdp(fdp) {}
+    virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+        mediaHTTPConnection = sp<FuzzMediaHTTPConnection>::make(mFdp);
+        return mediaHTTPConnection;
+    }
+
+  private:
+    sp<FuzzMediaHTTPConnection> mediaHTTPConnection = nullptr;
+    FuzzedDataProvider* mFdp = nullptr;
+};
+
+class SDPLoaderFuzzer {
+  public:
+    SDPLoaderFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {}
+    void process();
+
+  private:
+    void signalEos();
+
+    bool mEosReached = false;
+    std::mutex mMsgPostCompleteMutex;
+    std::condition_variable mConditionalVariable;
+    FuzzedDataProvider mFdp;
+};
+
+void SDPLoaderFuzzer::signalEos() {
+    mEosReached = true;
+    mConditionalVariable.notify_one();
+    return;
+}
+
+void SDPLoaderFuzzer::process() {
+    sp<FuzzAHandler> handler = sp<FuzzAHandler>::make(std::bind(&SDPLoaderFuzzer::signalEos, this));
+    sp<ALooper> looper = sp<ALooper>::make();
+    looper->start();
+    looper->registerHandler(handler);
+    const sp<AMessage> notify = sp<AMessage>::make(kWhatLoad, handler);
+    sp<SDPLoader> sdpLoader =
+            sp<SDPLoader>::make(notify, mFdp.ConsumeIntegral<uint32_t>() /* flags */,
+                                sp<FuzzMediaHTTPService>::make(&mFdp) /* httpService */);
+
+    KeyedVector<String8, String8> headers;
+    for (size_t idx = 0; idx < mFdp.ConsumeIntegralInRange<size_t>(kMinCapacity, kMaxCapacity);
+         ++idx) {
+        headers.add(String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str()) /* key */,
+                    String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str()) /* value */);
+    }
+
+    sdpLoader->load(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* url */, &headers);
+
+    std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+    mConditionalVariable.wait(waitForMsgPostComplete, [this] { return mEosReached; });
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    SDPLoaderFuzzer sdpLoaderFuzzer(data, size);
+    sdpLoaderFuzzer.process();
+    return 0;
+}
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index 38cf29d..184e4f4 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -2007,7 +2007,7 @@
             uint8_t mhac_header[mhac_header_size];
             off64_t data_offset = *offset;
 
-            if (chunk_size < sizeof(mhac_header)) {
+            if (mLastTrack == NULL || chunk_size < sizeof(mhac_header)) {
                 return ERROR_MALFORMED;
             }
 
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 067c8f4..0f7d98b 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -570,6 +570,9 @@
     }
 }
 
+// The LL-NDK API is now deprecated. New devices will no longer have the token
+// manager service installed, so createHalToken will return false and this
+// will return AMEDIA_ERROR_UNKNOWN on those devices.
 media_status_t AImageReader::getWindowNativeHandle(native_handle **handle) {
     if (mWindowHandle != nullptr) {
         *handle = mWindowHandle;
diff --git a/media/ndk/fuzzer/Android.bp b/media/ndk/fuzzer/Android.bp
index a3d6a96..ba92b19 100644
--- a/media/ndk/fuzzer/Android.bp
+++ b/media/ndk/fuzzer/Android.bp
@@ -56,6 +56,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of the libmediandk library",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -63,6 +71,11 @@
     name: "ndk_crypto_fuzzer",
     srcs: ["ndk_crypto_fuzzer.cpp"],
     defaults: ["libmediandk_fuzzer_defaults"],
+    fuzz_config: {
+        libfuzzer_options: [
+            "max_len=10000",
+        ],
+    },
 }
 
 cc_fuzz {
@@ -116,3 +129,16 @@
     header_libs: ["libnativewindow_headers",],
     defaults: ["libmediandk_fuzzer_defaults",],
 }
+
+cc_fuzz {
+    name: "ndk_async_codec_fuzzer",
+    srcs: [
+           "ndk_async_codec_fuzzer.cpp",
+           "NdkMediaCodecFuzzerBase.cpp",
+          ],
+    header_libs: [
+           "libnativewindow_headers",
+           "libutils_headers",
+          ],
+    defaults: ["libmediandk_fuzzer_defaults",],
+}
diff --git a/media/ndk/fuzzer/README.md b/media/ndk/fuzzer/README.md
index 0fd08b0..7f6bdd7 100644
--- a/media/ndk/fuzzer/README.md
+++ b/media/ndk/fuzzer/README.md
@@ -8,6 +8,7 @@
 + [ndk_drm_fuzzer](#NdkDrm)
 + [ndk_mediamuxer_fuzzer](#NdkMediaMuxer)
 + [ndk_sync_codec_fuzzer](#NdkSyncCodec)
++ [ndk_async_codec_fuzzer](#NdkAsyncCodec)
 
 # <a name="NdkCrypto"></a> Fuzzer for NdkCrypto
 
@@ -156,3 +157,16 @@
   $ adb sync data
   $ adb shell /data/fuzz/arm64/ndk_sync_codec_fuzzer/ndk_sync_codec_fuzzer
 ```
+
+# <a name="NdkAsyncCodec"></a> Fuzzer for NdkAsyncCodec
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_async_codec_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_async_codec_fuzzer/ndk_async_codec_fuzzer
+```
diff --git a/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp b/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
new file mode 100644
index 0000000..28a38fe
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
@@ -0,0 +1,441 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <NdkMediaCodecFuzzerBase.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <mutex>
+#include <queue>
+#include <thread>
+
+using namespace android;
+using namespace std;
+
+constexpr int32_t kMaxCryptoInfoAPIs = 3;
+constexpr int32_t kMaxNdkCodecAPIs = 5;
+
+template <typename T>
+class CallBackQueue {
+  public:
+    void push(T elem) {
+        bool needsNotify = false;
+        {
+            unique_lock<mutex> lock(mMutex);
+            needsNotify = mQueue.empty();
+            mQueue.push(std::move(elem));
+        }
+        if (needsNotify) {
+            mQueueNotEmptyCondition.notify_one();
+        }
+    }
+
+    T pop() {
+        unique_lock<mutex> lock(mMutex);
+        if (mQueue.empty()) {
+            mQueueNotEmptyCondition.wait(lock, [this]() { return !mQueue.empty(); });
+        }
+        auto result = mQueue.front();
+        mQueue.pop();
+        return result;
+    }
+
+  private:
+    mutex mMutex;
+    std::queue<T> mQueue;
+    std::condition_variable mQueueNotEmptyCondition;
+};
+
+class CallBackHandle {
+  public:
+    CallBackHandle() : mSawError(false), mIsDone(false) {}
+
+    virtual ~CallBackHandle() {}
+
+    void ioThread();
+
+    // Implementation in child class (Decoder/Encoder)
+    virtual void invokeInputBufferAPI(AMediaCodec* codec, int32_t index) {
+        (void)codec;
+        (void)index;
+    }
+    virtual void onFormatChanged(AMediaCodec* codec, AMediaFormat* format) {
+        (void)codec;
+        (void)format;
+    }
+    virtual void receiveError(void) {}
+    virtual void invokeOutputBufferAPI(AMediaCodec* codec, int32_t index,
+                                       AMediaCodecBufferInfo* bufferInfo) {
+        (void)codec;
+        (void)index;
+        (void)bufferInfo;
+    }
+
+    // Keep a queue of all function callbacks.
+    typedef function<void()> IOTask;
+    CallBackQueue<IOTask> mIOQueue;
+    bool mSawError;
+    bool mIsDone;
+};
+
+void CallBackHandle::ioThread() {
+    while (!mIsDone && !mSawError) {
+        auto task = mIOQueue.pop();
+        task();
+    }
+}
+
+static void onAsyncInputAvailable(AMediaCodec* codec, void* userdata, int32_t index) {
+    CallBackHandle* self = (CallBackHandle*)userdata;
+    self->mIOQueue.push([self, codec, index]() { self->invokeInputBufferAPI(codec, index); });
+}
+
+static void onAsyncOutputAvailable(AMediaCodec* codec, void* userdata, int32_t index,
+                                   AMediaCodecBufferInfo* bufferInfo) {
+    CallBackHandle* self = (CallBackHandle*)userdata;
+    AMediaCodecBufferInfo bufferInfoCopy = *bufferInfo;
+    self->mIOQueue.push([self, codec, index, bufferInfoCopy]() {
+        AMediaCodecBufferInfo bc = bufferInfoCopy;
+        self->invokeOutputBufferAPI(codec, index, &bc);
+    });
+}
+
+static void onAsyncFormatChanged(AMediaCodec* codec, void* userdata, AMediaFormat* format) {
+    (void)codec;
+    (void)userdata;
+    (void)format;
+};
+
+static void onAsyncError(AMediaCodec* codec, void* userdata, media_status_t err, int32_t actionCode,
+                         const char* detail) {
+    CallBackHandle* self = (CallBackHandle*)userdata;
+    self->mSawError = true;
+    self->receiveError();
+    (void)codec;
+    (void)err;
+    (void)actionCode;
+    (void)detail;
+};
+
+class NdkAsyncCodecFuzzer : public NdkMediaCodecFuzzerBase, public CallBackHandle {
+  public:
+    NdkAsyncCodecFuzzer(const uint8_t* data, size_t size)
+        : NdkMediaCodecFuzzerBase(), mFdp(data, size) {
+        setFdp(&mFdp);
+        mStopCodec = false;
+        mSawInputEOS = false;
+        mSignalledError = false;
+        mIsEncoder = false;
+        mNumOfFrames = 0;
+        mNumInputFrames = 0;
+    };
+    ~NdkAsyncCodecFuzzer() {
+        mIOThreadPool->stop();
+        delete (mIOThreadPool);
+    };
+
+    void process();
+
+    static void codecOnFrameRendered(AMediaCodec* codec, void* userdata, int64_t mediaTimeUs,
+                                     int64_t systemNano) {
+        (void)codec;
+        (void)userdata;
+        (void)mediaTimeUs;
+        (void)systemNano;
+    };
+    class ThreadPool {
+      public:
+        void start();
+        void queueJob(const std::function<void()>& job);
+        void stop();
+
+      private:
+        void ThreadLoop();
+        bool mShouldTerminate = false;
+        std::vector<std::thread> mThreads;
+        std::mutex mQueueMutex;
+        std::condition_variable mQueueMutexCondition;
+        std::queue<std::function<void()>> mJobs;
+    };
+
+  private:
+    FuzzedDataProvider mFdp;
+    AMediaCodec* mCodec = nullptr;
+    void invokeCodecCryptoInfoAPI();
+    void invokeAsyncCodecAPIs(bool isEncoder);
+    void invokeAsyncCodeConfigAPI();
+    void invokeInputBufferAPI(AMediaCodec* codec, int32_t bufferIndex);
+    void invokeOutputBufferAPI(AMediaCodec* codec, int32_t bufferIndex,
+                               AMediaCodecBufferInfo* bufferInfo);
+    void invokeFormatAPI(AMediaCodec* codec);
+    void receiveError();
+    bool mStopCodec;
+    bool mSawInputEOS;
+    bool mSignalledError;
+    int32_t mNumOfFrames;
+    int32_t mNumInputFrames;
+    mutable Mutex mMutex;
+    bool mIsEncoder;
+    ThreadPool* mIOThreadPool = new ThreadPool();
+};
+
+void NdkAsyncCodecFuzzer::ThreadPool::start() {
+    const uint32_t numThreads = std::thread::hardware_concurrency();
+    mThreads.resize(numThreads);
+    for (uint32_t i = 0; i < numThreads; ++i) {
+        mThreads.at(i) = std::thread(&ThreadPool::ThreadLoop, this);
+    }
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::ThreadLoop() {
+    while (true) {
+        std::function<void()> job;
+        {
+            std::unique_lock<std::mutex> lock(mQueueMutex);
+            mQueueMutexCondition.wait(lock, [this] { return !mJobs.empty() || mShouldTerminate; });
+            if (mShouldTerminate) {
+                return;
+            }
+            job = mJobs.front();
+            mJobs.pop();
+        }
+        job();
+    }
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::queueJob(const std::function<void()>& job) {
+    {
+        std::unique_lock<std::mutex> lock(mQueueMutex);
+        mJobs.push(job);
+    }
+    mQueueMutexCondition.notify_one();
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::stop() {
+    {
+        std::unique_lock<std::mutex> lock(mQueueMutex);
+        mShouldTerminate = true;
+    }
+    mQueueMutexCondition.notify_all();
+    for (std::thread& active_thread : mThreads) {
+        active_thread.join();
+    }
+    mThreads.clear();
+}
+
+void NdkAsyncCodecFuzzer::receiveError(void) {
+    mSignalledError = true;
+}
+
+void NdkAsyncCodecFuzzer::invokeInputBufferAPI(AMediaCodec* codec, int32_t bufferIndex) {
+    size_t bufferSize = 0;
+    Mutex::Autolock autoLock(mMutex);
+    if (mSignalledError) {
+        CallBackHandle::mSawError = true;
+        return;
+    }
+    if (mStopCodec || bufferIndex < 0 || mSawInputEOS) {
+        return;
+    }
+
+    uint8_t* buffer = AMediaCodec_getInputBuffer(codec, bufferIndex, &bufferSize);
+    if (buffer) {
+        std::vector<uint8_t> bytesRead = mFdp.ConsumeBytes<uint8_t>(
+                std::min(mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes), bufferSize));
+        memcpy(buffer, bytesRead.data(), bytesRead.size());
+        bufferSize = bytesRead.size();
+    } else {
+        mSignalledError = true;
+        return;
+    }
+
+    uint32_t flag = 0;
+    if (!bufferSize || mNumInputFrames == mNumOfFrames) {
+        flag |= AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;
+        mSawInputEOS = true;
+    }
+    AMediaCodec_queueInputBuffer(codec, bufferIndex, 0 /* offset */, bufferSize, 0 /* time */,
+                                 flag);
+    mNumInputFrames++;
+}
+
+void NdkAsyncCodecFuzzer::invokeOutputBufferAPI(AMediaCodec* codec, int32_t bufferIndex,
+                                                AMediaCodecBufferInfo* bufferInfo) {
+    size_t bufferSize = 0;
+    Mutex::Autolock autoLock(mMutex);
+
+    if (mSignalledError) {
+        CallBackHandle::mSawError = true;
+        return;
+    }
+
+    if (mStopCodec || bufferIndex < 0 || mIsDone) {
+        return;
+    }
+
+    if (!mIsEncoder) {
+        (void)AMediaCodec_getOutputBuffer(codec, bufferIndex, &bufferSize);
+    }
+    AMediaCodec_releaseOutputBuffer(codec, bufferIndex, mFdp.ConsumeBool());
+    mIsDone = (0 != (bufferInfo->flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM));
+}
+
+void NdkAsyncCodecFuzzer::invokeFormatAPI(AMediaCodec* codec) {
+    AMediaFormat* codecFormat = nullptr;
+    if (mFdp.ConsumeBool()) {
+        codecFormat = AMediaCodec_getInputFormat(codec);
+    } else {
+        codecFormat = AMediaCodec_getOutputFormat(codec);
+    }
+    if (codecFormat) {
+        AMediaFormat_delete(codecFormat);
+    }
+}
+
+void NdkAsyncCodecFuzzer::invokeAsyncCodecAPIs(bool isEncoder) {
+    ANativeWindow* nativeWindow = nullptr;
+
+    if (mFdp.ConsumeBool()) {
+        AMediaCodec_createInputSurface(mCodec, &nativeWindow);
+    }
+
+    if (AMEDIA_OK == AMediaCodec_configure(mCodec, getCodecFormat(), nativeWindow,
+                                           nullptr /* crypto */,
+                                           (isEncoder ? AMEDIACODEC_CONFIGURE_FLAG_ENCODE : 0))) {
+        mNumOfFrames = mFdp.ConsumeIntegralInRange<size_t>(kMinIterations, kMaxIterations);
+        // Configure codecs to run in async mode.
+        AMediaCodecOnAsyncNotifyCallback callBack = {onAsyncInputAvailable, onAsyncOutputAvailable,
+                                                     onAsyncFormatChanged, onAsyncError};
+        AMediaCodec_setAsyncNotifyCallback(mCodec, callBack, this);
+        mIOThreadPool->queueJob([this] { CallBackHandle::ioThread(); });
+
+        AMediaCodec_start(mCodec);
+        sleep(5);
+        int32_t count = 0;
+        while (++count <= mNumOfFrames) {
+            int32_t ndkcodecAPI =
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxNdkCodecAPIs);
+            switch (ndkcodecAPI) {
+                case 0: {  // get input and output Format
+                    invokeFormatAPI(mCodec);
+                    break;
+                }
+                case 1: {
+                    AMediaCodec_signalEndOfInputStream(mCodec);
+                    mSawInputEOS = true;
+                    break;
+                }
+                case 2: {  // set parameters
+                    // Create a new parameter and set
+                    AMediaFormat* params = AMediaFormat_new();
+                    AMediaFormat_setInt32(
+                            params, "video-bitrate",
+                            mFdp.ConsumeIntegralInRange<size_t>(kMinIntKeyValue, kMaxIntKeyValue));
+                    AMediaCodec_setParameters(mCodec, params);
+                    AMediaFormat_delete(params);
+                    break;
+                }
+                case 3: {  // flush codec
+                    AMediaCodec_flush(mCodec);
+                    if (mFdp.ConsumeBool()) {
+                        AMediaCodec_start(mCodec);
+                    }
+                    break;
+                }
+                case 4: {
+                    char* name = nullptr;
+                    AMediaCodec_getName(mCodec, &name);
+                    AMediaCodec_releaseName(mCodec, name);
+                    break;
+                }
+                case 5:
+                default: {
+                    std::vector<uint8_t> userData = mFdp.ConsumeBytes<uint8_t>(
+                            mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                    AMediaCodecOnFrameRendered callback = codecOnFrameRendered;
+                    AMediaCodec_setOnFrameRenderedCallback(mCodec, callback, userData.data());
+                    break;
+                }
+            }
+        }
+        {
+            Mutex::Autolock autoLock(mMutex);
+            mStopCodec = 1;
+            AMediaCodec_stop(mCodec);
+        }
+    }
+
+    if (nativeWindow) {
+        ANativeWindow_release(nativeWindow);
+    }
+}
+
+void NdkAsyncCodecFuzzer::invokeAsyncCodeConfigAPI() {
+    mIOThreadPool->start();
+
+    while (mFdp.remaining_bytes() > 0) {
+        mIsEncoder = mFdp.ConsumeBool();
+        mCodec = createCodec(mIsEncoder, mFdp.ConsumeBool() /* isCodecForClient */);
+        if (mCodec) {
+            invokeAsyncCodecAPIs(mIsEncoder);
+            AMediaCodec_delete(mCodec);
+        }
+    }
+    mIOThreadPool->stop();
+}
+
+void NdkAsyncCodecFuzzer::invokeCodecCryptoInfoAPI() {
+    while (mFdp.remaining_bytes() > 0) {
+        AMediaCodecCryptoInfo* cryptoInfo = getAMediaCodecCryptoInfo();
+        int32_t ndkCryptoInfoAPI =
+                mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxCryptoInfoAPIs);
+        switch (ndkCryptoInfoAPI) {
+            case 0: {
+                size_t sizes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getEncryptedBytes(cryptoInfo, sizes);
+                break;
+            }
+            case 1: {
+                size_t sizes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getClearBytes(cryptoInfo, sizes);
+                break;
+            }
+            case 2: {
+                uint8_t bytes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getIV(cryptoInfo, bytes);
+                break;
+            }
+            case 3:
+            default: {
+                uint8_t bytes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getKey(cryptoInfo, bytes);
+                break;
+            }
+        }
+        AMediaCodecCryptoInfo_delete(cryptoInfo);
+    }
+}
+
+void NdkAsyncCodecFuzzer::process() {
+    if (mFdp.ConsumeBool()) {
+        invokeCodecCryptoInfoAPI();
+    } else {
+        invokeAsyncCodeConfigAPI();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    NdkAsyncCodecFuzzer ndkAsyncCodecFuzzer(data, size);
+    ndkAsyncCodecFuzzer.process();
+    return 0;
+}
diff --git a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
index 2b22f0f..a759ae7 100644
--- a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
@@ -20,10 +20,12 @@
 constexpr size_t kMaxString = 256;
 constexpr size_t kMinBytes = 0;
 constexpr size_t kMaxBytes = 1000;
+constexpr size_t kMaxRuns = 100;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
     AMediaUUID uuid = {};
+    size_t apiCount = 0;
     int32_t maxLen = fdp.ConsumeIntegralInRange<size_t>(kMinBytes, (size_t)sizeof(AMediaUUID));
     for (size_t idx = 0; idx < maxLen; ++idx) {
         uuid[idx] = fdp.ConsumeIntegral<uint8_t>();
@@ -31,7 +33,14 @@
     std::vector<uint8_t> initData =
             fdp.ConsumeBytes<uint8_t>(fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
     AMediaCrypto* crypto = AMediaCrypto_new(uuid, initData.data(), initData.size());
-    while (fdp.remaining_bytes()) {
+    /*
+     * The AMediaCrypto_isCryptoSchemeSupported API doesn't consume any input bytes,
+     * so an iteration in which PickValueInArray() selects it consumes only the single
+     * byte used by PickValueInArray() itself. As a result, on larger inputs,
+     * AMediaCrypto_isCryptoSchemeSupported can run a very large number of times and
+     * potentially cause a timeout. To prevent this, the loop is capped at kMaxRuns iterations.
+     */
+    while (fdp.remaining_bytes() && ++apiCount <= kMaxRuns) {
         auto invokeNdkCryptoFuzzer = fdp.PickValueInArray<const std::function<void()>>({
                 [&]() {
                     AMediaCrypto_requiresSecureDecoderComponent(
diff --git a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
index c19ea13..23e2eaf 100644
--- a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
@@ -18,6 +18,7 @@
 #include <fcntl.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/NdkMediaFormat.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <sys/mman.h>
 #include <unistd.h>
 #include <utils/Log.h>
@@ -176,11 +177,13 @@
 constexpr size_t kMaxBytes = 1000;
 constexpr size_t kMinChoice = 0;
 constexpr size_t kMaxChoice = 9;
+const size_t kMaxIteration = android::AMessage::maxAllowedEntries();
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
     AMediaFormat* mediaFormat = AMediaFormat_new();
-    while (fdp.remaining_bytes()) {
+    std::vector<std::string> nameCollection;
+    while (fdp.remaining_bytes() && nameCollection.size() < kMaxIteration) {
         const char* name = nullptr;
         std::string nameString;
         if (fdp.ConsumeBool()) {
@@ -190,6 +193,11 @@
                             : fdp.ConsumeRandomLengthString(
                                       fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
             name = nameString.c_str();
+            std::vector<std::string>::iterator it =
+                    find(nameCollection.begin(), nameCollection.end(), name);
+            if (it == nameCollection.end()) {
+                nameCollection.push_back(name);
+            }
         }
         switch (fdp.ConsumeIntegralInRange<int32_t>(kMinChoice, kMaxChoice)) {
             case 0: {
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index b6dcaae..48a0a82 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -540,9 +540,11 @@
  *
  * @return AMEDIA_OK if the method call succeeds.
  *         AMEDIA_ERROR_INVALID_PARAMETER if reader or handle are NULL.
- *         AMEDIA_ERROR_UNKNOWN if some other error is encountered.
+ *         AMEDIA_ERROR_UNKNOWN if some other error is encountered, or if
+ *         the device no longer provides the android.hidl.token service
+ *         needed to satisfy the request because this API is deprecated.
  */
-media_status_t AImageReader_getWindowNativeHandle(
+[[deprecated]] media_status_t AImageReader_getWindowNativeHandle(
     AImageReader *reader, /* out */native_handle_t **handle);
 #endif
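Note for reviewers: the following is an illustrative sketch only (not part of this change) of how a vendor client built with __ANDROID_VNDK__ might probe the now-deprecated entry point and fall back to the plain ANativeWindow path. The helper name getReaderHandleOrWindow is hypothetical; the NDK calls themselves are the ones documented above.

```
// Illustrative sketch, assuming a VNDK build where NdkImageReader.h is available.
// Error handling is abbreviated.
#include <media/NdkImage.h>
#include <media/NdkImageReader.h>

bool getReaderHandleOrWindow(AImageReader* reader,
                             native_handle_t** outHandle,
                             ANativeWindow** outWindow) {
    // Deprecated path: on devices that no longer ship the android.hidl.token
    // service this returns AMEDIA_ERROR_UNKNOWN.
    if (AImageReader_getWindowNativeHandle(reader, outHandle) == AMEDIA_OK) {
        return true;
    }
    // Fallback: use the ANativeWindow directly instead of a HIDL token handle.
    *outHandle = nullptr;
    return AImageReader_getWindow(reader, outWindow) == AMEDIA_OK;
}
```

On devices that still have the token service the first branch keeps working; on newer devices the documented AMEDIA_ERROR_UNKNOWN result triggers the fallback.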
 
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 7abb0b6..07dac5e 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -179,3 +179,8 @@
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
+
+cc_library_headers {
+    name: "mediautils_headers",
+    export_include_dirs: ["include", "."],
+}
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 0689083..3fdc6eb 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -200,7 +200,10 @@
     name: "timerthread_tests",
 
     defaults: ["libmediautils_tests_defaults"],
-
+    // TODO(b/270180838)
+    test_options: {
+        unit_test: false,
+    },
     srcs: [
         "TimerThread-test.cpp",
     ],
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 17e6d15..6329bae 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -138,10 +138,57 @@
     ],
 }
 
-cc_library_shared {
+cc_defaults {
+    name: "libaudioflinger_dependencies",
+
+    shared_libs: [
+        "audioflinger-aidl-cpp",
+        "audioclient-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "effect-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libactivitymanager_aidl",
+        "libaudioflinger_datapath",
+        "libaudioflinger_fastpath",
+        "libaudioflinger_timing",
+        "libaudioflinger_utils",
+        "libaudiofoundation",
+        "libaudiohal",
+        "libaudioprocessing",
+        "libaudioutils",
+        "libcutils",
+        "libutils",
+        "liblog",
+        "libbinder",
+        "libbinder_ndk",
+        "libaudioclient",
+        "libaudiomanager",
+        "libmediametrics",
+        "libmediautils",
+        "libnbaio",
+        "libnblog",
+        "libpermission",
+        "libpowermanager",
+        "libmemunreachable",
+        "libmedia_helper",
+        "libshmemcompat",
+        "libsounddose",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
+    ],
+
+    static_libs: [
+        "libmedialogservice",
+        "libaudiospdif",
+    ],
+}
+
+
+cc_library {
     name: "libaudioflinger",
 
     defaults: [
+        "libaudioflinger_dependencies",
         "latest_android_media_audio_common_types_cpp_shared",
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "audioflinger_flags_defaults",
@@ -163,44 +210,6 @@
         "frameworks/av/services/medialog",
     ],
 
-    shared_libs: [
-        "audioflinger-aidl-cpp",
-        "audioclient-types-aidl-cpp",
-        "av-types-aidl-cpp",
-        "effect-aidl-cpp",
-        "libaudioclient_aidl_conversion",
-        "libactivitymanager_aidl",
-        "libaudioflinger_datapath",
-        "libaudioflinger_fastpath",
-        "libaudioflinger_timing",
-        "libaudioflinger_utils",
-        "libaudiofoundation",
-        "libaudiohal",
-        "libaudioprocessing",
-        "libaudiospdif",
-        "libaudioutils",
-        "libcutils",
-        "libutils",
-        "liblog",
-        "libbinder",
-        "libbinder_ndk",
-        "libaudioclient",
-        "libaudiomanager",
-        "libmedialogservice",
-        "libmediametrics",
-        "libmediautils",
-        "libnbaio",
-        "libnblog",
-        "libpermission",
-        "libpowermanager",
-        "libmemunreachable",
-        "libmedia_helper",
-        "libshmemcompat",
-        "libsounddose",
-        "libvibrator",
-        "packagemanager_aidl-cpp",
-    ],
-
     static_libs: [
         "libcpustats",
         "libpermission",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 60717b5..ad2ba21 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2200,7 +2200,7 @@
 
 void AudioFlinger::removeNotificationClient(pid_t pid)
 {
-    std::vector< sp<AudioFlinger::EffectModule> > removedEffects;
+    std::vector<sp<IAfEffectModule>> removedEffects;
     {
         Mutex::Autolock _l(mLock);
         {
@@ -2536,7 +2536,7 @@
 
         // Check if one effect chain was awaiting for an AudioRecord to be created on this
         // session and move it to this thread.
-        sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
+        sp<IAfEffectChain> chain = getOrphanEffectChain_l(sessionId);
         if (chain != 0) {
             Mutex::Autolock _l2(thread->mLock);
             thread->addEffectChain_l(chain);
@@ -3216,7 +3216,7 @@
                     // audioflinger lock is held so order of thread lock acquisition doesn't matter
                     Mutex::Autolock _dl(dstThread->mLock);
                     Mutex::Autolock _sl(playbackThread->mLock);
-                    Vector< sp<EffectChain> > effectChains = playbackThread->getEffectChains_l();
+                    Vector<sp<IAfEffectChain>> effectChains = playbackThread->getEffectChains_l();
                     for (size_t i = 0; i < effectChains.size(); i ++) {
                         moveEffectChain_l(effectChains[i]->sessionId(), playbackThread.get(),
                                 dstThread);
@@ -3450,10 +3450,10 @@
             // on at least one effect. We must either move the chain to an existing thread with the
             // same session ID or put it aside in case a new record thread is opened for a
             // new capture on the same session
-            sp<EffectChain> chain;
+            sp<IAfEffectChain> chain;
             {
                 Mutex::Autolock _sl(recordThread->mLock);
-                Vector< sp<EffectChain> > effectChains = recordThread->getEffectChains_l();
+                Vector< sp<IAfEffectChain> > effectChains = recordThread->getEffectChains_l();
                 // Note: maximum one chain per record thread
                 if (effectChains.size() != 0) {
                     chain = effectChains[0];
@@ -3596,7 +3596,7 @@
 
 void AudioFlinger::releaseAudioSessionId(audio_session_t audioSession, pid_t pid)
 {
-    std::vector< sp<EffectModule> > removedEffects;
+    std::vector<sp<IAfEffectModule>> removedEffects;
     {
         Mutex::Autolock _l(mLock);
         pid_t caller = IPCThreadState::self()->getCallingPid();
@@ -3614,7 +3614,7 @@
                 if (ref->mCnt == 0) {
                     mAudioSessionRefs.removeAt(i);
                     delete ref;
-                    std::vector< sp<EffectModule> > effects = purgeStaleEffects_l();
+                    std::vector<sp<IAfEffectModule>> effects = purgeStaleEffects_l();
                     removedEffects.insert(removedEffects.end(), effects.begin(), effects.end());
                 }
                 goto Exit;
@@ -3644,18 +3644,18 @@
     return false;
 }
 
-std::vector<sp<AudioFlinger::EffectModule>> AudioFlinger::purgeStaleEffects_l() {
+std::vector<sp<IAfEffectModule>> AudioFlinger::purgeStaleEffects_l() {
 
     ALOGV("purging stale effects");
 
-    Vector< sp<EffectChain> > chains;
-    std::vector< sp<EffectModule> > removedEffects;
+    Vector< sp<IAfEffectChain> > chains;
+    std::vector< sp<IAfEffectModule> > removedEffects;
 
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
         sp<PlaybackThread> t = mPlaybackThreads.valueAt(i);
         Mutex::Autolock _l(t->mLock);
         for (size_t j = 0; j < t->mEffectChains.size(); j++) {
-            sp<EffectChain> ec = t->mEffectChains[j];
+            sp<IAfEffectChain> ec = t->mEffectChains[j];
             if (!audio_is_global_session(ec->sessionId())) {
                 chains.push(ec);
             }
@@ -3666,7 +3666,7 @@
         sp<RecordThread> t = mRecordThreads.valueAt(i);
         Mutex::Autolock _l(t->mLock);
         for (size_t j = 0; j < t->mEffectChains.size(); j++) {
-            sp<EffectChain> ec = t->mEffectChains[j];
+            sp<IAfEffectChain> ec = t->mEffectChains[j];
             chains.push(ec);
         }
     }
@@ -3675,16 +3675,16 @@
         sp<MmapThread> t = mMmapThreads.valueAt(i);
         Mutex::Autolock _l(t->mLock);
         for (size_t j = 0; j < t->mEffectChains.size(); j++) {
-            sp<EffectChain> ec = t->mEffectChains[j];
+            sp<IAfEffectChain> ec = t->mEffectChains[j];
             chains.push(ec);
         }
     }
 
     for (size_t i = 0; i < chains.size(); i++) {
          // clang-tidy suggests const ref
-        sp<EffectChain> ec = chains[i];  // NOLINT(performance-unnecessary-copy-initialization)
+        sp<IAfEffectChain> ec = chains[i];  // NOLINT(performance-unnecessary-copy-initialization)
         int sessionid = ec->sessionId();
-        sp<ThreadBase> t = ec->thread().promote();
+        sp<ThreadBase> t = sp<ThreadBase>::cast(ec->thread().promote()); // TODO(b/288339104)
         if (t == 0) {
             continue;
         }
@@ -3702,8 +3702,8 @@
         if (!found) {
             Mutex::Autolock _l(t->mLock);
             // remove all effects from the chain
-            while (ec->mEffects.size()) {
-                sp<EffectModule> effect = ec->mEffects[0];
+            while (ec->numberOfEffects()) {
+                sp<IAfEffectModule> effect = ec->getEffectModule(0);
                 effect->unPin();
                 t->removeEffect_l(effect, /*release*/ true);
                 if (effect->purgeHandles()) {
@@ -4140,7 +4140,7 @@
             aidl2legacy_EffectDescriptor_effect_descriptor_t(request.desc));
     const bool probe = request.probe;
 
-    sp<EffectHandle> handle;
+    sp<IAfEffectHandle> handle;
     effect_descriptor_t descOut;
     int enabledOut = 0;
     int idOut = -1;
@@ -4248,7 +4248,7 @@
             goto Exit;
         }
 
-        const bool hapticPlaybackRequired = EffectModule::isHapticGenerator(&descOut.type);
+        const bool hapticPlaybackRequired = IAfEffectModule::isHapticGenerator(&descOut.type);
         if (hapticPlaybackRequired
                 && (sessionId == AUDIO_SESSION_DEVICE
                         || sessionId == AUDIO_SESSION_OUTPUT_MIX
@@ -4376,7 +4376,7 @@
         } else {
             // Check if one effect chain was awaiting for an effect to be created on this
             // session and used it instead of creating a new one.
-            sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
+            sp<IAfEffectChain> chain = getOrphanEffectChain_l(sessionId);
             if (chain != 0) {
                 Mutex::Autolock _l2(thread->mLock);
                 thread->addEffectChain_l(chain);
@@ -4428,7 +4428,7 @@
             response->alreadyExists = false;
         }
         // Check CPU and memory usage
-        sp<EffectBase> effect = handle->effect().promote();
+        sp<IAfEffectBase> effect = handle->effect().promote();
         if (effect != nullptr) {
             status_t rStatus = effect->updatePolicyState();
             if (rStatus != NO_ERROR) {
@@ -4441,7 +4441,7 @@
 
     response->id = idOut;
     response->enabled = enabledOut != 0;
-    response->effect = handle;
+    response->effect = handle->asIEffect();
     response->desc = VALUE_OR_RETURN_STATUS(
             legacy2aidl_effect_descriptor_t_EffectDescriptor(descOut));
 
@@ -4487,7 +4487,7 @@
       return;
     }
     Mutex::Autolock _sl(thread->mLock);
-    sp<EffectModule> effect = thread->getEffect_l(sessionId, effectId);
+    sp<IAfEffectModule> effect = thread->getEffect_l(sessionId, effectId);
     thread->setEffectSuspended_l(&effect->desc().type, suspended, sessionId);
 }
 
@@ -4501,7 +4501,7 @@
     ALOGV("moveEffectChain_l() session %d from thread %p to thread %p",
             sessionId, srcThread, dstThread);
 
-    sp<EffectChain> chain = srcThread->getEffectChain_l(sessionId);
+    sp<IAfEffectChain> chain = srcThread->getEffectChain_l(sessionId);
     if (chain == 0) {
         ALOGW("moveEffectChain_l() effect chain for session %d not on source thread %p",
                 sessionId, srcThread);
@@ -4525,12 +4525,12 @@
 
     // transfer all effects one by one so that new effect chain is created on new thread with
     // correct buffer sizes and audio parameters and effect engines reconfigured accordingly
-    sp<EffectChain> dstChain;
-    Vector< sp<EffectModule> > removed;
+    sp<IAfEffectChain> dstChain;
+    Vector<sp<IAfEffectModule>> removed;
     status_t status = NO_ERROR;
     std::string errorString;
     // process effects one by one.
-    for (sp<EffectModule> effect = chain->getEffectFromId_l(0); effect != nullptr;
+    for (sp<IAfEffectModule> effect = chain->getEffectFromId_l(0); effect != nullptr;
             effect = chain->getEffectFromId_l(0)) {
         srcThread->removeEffect_l(effect);
         removed.add(effect);
@@ -4578,8 +4578,8 @@
         // see b/202360137.
         dstChain->lock();
         for (const auto& effect : removed) {
-            if (effect->state() == EffectModule::ACTIVE ||
-                    effect->state() == EffectModule::STOPPING) {
+            if (effect->state() == IAfEffectModule::ACTIVE ||
+                    effect->state() == IAfEffectModule::STOPPING) {
                 ++started;
                 effect->start();
             }
@@ -4616,13 +4616,13 @@
     if (EffectId != 0 && thread != 0 && dstThread != thread.get()) {
         Mutex::Autolock _dl(dstThread->mLock);
         Mutex::Autolock _sl(thread->mLock);
-        sp<EffectChain> srcChain = thread->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
-        sp<EffectChain> dstChain;
+        sp<IAfEffectChain> srcChain = thread->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
+        sp<IAfEffectChain> dstChain;
         if (srcChain == 0) {
             return INVALID_OPERATION;
         }
 
-        sp<EffectModule> effect = srcChain->getEffectFromId_l(EffectId);
+        sp<IAfEffectModule> effect = srcChain->getEffectFromId_l(EffectId);
         if (effect == 0) {
             return INVALID_OPERATION;
         }
@@ -4642,8 +4642,8 @@
 
 Exit:
         // removeEffect_l() has stopped the effect if it was active so it must be restarted
-        if (effect->state() == EffectModule::ACTIVE ||
-            effect->state() == EffectModule::STOPPING) {
+        if (effect->state() == IAfEffectModule::ACTIVE ||
+            effect->state() == IAfEffectModule::STOPPING) {
             effect->start();
         }
     }
@@ -4663,7 +4663,7 @@
     }
 
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
-        sp<EffectChain> ec =
+        sp<IAfEffectChain> ec =
                 mPlaybackThreads.valueAt(i)->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
         if (ec != 0 && ec->isNonOffloadableEnabled()) {
             return true;
@@ -4687,7 +4687,7 @@
 
 }
 
-status_t AudioFlinger::putOrphanEffectChain_l(const sp<AudioFlinger::EffectChain>& chain)
+status_t AudioFlinger::putOrphanEffectChain_l(const sp<IAfEffectChain>& chain)
 {
     // clear possible suspended state before parking the chain so that it starts in default state
     // when attached to a new record thread
@@ -4705,9 +4705,9 @@
     return NO_ERROR;
 }
 
-sp<AudioFlinger::EffectChain> AudioFlinger::getOrphanEffectChain_l(audio_session_t session)
+sp<IAfEffectChain> AudioFlinger::getOrphanEffectChain_l(audio_session_t session)
 {
-    sp<EffectChain> chain;
+    sp<IAfEffectChain> chain;
     ssize_t index = mOrphanEffectChains.indexOfKey(session);
     ALOGV("getOrphanEffectChain_l session %d index %zd", session, index);
     if (index >= 0) {
@@ -4717,14 +4717,14 @@
     return chain;
 }
 
-bool AudioFlinger::updateOrphanEffectChains(const sp<AudioFlinger::EffectModule>& effect)
+bool AudioFlinger::updateOrphanEffectChains(const sp<IAfEffectModule>& effect)
 {
     Mutex::Autolock _l(mLock);
     audio_session_t session = effect->sessionId();
     ssize_t index = mOrphanEffectChains.indexOfKey(session);
     ALOGV("updateOrphanEffectChains session %d index %zd", session, index);
     if (index >= 0) {
-        sp<EffectChain> chain = mOrphanEffectChains.valueAt(index);
+        sp<IAfEffectChain> chain = mOrphanEffectChains.valueAt(index);
         if (chain->removeEffect_l(effect, true) == 0) {
             ALOGV("updateOrphanEffectChains removing effect chain at index %zd", index);
             mOrphanEffectChains.removeItemsAt(index);
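Background note on the renames above (EffectChain → IAfEffectChain, EffectModule → IAfEffectModule, and so on): callers now depend only on the abstract IAf* interfaces declared in IAfEffect.h, so the concrete Effects classes can later be moved to separate files (see the TODO(b/288339104) markers). The snippet below is a small, generic sketch of that interface-extraction pattern; the names are illustrative, and it uses std::shared_ptr instead of android::sp purely to stay self-contained.

```
// Generic sketch of hiding a concrete class behind an abstract interface so
// that clients depend only on the interface header.
#include <cstddef>
#include <memory>
#include <vector>

class IAfEffectChainLike {
  public:
    virtual ~IAfEffectChainLike() = default;
    virtual int sessionId() const = 0;
    virtual std::size_t numberOfEffects() const = 0;
    // Factory keeps the concrete type out of client translation units.
    static std::shared_ptr<IAfEffectChainLike> create(int sessionId);
};

// Concrete implementation; in AudioFlinger this would live in its own .cpp.
class EffectChainImpl : public IAfEffectChainLike {
  public:
    explicit EffectChainImpl(int sessionId) : mSessionId(sessionId) {}
    int sessionId() const override { return mSessionId; }
    std::size_t numberOfEffects() const override { return mEffects.size(); }
  private:
    int mSessionId;
    std::vector<int> mEffects;  // stand-in for the real effect modules
};

std::shared_ptr<IAfEffectChainLike> IAfEffectChainLike::create(int sessionId) {
    return std::make_shared<EffectChainImpl>(sessionId);
}

// Client code holds only the interface type, mirroring
// sp<IAfEffectChain> chain = getOrphanEffectChain_l(sessionId);
int main() {
    std::shared_ptr<IAfEffectChainLike> chain = IAfEffectChainLike::create(42);
    return chain->sessionId() == 42 ? 0 : 1;
}
```

The new accessors used in the hunks above, such as numberOfEffects() and getEffectModule(0) replacing direct ec->mEffects access, serve the same purpose: callers no longer need the concrete class definition.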
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 8b1d70b..e2d340b 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -117,6 +117,9 @@
 #include "android/media/BnAudioRecord.h"
 #include "android/media/BnEffect.h"
 
+// include AudioFlinger component interfaces
+#include "IAfEffect.h"
+
 namespace android {
 
 class AudioMixer;
@@ -478,16 +481,24 @@
 
     // Internal dump utilities.
     static const int kDumpLockTimeoutNs = 1 * NANOS_PER_SECOND;
+public:
+    // TODO(b/288339104) extract to afutils
     static bool dumpTryLock(Mutex& mutex);
+private:
     void dumpPermissionDenial(int fd, const Vector<String16>& args);
     void dumpClients(int fd, const Vector<String16>& args);
     void dumpInternals(int fd, const Vector<String16>& args);
 
     SimpleLog mThreadLog{16}; // 16 Thread history limit
 
+public:
+    // TODO(b/288339104)
     class ThreadBase;
+private:
     void dumpToThreadLog_l(const sp<ThreadBase> &thread);
 
+public:
+    // TODO(b/288339104) Move to separate file
     // --- Client ---
     class Client : public RefBase {
       public:
@@ -504,6 +515,7 @@
         const pid_t         mPid;
         AllocatorFactory::ClientAllocator mClientAllocator;
     };
+private:
 
     // --- Notification Client ---
     class NotificationClient : public IBinder::DeathRecipient {
@@ -575,15 +587,12 @@
     class BitPerfectThread;
     class Track;
     class RecordTrack;
-    class EffectBase;
-    class EffectModule;
-    class EffectHandle;
-    class EffectChain;
-    class DeviceEffectProxy;
     class DeviceEffectManager;
+    // TODO(b/288339104) these should be separate files
+public:
     class PatchPanel;
     class DeviceEffectManagerCallback;
-
+private:
     struct AudioStreamIn;
     struct TeePatch;
     using TeePatches = std::vector<TeePatch>;
@@ -617,8 +626,6 @@
 
 #include "PatchCommandThread.h"
 
-#include "Effects.h"
-
 #include "DeviceEffectManager.h"
 
 #include "MelReporter.h"
@@ -819,17 +826,19 @@
                 // return ALREADY_EXISTS if a chain with the same session already exists in
                 // mOrphanEffectChains. Note that this should never happen as there is only one
                 // chain for a given session and it is attached to only one thread at a time.
-                status_t        putOrphanEffectChain_l(const sp<EffectChain>& chain);
+                status_t putOrphanEffectChain_l(const sp<IAfEffectChain>& chain);
                 // Get an effect chain for the specified session in mOrphanEffectChains and remove
                 // it if found. Returns 0 if not found (this is the most common case).
-                sp<EffectChain> getOrphanEffectChain_l(audio_session_t session);
+                sp<IAfEffectChain> getOrphanEffectChain_l(audio_session_t session);
                 // Called when the last effect handle on an effect instance is removed. If this
                 // effect belongs to an effect chain in mOrphanEffectChains, the chain is updated
                 // and removed from mOrphanEffectChains if it does not contain any effect.
                 // Return true if the effect was found in mOrphanEffectChains, false otherwise.
-                bool            updateOrphanEffectChains(const sp<EffectModule>& effect);
-
-                std::vector< sp<EffectModule> > purgeStaleEffects_l();
+public:
+// TODO(b/288339104) suggest better grouping
+                bool updateOrphanEffectChains(const sp<IAfEffectModule>& effect);
+private:
+                std::vector< sp<IAfEffectModule> > purgeStaleEffects_l();
 
                 void broadcastParametersToRecordThreads_l(const String8& keyValuePairs);
                 void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices);
@@ -879,7 +888,10 @@
                 // protects mClients and mNotificationClients.
                 // must be locked after mLock and ThreadBase::mLock if both must be locked
                 // avoids acquiring AudioFlinger::mLock from inside thread loop.
+public:
+    // TODO(b/288339104) access by getter.
     mutable     Mutex                               mClientLock;
+private:
                 // protected by mClientLock
                 DefaultKeyedVector< pid_t, wp<Client> >     mClients;   // see ~Client()
 
@@ -958,7 +970,7 @@
                 std::list<sp<audioflinger::SyncEvent>> mPendingSyncEvents;
 
                 // Effect chains without a valid thread
-                DefaultKeyedVector< audio_session_t , sp<EffectChain> > mOrphanEffectChains;
+                DefaultKeyedVector<audio_session_t, sp<IAfEffectChain>> mOrphanEffectChains;
 
                 // list of sessions for which a valid HW A/V sync ID was retrieved from the HAL
                 DefaultKeyedVector< audio_session_t , audio_hw_sync_t >mHwAvSyncIds;
@@ -1004,7 +1016,10 @@
 
     // protected by mLock
     PatchPanel mPatchPanel;
+public:
+    // TODO(b/288339104) access by getter.
     sp<EffectsFactoryHalInterface> mEffectsFactoryHal;
+private:
 
     const sp<PatchCommandThread> mPatchCommandThread;
     sp<DeviceEffectManager> mDeviceEffectManager;
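
The AudioFlinger.h changes above replace the nested EffectModule/EffectChain/... forward declarations with the IAfEffect.h interfaces, so callers hold sp<IAf...> and obtain instances through static create() factories while the concrete classes remain in Effects.cpp. A minimal sketch of that layering, assuming std::shared_ptr stands in for the Binder sp<> type and IFoo/FooImpl are made-up names:

// Illustrative sketch only -- interface in the header, concrete type hidden behind a factory.
#include <cstdio>
#include <memory>

class IFoo {
public:
    virtual ~IFoo() = default;
    virtual int id() const = 0;
    // Factory: the only way for outside code to obtain an instance.
    static std::shared_ptr<IFoo> create(int id);
};

namespace {  // concrete type is an implementation detail of this translation unit
class FooImpl : public IFoo {
public:
    explicit FooImpl(int id) : mId(id) {}
    int id() const override { return mId; }
private:
    const int mId;
};
}  // namespace

std::shared_ptr<IFoo> IFoo::create(int id) {
    return std::make_shared<FooImpl>(id);
}

int main() {
    std::shared_ptr<IFoo> foo = IFoo::create(7);   // the caller never names FooImpl
    printf("id=%d\n", foo->id());
    return 0;
}
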
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 4fb6138..f034e05 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -41,7 +41,7 @@
             patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
     Mutex::Autolock _l(mLock);
     for (auto& effect : mDeviceEffects) {
-        status_t status = effect.second->onCreatePatch(handle, patch);
+        status_t status = effect.second->onCreatePatch(handle, &patch); // TODO(b/288339104) void*
         ALOGV("%s Effect onCreatePatch status %d", __func__, status);
         ALOGW_IF(status == BAD_VALUE, "%s onCreatePatch error %d", __func__, status);
     }
@@ -56,7 +56,7 @@
 }
 
 // DeviceEffectManager::createEffect_l() must be called with AudioFlinger::mLock held
-sp<AudioFlinger::EffectHandle> AudioFlinger::DeviceEffectManager::createEffect_l(
+sp<IAfEffectHandle> AudioFlinger::DeviceEffectManager::createEffect_l(
         effect_descriptor_t *descriptor,
         const AudioDeviceTypeAddr& device,
         const sp<AudioFlinger::Client>& client,
@@ -66,8 +66,8 @@
         status_t *status,
         bool probe,
         bool notifyFramesProcessed) {
-    sp<DeviceEffectProxy> effect;
-    sp<EffectHandle> handle;
+    sp<IAfDeviceEffectProxy> effect;
+    sp<IAfEffectHandle> handle;
     status_t lStatus;
 
     lStatus = checkEffectCompatibility(descriptor);
@@ -82,18 +82,18 @@
         if (iter != mDeviceEffects.end()) {
             effect = iter->second;
         } else {
-            effect = new DeviceEffectProxy(device, mMyCallback,
+            effect = IAfDeviceEffectProxy::create(device, mMyCallback,
                     descriptor, mAudioFlinger.nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT),
                     notifyFramesProcessed);
         }
         // create effect handle and connect it to effect module
-        handle = new EffectHandle(effect, client, effectClient, 0 /*priority*/,
-                                  notifyFramesProcessed);
+        handle = IAfEffectHandle::create(
+                effect, client, effectClient, 0 /*priority*/, notifyFramesProcessed);
         lStatus = handle->initCheck();
         if (lStatus == NO_ERROR) {
             lStatus = effect->addHandle(handle.get());
             if (lStatus == NO_ERROR) {
-                lStatus = effect->init(patches);
+                lStatus = effect->init(&patches); // TODO(b/288339104) void*
                 if (lStatus == NAME_NOT_FOUND) {
                     lStatus = NO_ERROR;
                 }
@@ -165,7 +165,7 @@
         outStr.appendFormat("%*sEffect for device %s address %s:\n", 2, "",
                 ::android::toString(iter.first.mType).c_str(), iter.first.getAddress());
         write(fd, outStr.string(), outStr.size());
-        iter.second->dump(fd, 4);
+        iter.second->dump2(fd, 4);
     }
 
     if (locked) {
@@ -174,7 +174,7 @@
 }
 
 
-size_t AudioFlinger::DeviceEffectManager::removeEffect(const sp<DeviceEffectProxy>& effect)
+size_t AudioFlinger::DeviceEffectManager::removeEffect(const sp<IAfDeviceEffectProxy>& effect)
 {
     Mutex::Autolock _l(mLock);
     mDeviceEffects.erase(effect->device());
@@ -182,13 +182,13 @@
 }
 
 bool AudioFlinger::DeviceEffectManagerCallback::disconnectEffectHandle(
-        EffectHandle *handle, bool unpinIfLast) {
-    sp<EffectBase> effectBase = handle->effect().promote();
+        IAfEffectHandle *handle, bool unpinIfLast) {
+    sp<IAfEffectBase> effectBase = handle->effect().promote();
     if (effectBase == nullptr) {
         return false;
     }
 
-    sp<DeviceEffectProxy> effect = effectBase->asDeviceEffectProxy();
+    sp<IAfDeviceEffectProxy> effect = effectBase->asDeviceEffectProxy();
     if (effect == nullptr) {
         return false;
     }
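
disconnectEffectHandle() above follows a promote-then-downcast shape: lock the weak reference, bail out if the effect is already gone, then ask the base interface for its proxy form and bail out if it is some other effect kind. A standalone sketch of that shape with std::weak_ptr (Base, Proxy and asProxy() are hypothetical stand-ins for IAfEffectBase, IAfDeviceEffectProxy and asDeviceEffectProxy()):

// Illustrative sketch only -- weak-reference promotion followed by a type-checked downcast.
#include <cstdio>
#include <memory>

struct Proxy;

struct Base : std::enable_shared_from_this<Base> {
    virtual ~Base() = default;
    // Virtual "downcast" accessor: only the proxy subtype returns non-null.
    virtual std::shared_ptr<Proxy> asProxy() { return nullptr; }
};

struct Proxy : Base {
    std::shared_ptr<Proxy> asProxy() override {
        return std::static_pointer_cast<Proxy>(shared_from_this());
    }
};

// Returns true only if the weak reference is still alive *and* refers to a Proxy.
bool disconnectIfProxy(const std::weak_ptr<Base>& weakEffect) {
    std::shared_ptr<Base> base = weakEffect.lock();   // the "promote()" step
    if (!base) return false;                          // owner already released it
    std::shared_ptr<Proxy> proxy = base->asProxy();   // type-checked downcast
    if (!proxy) return false;                         // some other effect kind
    // ... release proxy-specific resources here ...
    return true;
}

int main() {
    auto p = std::make_shared<Proxy>();
    std::weak_ptr<Base> w = p;
    printf("%d\n", disconnectIfProxy(w));  // 1 while p is alive
    p.reset();
    printf("%d\n", disconnectIfProxy(w));  // 0 after the owner released it
    return 0;
}
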
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index b87f830..3a33a71 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -30,7 +30,7 @@
         mAudioFlinger.mPatchCommandThread->addListener(this);
     }
 
-    sp<EffectHandle> createEffect_l(effect_descriptor_t *descriptor,
+    sp<IAfEffectHandle> createEffect_l(effect_descriptor_t *descriptor,
                 const AudioDeviceTypeAddr& device,
                 const sp<AudioFlinger::Client>& client,
                 const sp<media::IEffectClient>& effectClient,
@@ -40,7 +40,7 @@
                 bool probe,
                 bool notifyFramesProcessed);
 
-    size_t removeEffect(const sp<DeviceEffectProxy>& effect);
+    size_t removeEffect(const sp<IAfDeviceEffectProxy>& effect);
     status_t createEffectHal(const effect_uuid_t *pEffectUuid,
            int32_t sessionId, int32_t deviceId,
            sp<EffectHalInterface> *effect);
@@ -69,9 +69,10 @@
     Mutex mLock;
     AudioFlinger &mAudioFlinger;
     const sp<DeviceEffectManagerCallback> mMyCallback;
-    std::map<AudioDeviceTypeAddr, sp<DeviceEffectProxy>> mDeviceEffects;
+    std::map<AudioDeviceTypeAddr, sp<IAfDeviceEffectProxy>> mDeviceEffects;
 };
 
+public: // TODO(b/288339104) extract inner class.
 class DeviceEffectManagerCallback : public EffectCallbackInterface {
 public:
     explicit DeviceEffectManagerCallback(DeviceEffectManager& manager)
@@ -84,7 +85,9 @@
             }
     status_t allocateHalBuffer(size_t size __unused,
             sp<EffectBufferHalInterface>* buffer __unused) override { return NO_ERROR; }
-    bool updateOrphanEffectChains(const sp<EffectBase>& effect __unused) override { return false; }
+    bool updateOrphanEffectChains(const sp<IAfEffectBase>& effect __unused) override {
+        return false;
+    }
 
     audio_io_handle_t io() const override  { return AUDIO_IO_HANDLE_NONE; }
     bool isOutput() const override { return false; }
@@ -112,19 +115,19 @@
         return NO_ERROR;
     }
 
-    bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
+    bool disconnectEffectHandle(IAfEffectHandle *handle, bool unpinIfLast) override;
     void setVolumeForOutput(float left __unused, float right __unused) const override {}
 
     // check if effects should be suspended or restored when a given effect is enable or disabled
-    void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect __unused,
+    void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect __unused,
                           bool enabled __unused, bool threadLocked __unused) override {}
     void resetVolume() override {}
     product_strategy_t strategy() const override  { return static_cast<product_strategy_t>(0); }
     int32_t activeTrackCnt() const override { return 0; }
-    void onEffectEnable(const sp<EffectBase>& effect __unused) override {}
-    void onEffectDisable(const sp<EffectBase>& effect __unused) override {}
+    void onEffectEnable(const sp<IAfEffectBase>& effect __unused) override {}
+    void onEffectDisable(const sp<IAfEffectBase>& effect __unused) override {}
 
-    wp<EffectChain> chain() const override { return nullptr; }
+    wp<IAfEffectChain> chain() const override { return nullptr; }
 
     bool isAudioPolicyReady() const override {
         return mManager.audioFlinger().isAudioPolicyReady();
@@ -143,3 +146,4 @@
 private:
     DeviceEffectManager& mManager;
 };
+private:
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 822ea93..8cca719 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -45,6 +45,7 @@
 
 #include "AudioFlinger.h"
 #include "EffectConfiguration.h"
+#include "Effects.h"
 
 // ----------------------------------------------------------------------------
 
@@ -93,9 +94,9 @@
 // ----------------------------------------------------------------------------
 
 #undef LOG_TAG
-#define LOG_TAG "AudioFlinger::EffectBase"
+#define LOG_TAG "EffectBase"
 
-AudioFlinger::EffectBase::EffectBase(const sp<AudioFlinger::EffectCallbackInterface>& callback,
+EffectBase::EffectBase(const sp<EffectCallbackInterface>& callback,
                                         effect_descriptor_t *desc,
                                         int id,
                                         audio_session_t sessionId,
@@ -107,7 +108,7 @@
 }
 
 // must be called with EffectModule::mLock held
-status_t AudioFlinger::EffectBase::setEnabled_l(bool enabled)
+status_t EffectBase::setEnabled_l(bool enabled)
 {
 
     ALOGV("setEnabled %p enabled %d", this, enabled);
@@ -139,7 +140,7 @@
             return NO_ERROR; // simply ignore as we are being destroyed
         }
         for (size_t i = 1; i < mHandles.size(); i++) {
-            EffectHandle *h = mHandles[i];
+            IAfEffectHandle *h = mHandles[i];
             if (h != NULL && !h->disconnected()) {
                 h->setEnabled(enabled);
             }
@@ -148,7 +149,7 @@
     return NO_ERROR;
 }
 
-status_t AudioFlinger::EffectBase::setEnabled(bool enabled, bool fromHandle)
+status_t EffectBase::setEnabled(bool enabled, bool fromHandle)
 {
     status_t status;
     {
@@ -169,7 +170,7 @@
     return status;
 }
 
-bool AudioFlinger::EffectBase::isEnabled() const
+bool EffectBase::isEnabled() const
 {
     switch (mState) {
     case RESTART:
@@ -185,29 +186,29 @@
     }
 }
 
-void AudioFlinger::EffectBase::setSuspended(bool suspended)
+void EffectBase::setSuspended(bool suspended)
 {
     Mutex::Autolock _l(mLock);
     mSuspended = suspended;
 }
 
-bool AudioFlinger::EffectBase::suspended() const
+bool EffectBase::suspended() const
 {
     Mutex::Autolock _l(mLock);
     return mSuspended;
 }
 
-status_t AudioFlinger::EffectBase::addHandle(EffectHandle *handle)
+status_t EffectBase::addHandle(IAfEffectHandle *handle)
 {
     status_t status;
 
     Mutex::Autolock _l(mLock);
     int priority = handle->priority();
     size_t size = mHandles.size();
-    EffectHandle *controlHandle = NULL;
+    IAfEffectHandle *controlHandle = nullptr;
     size_t i;
     for (i = 0; i < size; i++) {
-        EffectHandle *h = mHandles[i];
+        IAfEffectHandle *h = mHandles[i];
         if (h == NULL || h->disconnected()) {
             continue;
         }
@@ -236,7 +237,7 @@
     return status;
 }
 
-status_t AudioFlinger::EffectBase::updatePolicyState()
+status_t EffectBase::updatePolicyState()
 {
     status_t status = NO_ERROR;
     bool doRegister = false;
@@ -266,7 +267,7 @@
         }
         // enable effect when registered according to enable state requested by controlling handle
         if (mHandles.size() > 0) {
-            EffectHandle *handle = controlHandle_l();
+            IAfEffectHandle *handle = controlHandle_l();
             if (handle != nullptr && mPolicyEnabled != handle->enabled()) {
                 doEnable = true;
                 mPolicyEnabled = handle->enabled();
@@ -305,13 +306,13 @@
 }
 
 
-ssize_t AudioFlinger::EffectBase::removeHandle(EffectHandle *handle)
+ssize_t EffectBase::removeHandle(IAfEffectHandle *handle)
 {
     Mutex::Autolock _l(mLock);
     return removeHandle_l(handle);
 }
 
-ssize_t AudioFlinger::EffectBase::removeHandle_l(EffectHandle *handle)
+ssize_t EffectBase::removeHandle_l(IAfEffectHandle *handle)
 {
     size_t size = mHandles.size();
     size_t i;
@@ -329,7 +330,7 @@
     mHandles.removeAt(i);
     // if removed from first place, move effect control from this handle to next in line
     if (i == 0) {
-        EffectHandle *h = controlHandle_l();
+        IAfEffectHandle *h = controlHandle_l();
         if (h != NULL) {
             h->setControl(true /*hasControl*/, true /*signal*/ , handle->enabled() /*enabled*/);
         }
@@ -346,11 +347,11 @@
 }
 
 // must be called with EffectModule::mLock held
-AudioFlinger::EffectHandle *AudioFlinger::EffectBase::controlHandle_l()
+IAfEffectHandle *EffectBase::controlHandle_l()
 {
     // the first valid handle in the list has control over the module
     for (size_t i = 0; i < mHandles.size(); i++) {
-        EffectHandle *h = mHandles[i];
+        IAfEffectHandle *h = mHandles[i];
         if (h != NULL && !h->disconnected()) {
             return h;
         }
@@ -360,7 +361,7 @@
 }
 
 // unsafe method called when the effect parent thread has been destroyed
-ssize_t AudioFlinger::EffectBase::disconnectHandle(EffectHandle *handle, bool unpinIfLast)
+ssize_t EffectBase::disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast)
 {
     const auto callback = getCallback();
     ALOGV("disconnect() %p handle %p", this, handle);
@@ -378,11 +379,11 @@
     return numHandles;
 }
 
-bool AudioFlinger::EffectBase::purgeHandles()
+bool EffectBase::purgeHandles()
 {
     bool enabled = false;
     Mutex::Autolock _l(mLock);
-    EffectHandle *handle = controlHandle_l();
+    IAfEffectHandle *handle = controlHandle_l();
     if (handle != NULL) {
         enabled = handle->enabled();
     }
@@ -390,7 +391,7 @@
     return enabled;
 }
 
-void AudioFlinger::EffectBase::checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) {
+void EffectBase::checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) {
     getCallback()->checkSuspendOnEffectEnabled(this, enabled, threadLocked);
 }
 
@@ -499,7 +500,7 @@
     return s;
 }
 
-void AudioFlinger::EffectBase::dump(int fd, const Vector<String16>& args __unused)
+void EffectBase::dump(int fd, const Vector<String16>& args __unused) const
 NO_THREAD_SAFETY_ANALYSIS // conditional try lock
 {
     String8 result;
@@ -537,7 +538,7 @@
     result.append("\t\t\t  Pid Priority Ctrl Locked client server\n");
     char buffer[256];
     for (size_t i = 0; i < mHandles.size(); ++i) {
-        EffectHandle *handle = mHandles[i];
+        IAfEffectHandle *handle = mHandles[i];
         if (handle != NULL && !handle->disconnected()) {
             handle->dumpToBuffer(buffer, sizeof(buffer));
             result.append(buffer);
@@ -555,9 +556,9 @@
 // ----------------------------------------------------------------------------
 
 #undef LOG_TAG
-#define LOG_TAG "AudioFlinger::EffectModule"
+#define LOG_TAG "EffectModule"
 
-AudioFlinger::EffectModule::EffectModule(const sp<AudioFlinger::EffectCallbackInterface>& callback,
+EffectModule::EffectModule(const sp<EffectCallbackInterface>& callback,
                                          effect_descriptor_t *desc,
                                          int id,
                                          audio_session_t sessionId,
@@ -599,7 +600,7 @@
     ALOGV("Constructor Error %d", mStatus);
 }
 
-AudioFlinger::EffectModule::~EffectModule()
+EffectModule::~EffectModule()
 {
     ALOGV("Destructor %p", this);
     if (mEffectInterface != 0) {
@@ -612,7 +613,7 @@
 
 }
 
-bool AudioFlinger::EffectModule::updateState() {
+bool EffectModule::updateState() {
     Mutex::Autolock _l(mLock);
 
     bool started = false;
@@ -667,7 +668,7 @@
     return started;
 }
 
-void AudioFlinger::EffectModule::process()
+void EffectModule::process()
 {
     Mutex::Autolock _l(mLock);
 
@@ -821,7 +822,7 @@
     }
 }
 
-void AudioFlinger::EffectModule::reset_l()
+void EffectModule::reset_l()
 {
     if (mStatus != NO_ERROR || mEffectInterface == 0) {
         return;
@@ -829,7 +830,7 @@
     mEffectInterface->command(EFFECT_CMD_RESET, 0, NULL, 0, NULL);
 }
 
-status_t AudioFlinger::EffectModule::configure()
+status_t EffectModule::configure()
 {
     ALOGVV("configure() started");
     status_t status;
@@ -1005,8 +1006,9 @@
     // mConfig.outputCfg.buffer.frameCount cannot be zero.
     mMaxDisableWaitCnt = (uint32_t)std::max(
             (uint64_t)1, // mMaxDisableWaitCnt must be greater than zero.
-            (uint64_t)MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate
-                / ((uint64_t)1000 * mConfig.outputCfg.buffer.frameCount));
+            (uint64_t)mConfig.outputCfg.buffer.frameCount == 0 ? 1
+                : (MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate
+                / ((uint64_t)1000 * mConfig.outputCfg.buffer.frameCount)));
 
 exit:
     // TODO: consider clearing mConfig on error.
@@ -1015,7 +1017,7 @@
     return status;
 }
 
-status_t AudioFlinger::EffectModule::init()
+status_t EffectModule::init()
 {
     Mutex::Autolock _l(mLock);
     if (mEffectInterface == 0) {
@@ -1034,7 +1036,7 @@
     return status;
 }
 
-void AudioFlinger::EffectModule::addEffectToHal_l()
+void EffectModule::addEffectToHal_l()
 {
     if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
          (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
@@ -1048,7 +1050,7 @@
 }
 
 // start() must be called with PlaybackThread::mLock or EffectChain::mLock held
-status_t AudioFlinger::EffectModule::start()
+status_t EffectModule::start()
 {
     status_t status;
     {
@@ -1061,7 +1063,7 @@
     return status;
 }
 
-status_t AudioFlinger::EffectModule::start_l()
+status_t EffectModule::start_l()
 {
     if (mEffectInterface == 0) {
         return NO_INIT;
@@ -1085,13 +1087,13 @@
     return status;
 }
 
-status_t AudioFlinger::EffectModule::stop()
+status_t EffectModule::stop()
 {
     Mutex::Autolock _l(mLock);
     return stop_l();
 }
 
-status_t AudioFlinger::EffectModule::stop_l()
+status_t EffectModule::stop_l()
 {
     if (mEffectInterface == 0) {
         return NO_INIT;
@@ -1125,7 +1127,7 @@
 }
 
 // must be called with EffectChain::mLock held
-void AudioFlinger::EffectModule::release_l()
+void EffectModule::release_l()
 {
     if (mEffectInterface != 0) {
         removeEffectFromHal_l();
@@ -1135,7 +1137,7 @@
     }
 }
 
-status_t AudioFlinger::EffectModule::removeEffectFromHal_l()
+status_t EffectModule::removeEffectFromHal_l()
 {
     if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
              (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
@@ -1155,7 +1157,7 @@
     return remainder == 0 ? 0 : divisor - remainder;
 }
 
-status_t AudioFlinger::EffectModule::command(int32_t cmdCode,
+status_t EffectModule::command(int32_t cmdCode,
                      const std::vector<uint8_t>& cmdData,
                      int32_t maxReplySize,
                      std::vector<uint8_t>* reply)
@@ -1228,7 +1230,7 @@
     reply->resize(status == NO_ERROR ? replySize : 0);
     if (cmdCode != EFFECT_CMD_GET_PARAM && status == NO_ERROR) {
         for (size_t i = 1; i < mHandles.size(); i++) {
-            EffectHandle *h = mHandles[i];
+            IAfEffectHandle *h = mHandles[i];
             if (h != NULL && !h->disconnected()) {
                 h->commandExecuted(cmdCode, cmdData, *reply);
             }
@@ -1237,7 +1239,7 @@
     return status;
 }
 
-bool AudioFlinger::EffectModule::isProcessEnabled() const
+bool EffectModule::isProcessEnabled() const
 {
     if (mStatus != NO_ERROR) {
         return false;
@@ -1257,17 +1259,17 @@
     }
 }
 
-bool AudioFlinger::EffectModule::isOffloadedOrDirect() const
+bool EffectModule::isOffloadedOrDirect() const
 {
     return getCallback()->isOffloadOrDirect();
 }
 
-bool AudioFlinger::EffectModule::isVolumeControlEnabled() const
+bool EffectModule::isVolumeControlEnabled() const
 {
     return (isVolumeControl() && (isOffloadedOrDirect() ? isEnabled() : isProcessEnabled()));
 }
 
-void AudioFlinger::EffectModule::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
+void EffectModule::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
     ALOGVV("setInBuffer %p",(&buffer));
 
     // mConfig.inputCfg.buffer.frameCount may be zero if configure() is not called yet.
@@ -1313,7 +1315,7 @@
     }
 }
 
-void AudioFlinger::EffectModule::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
+void EffectModule::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
     ALOGVV("setOutBuffer %p",(&buffer));
 
     // mConfig.outputCfg.buffer.frameCount may be zero if configure() is not called yet.
@@ -1355,7 +1357,7 @@
     }
 }
 
-status_t AudioFlinger::EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
+status_t EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
 {
     AutoLockReentrant _l(mLock, mSetVolumeReentrantTid);
     if (mStatus != NO_ERROR) {
@@ -1373,7 +1375,7 @@
     return status;
 }
 
-status_t AudioFlinger::EffectModule::setVolumeInternal(
+status_t EffectModule::setVolumeInternal(
         uint32_t *left, uint32_t *right, bool controller) {
     uint32_t volume[2] = {*left, *right};
     uint32_t *pVolume = controller ? volume : nullptr;
@@ -1390,7 +1392,7 @@
     return status;
 }
 
-void AudioFlinger::EffectChain::setVolumeForOutput_l(uint32_t left, uint32_t right)
+void EffectChain::setVolumeForOutput_l(uint32_t left, uint32_t right)
 {
     // for offload or direct thread, if the effect chain has non-offloadable
     // effect and any effect module within the chain has volume control, then
@@ -1403,7 +1405,7 @@
     }
 }
 
-status_t AudioFlinger::EffectModule::sendSetAudioDevicesCommand(
+status_t EffectModule::sendSetAudioDevicesCommand(
         const AudioDeviceTypeAddrVector &devices, uint32_t cmdCode)
 {
     audio_devices_t deviceType = deviceTypesToBitMask(getAudioDeviceTypes(devices));
@@ -1429,17 +1431,17 @@
     return status;
 }
 
-status_t AudioFlinger::EffectModule::setDevices(const AudioDeviceTypeAddrVector &devices)
+status_t EffectModule::setDevices(const AudioDeviceTypeAddrVector &devices)
 {
     return sendSetAudioDevicesCommand(devices, EFFECT_CMD_SET_DEVICE);
 }
 
-status_t AudioFlinger::EffectModule::setInputDevice(const AudioDeviceTypeAddr &device)
+status_t EffectModule::setInputDevice(const AudioDeviceTypeAddr &device)
 {
     return sendSetAudioDevicesCommand({device}, EFFECT_CMD_SET_INPUT_DEVICE);
 }
 
-status_t AudioFlinger::EffectModule::setMode(audio_mode_t mode)
+status_t EffectModule::setMode(audio_mode_t mode)
 {
     Mutex::Autolock _l(mLock);
     if (mStatus != NO_ERROR) {
@@ -1461,7 +1463,7 @@
     return status;
 }
 
-status_t AudioFlinger::EffectModule::setAudioSource(audio_source_t source)
+status_t EffectModule::setAudioSource(audio_source_t source)
 {
     Mutex::Autolock _l(mLock);
     if (mStatus != NO_ERROR) {
@@ -1479,7 +1481,7 @@
     return status;
 }
 
-status_t AudioFlinger::EffectModule::setOffloaded(bool offloaded, audio_io_handle_t io)
+status_t EffectModule::setOffloaded(bool offloaded, audio_io_handle_t io)
 {
     Mutex::Autolock _l(mLock);
     if (mStatus != NO_ERROR) {
@@ -1512,22 +1514,22 @@
     return status;
 }
 
-bool AudioFlinger::EffectModule::isOffloaded() const
+bool EffectModule::isOffloaded() const
 {
     Mutex::Autolock _l(mLock);
     return mOffloaded;
 }
 
 /*static*/
-bool AudioFlinger::EffectModule::isHapticGenerator(const effect_uuid_t *type) {
+bool IAfEffectModule::isHapticGenerator(const effect_uuid_t *type) {
     return memcmp(type, FX_IID_HAPTICGENERATOR, sizeof(effect_uuid_t)) == 0;
 }
 
-bool AudioFlinger::EffectModule::isHapticGenerator() const {
-    return isHapticGenerator(&mDescriptor.type);
+bool EffectModule::isHapticGenerator() const {
+    return IAfEffectModule::isHapticGenerator(&mDescriptor.type);
 }
 
-status_t AudioFlinger::EffectModule::setHapticIntensity(int id, os::HapticScale intensity)
+status_t EffectModule::setHapticIntensity(int id, os::HapticScale intensity)
 {
     if (mStatus != NO_ERROR) {
         return mStatus;
@@ -1553,7 +1555,7 @@
     return status;
 }
 
-status_t AudioFlinger::EffectModule::setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo)
+status_t EffectModule::setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo)
 {
     if (mStatus != NO_ERROR) {
         return mStatus;
@@ -1583,7 +1585,7 @@
     return status;
 }
 
-status_t AudioFlinger::EffectModule::getConfigs(
+status_t EffectModule::getConfigs(
         audio_config_base_t* inputCfg, audio_config_base_t* outputCfg, bool* isOutput) const {
     Mutex::Autolock _l(mLock);
     if (mConfig.inputCfg.mask == 0 || mConfig.outputCfg.mask == 0) {
@@ -1614,7 +1616,7 @@
     return ss.str();
 }
 
-void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args)
+void EffectModule::dump(int fd, const Vector<String16>& args) const
 NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     EffectBase::dump(fd, args);
@@ -1672,9 +1674,21 @@
 // ----------------------------------------------------------------------------
 
 #undef LOG_TAG
-#define LOG_TAG "AudioFlinger::EffectHandle"
+#define LOG_TAG "EffectHandle"
 
-AudioFlinger::EffectHandle::EffectHandle(const sp<EffectBase>& effect,
+/* static */
+sp<IAfEffectHandle> IAfEffectHandle::create(
+        const sp<IAfEffectBase>& effect,
+        const sp<RefBase /*AudioFlinger::Client */>& client, // TODO(b/288339104) update type
+        const sp<media::IEffectClient>& effectClient,
+        int32_t priority, bool notifyFramesProcessed)
+{
+    return sp<EffectHandle>::make(
+            effect, sp<AudioFlinger::Client>::cast(client),
+            effectClient, priority, notifyFramesProcessed);
+}
+
+EffectHandle::EffectHandle(const sp<IAfEffectBase>& effect,
                                          const sp<AudioFlinger::Client>& client,
                                          const sp<media::IEffectClient>& effectClient,
                                          int32_t priority, bool notifyFramesProcessed)
@@ -1709,7 +1723,7 @@
     mBuffer = (uint8_t *)mCblk + bufOffset;
 }
 
-AudioFlinger::EffectHandle::~EffectHandle()
+EffectHandle::~EffectHandle()
 {
     ALOGV("Destructor %p", this);
     disconnect(false);
@@ -1742,7 +1756,7 @@
     return methodStatistics;
 }
 
-status_t AudioFlinger::EffectHandle::onTransact(
+status_t EffectHandle::onTransact(
         uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
     const std::string methodName = getIEffectStatistics().getMethodForCode(code);
     mediautils::TimeCheck check(
@@ -1757,7 +1771,7 @@
     return BnEffect::onTransact(code, data, reply, flags);
 }
 
-status_t AudioFlinger::EffectHandle::initCheck()
+status_t EffectHandle::initCheck() const
 {
     return mClient == 0 || mCblkMemory != 0 ? OK : NO_MEMORY;
 }
@@ -1773,11 +1787,11 @@
         std::move(_tmp.value());                        \
     })
 
-Status AudioFlinger::EffectHandle::enable(int32_t* _aidl_return)
+Status EffectHandle::enable(int32_t* _aidl_return)
 {
     AutoMutex _l(mLock);
     ALOGV("enable %p", this);
-    sp<EffectBase> effect = mEffect.promote();
+    sp<IAfEffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
         RETURN(DEAD_OBJECT);
     }
@@ -1811,11 +1825,11 @@
     RETURN(status);
 }
 
-Status AudioFlinger::EffectHandle::disable(int32_t* _aidl_return)
+Status EffectHandle::disable(int32_t* _aidl_return)
 {
     ALOGV("disable %p", this);
     AutoMutex _l(mLock);
-    sp<EffectBase> effect = mEffect.promote();
+    sp<IAfEffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
         RETURN(DEAD_OBJECT);
     }
@@ -1838,14 +1852,14 @@
     RETURN(status);
 }
 
-Status AudioFlinger::EffectHandle::disconnect()
+Status EffectHandle::disconnect()
 {
     ALOGV("%s %p", __FUNCTION__, this);
     disconnect(true);
     return Status::ok();
 }
 
-void AudioFlinger::EffectHandle::disconnect(bool unpinIfLast)
+void EffectHandle::disconnect(bool unpinIfLast)
 {
     AutoMutex _l(mLock);
     ALOGV("disconnect(%s) %p", unpinIfLast ? "true" : "false", this);
@@ -1857,7 +1871,7 @@
     }
     mDisconnected = true;
     {
-        sp<EffectBase> effect = mEffect.promote();
+        sp<IAfEffectBase> effect = mEffect.promote();
         if (effect != 0) {
             if (effect->disconnectHandle(this, unpinIfLast) > 0) {
                 ALOGW("%s Effect handle %p disconnected after thread destruction",
@@ -1879,19 +1893,19 @@
     }
 }
 
-Status AudioFlinger::EffectHandle::getCblk(media::SharedFileRegion* _aidl_return) {
+Status EffectHandle::getCblk(media::SharedFileRegion* _aidl_return) {
     LOG_ALWAYS_FATAL_IF(!convertIMemoryToSharedFileRegion(mCblkMemory, _aidl_return));
     return Status::ok();
 }
 
-Status AudioFlinger::EffectHandle::getConfig(
+Status EffectHandle::getConfig(
         media::EffectConfig* _config, int32_t* _aidl_return) {
     AutoMutex _l(mLock);
-    sp<EffectBase> effect = mEffect.promote();
+    sp<IAfEffectBase> effect = mEffect.promote();
     if (effect == nullptr || mDisconnected) {
         RETURN(DEAD_OBJECT);
     }
-    sp<EffectModule> effectModule = effect->asEffectModule();
+    sp<IAfEffectModule> effectModule = effect->asEffectModule();
     if (effectModule == nullptr) {
         RETURN(INVALID_OPERATION);
     }
@@ -1910,7 +1924,7 @@
     RETURN(status);
 }
 
-Status AudioFlinger::EffectHandle::command(int32_t cmdCode,
+Status EffectHandle::command(int32_t cmdCode,
                        const std::vector<uint8_t>& cmdData,
                        int32_t maxResponseSize,
                        std::vector<uint8_t>* response,
@@ -1954,7 +1968,7 @@
     }
 
     AutoMutex _l(mLock);
-    sp<EffectBase> effect = mEffect.promote();
+    sp<IAfEffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
         RETURN(DEAD_OBJECT);
     }
@@ -2043,7 +2057,7 @@
     RETURN(status);
 }
 
-void AudioFlinger::EffectHandle::setControl(bool hasControl, bool signal, bool enabled)
+void EffectHandle::setControl(bool hasControl, bool signal, bool enabled)
 {
     ALOGV("setControl %p control %d", this, hasControl);
 
@@ -2055,7 +2069,7 @@
     }
 }
 
-void AudioFlinger::EffectHandle::commandExecuted(uint32_t cmdCode,
+void EffectHandle::commandExecuted(uint32_t cmdCode,
                          const std::vector<uint8_t>& cmdData,
                          const std::vector<uint8_t>& replyData)
 {
@@ -2066,21 +2080,21 @@
 
 
 
-void AudioFlinger::EffectHandle::setEnabled(bool enabled)
+void EffectHandle::setEnabled(bool enabled)
 {
     if (mEffectClient != 0) {
         mEffectClient->enableStatusChanged(enabled);
     }
 }
 
-void AudioFlinger::EffectHandle::framesProcessed(int32_t frames) const
+void EffectHandle::framesProcessed(int32_t frames) const
 {
     if (mEffectClient != 0 && mNotifyFramesProcessed) {
         mEffectClient->framesProcessed(frames);
     }
 }
 
-void AudioFlinger::EffectHandle::dumpToBuffer(char* buffer, size_t size)
+void EffectHandle::dumpToBuffer(char* buffer, size_t size) const
 NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     bool locked = mCblk != NULL && AudioFlinger::dumpTryLock(mCblk->lock);
@@ -2100,16 +2114,25 @@
 }
 
 #undef LOG_TAG
-#define LOG_TAG "AudioFlinger::EffectChain"
+#define LOG_TAG "EffectChain"
 
-AudioFlinger::EffectChain::EffectChain(const wp<ThreadBase>& thread,
+/* static */
+sp<IAfEffectChain> IAfEffectChain::create(
+        const wp<Thread /*ThreadBase*/>& wThread,  // TODO(b/288339104) update type
+        audio_session_t sessionId)
+{
+    // TODO(b/288339104) no weak pointer cast.
+    return sp<EffectChain>::make(sp<AudioFlinger::ThreadBase>::cast(wThread.promote()), sessionId);
+}
+
+EffectChain::EffectChain(const wp<AudioFlinger::ThreadBase>& thread,
                                        audio_session_t sessionId)
     : mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0),
       mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
       mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX),
       mEffectCallback(new EffectCallback(wp<EffectChain>(this), thread))
 {
-    sp<ThreadBase> p = thread.promote();
+    sp<AudioFlinger::ThreadBase> p = thread.promote();
     if (p == nullptr) {
         return;
     }
@@ -2118,13 +2141,13 @@
                                     p->frameCount();
 }
 
-AudioFlinger::EffectChain::~EffectChain()
+EffectChain::~EffectChain()
 {
 }
 
-// getEffectFromDesc_l() must be called with ThreadBase::mLock held
-sp<AudioFlinger::EffectModule> AudioFlinger::EffectChain::getEffectFromDesc_l(
-        effect_descriptor_t *descriptor)
+// getEffectFromDesc_l() must be called with AudioFlinger::ThreadBase::mLock held
+sp<IAfEffectModule> EffectChain::getEffectFromDesc_l(
+        effect_descriptor_t *descriptor) const
 {
     size_t size = mEffects.size();
 
@@ -2136,8 +2159,8 @@
     return 0;
 }
 
-// getEffectFromId_l() must be called with ThreadBase::mLock held
-sp<AudioFlinger::EffectModule> AudioFlinger::EffectChain::getEffectFromId_l(int id)
+// getEffectFromId_l() must be called with AudioFlinger::ThreadBase::mLock held
+sp<IAfEffectModule> EffectChain::getEffectFromId_l(int id) const
 {
     size_t size = mEffects.size();
 
@@ -2150,9 +2173,9 @@
     return 0;
 }
 
-// getEffectFromType_l() must be called with ThreadBase::mLock held
-sp<AudioFlinger::EffectModule> AudioFlinger::EffectChain::getEffectFromType_l(
-        const effect_uuid_t *type)
+// getEffectFromType_l() must be called with AudioFlinger::ThreadBase::mLock held
+sp<IAfEffectModule> EffectChain::getEffectFromType_l(
+        const effect_uuid_t *type) const
 {
     size_t size = mEffects.size();
 
@@ -2164,7 +2187,7 @@
     return 0;
 }
 
-std::vector<int> AudioFlinger::EffectChain::getEffectIds()
+std::vector<int> EffectChain::getEffectIds() const
 {
     std::vector<int> ids;
     Mutex::Autolock _l(mLock);
@@ -2174,14 +2197,14 @@
     return ids;
 }
 
-void AudioFlinger::EffectChain::clearInputBuffer()
+void EffectChain::clearInputBuffer()
 {
     Mutex::Autolock _l(mLock);
     clearInputBuffer_l();
 }
 
 // Must be called with EffectChain::mLock locked
-void AudioFlinger::EffectChain::clearInputBuffer_l()
+void EffectChain::clearInputBuffer_l()
 {
     if (mInBuffer == NULL) {
         return;
@@ -2194,7 +2217,7 @@
 }
 
 // Must be called with EffectChain::mLock locked
-void AudioFlinger::EffectChain::process_l()
+void EffectChain::process_l()
 {
     // never process effects when:
     // - on an OFFLOAD thread
@@ -2245,8 +2268,8 @@
     }
 }
 
-// createEffect_l() must be called with ThreadBase::mLock held
-status_t AudioFlinger::EffectChain::createEffect_l(sp<EffectModule>& effect,
+// createEffect_l() must be called with AudioFlinger::ThreadBase::mLock held
+status_t EffectChain::createEffect_l(sp<IAfEffectModule>& effect,
                                                    effect_descriptor_t *desc,
                                                    int id,
                                                    audio_session_t sessionId,
@@ -2264,14 +2287,14 @@
     return lStatus;
 }
 
-// addEffect_l() must be called with ThreadBase::mLock held
-status_t AudioFlinger::EffectChain::addEffect_l(const sp<EffectModule>& effect)
+// addEffect_l() must be called with AudioFlinger::ThreadBase::mLock held
+status_t EffectChain::addEffect_l(const sp<IAfEffectModule>& effect)
 {
     Mutex::Autolock _l(mLock);
     return addEffect_ll(effect);
 }
-// addEffect_l() must be called with ThreadBase::mLock and EffectChain::mLock held
-status_t AudioFlinger::EffectChain::addEffect_ll(const sp<EffectModule>& effect)
+// addEffect_ll() must be called with AudioFlinger::ThreadBase::mLock and EffectChain::mLock held
+status_t EffectChain::addEffect_ll(const sp<IAfEffectModule>& effect)
 {
     effect->setCallback(mEffectCallback);
 
@@ -2351,7 +2374,7 @@
     return NO_ERROR;
 }
 
-ssize_t AudioFlinger::EffectChain::getInsertIndex(const effect_descriptor_t& desc) {
+ssize_t EffectChain::getInsertIndex(const effect_descriptor_t& desc) {
     // Insert effects are inserted at the end of mEffects vector as they are processed
     //  after track and auxiliary effects.
     // Insert effect order as a function of indicated preference:
@@ -2424,8 +2447,8 @@
     return idx_insert;
 }
 
-// removeEffect_l() must be called with ThreadBase::mLock held
-size_t AudioFlinger::EffectChain::removeEffect_l(const sp<EffectModule>& effect,
+// removeEffect_l() must be called with AudioFlinger::ThreadBase::mLock held
+size_t EffectChain::removeEffect_l(const sp<IAfEffectModule>& effect,
                                                  bool release)
 {
     Mutex::Autolock _l(mLock);
@@ -2472,8 +2495,8 @@
     return mEffects.size();
 }
 
-// setDevices_l() must be called with ThreadBase::mLock held
-void AudioFlinger::EffectChain::setDevices_l(const AudioDeviceTypeAddrVector &devices)
+// setDevices_l() must be called with AudioFlinger::ThreadBase::mLock held
+void EffectChain::setDevices_l(const AudioDeviceTypeAddrVector &devices)
 {
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
@@ -2481,8 +2504,8 @@
     }
 }
 
-// setInputDevice_l() must be called with ThreadBase::mLock held
-void AudioFlinger::EffectChain::setInputDevice_l(const AudioDeviceTypeAddr &device)
+// setInputDevice_l() must be called with AudioFlinger::ThreadBase::mLock held
+void EffectChain::setInputDevice_l(const AudioDeviceTypeAddr &device)
 {
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
@@ -2490,8 +2513,8 @@
     }
 }
 
-// setMode_l() must be called with ThreadBase::mLock held
-void AudioFlinger::EffectChain::setMode_l(audio_mode_t mode)
+// setMode_l() must be called with AudioFlinger::ThreadBase::mLock held
+void EffectChain::setMode_l(audio_mode_t mode)
 {
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
@@ -2499,8 +2522,8 @@
     }
 }
 
-// setAudioSource_l() must be called with ThreadBase::mLock held
-void AudioFlinger::EffectChain::setAudioSource_l(audio_source_t source)
+// setAudioSource_l() must be called with AudioFlinger::ThreadBase::mLock held
+void EffectChain::setAudioSource_l(audio_source_t source)
 {
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
@@ -2508,15 +2531,15 @@
     }
 }
 
-bool AudioFlinger::EffectChain::hasVolumeControlEnabled_l() const {
+bool EffectChain::hasVolumeControlEnabled_l() const {
     for (const auto &effect : mEffects) {
         if (effect->isVolumeControlEnabled()) return true;
     }
     return false;
 }
 
-// setVolume_l() must be called with ThreadBase::mLock or EffectChain::mLock held
-bool AudioFlinger::EffectChain::setVolume_l(uint32_t *left, uint32_t *right, bool force)
+// setVolume_l() must be called with AudioFlinger::ThreadBase::mLock or EffectChain::mLock held
+bool EffectChain::setVolume_l(uint32_t *left, uint32_t *right, bool force)
 {
     uint32_t newLeft = *left;
     uint32_t newRight = *right;
@@ -2582,8 +2605,8 @@
     return hasControl;
 }
 
-// resetVolume_l() must be called with ThreadBase::mLock or EffectChain::mLock held
-void AudioFlinger::EffectChain::resetVolume_l()
+// resetVolume_l() must be called with AudioFlinger::ThreadBase::mLock or EffectChain::mLock held
+void EffectChain::resetVolume_l()
 {
     if ((mLeftVolume != UINT_MAX) && (mRightVolume != UINT_MAX)) {
         uint32_t left = mLeftVolume;
@@ -2592,8 +2615,9 @@
     }
 }
 
-// containsHapticGeneratingEffect_l must be called with ThreadBase::mLock or EffectChain::mLock held
-bool AudioFlinger::EffectChain::containsHapticGeneratingEffect_l()
+// containsHapticGeneratingEffect_l must be called with
+// AudioFlinger::ThreadBase::mLock or EffectChain::mLock held
+bool EffectChain::containsHapticGeneratingEffect_l()
 {
     for (size_t i = 0; i < mEffects.size(); ++i) {
         if (mEffects[i]->isHapticGenerator()) {
@@ -2603,7 +2627,7 @@
     return false;
 }
 
-void AudioFlinger::EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
+void EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
 {
     Mutex::Autolock _l(mLock);
     for (size_t i = 0; i < mEffects.size(); ++i) {
@@ -2611,7 +2635,7 @@
     }
 }
 
-void AudioFlinger::EffectChain::syncHalEffectsState()
+void EffectChain::syncHalEffectsState()
 {
     Mutex::Autolock _l(mLock);
     for (size_t i = 0; i < mEffects.size(); i++) {
@@ -2622,7 +2646,7 @@
     }
 }
 
-void AudioFlinger::EffectChain::dump(int fd, const Vector<String16>& args)
+void EffectChain::dump(int fd, const Vector<String16>& args) const
 NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     String8 result;
@@ -2647,7 +2671,7 @@
         write(fd, result.string(), result.size());
 
         for (size_t i = 0; i < numEffects; ++i) {
-            sp<EffectModule> effect = mEffects[i];
+            sp<IAfEffectModule> effect = mEffects[i];
             if (effect != 0) {
                 effect->dump(fd, args);
             }
@@ -2661,8 +2685,8 @@
     }
 }
 
-// must be called with ThreadBase::mLock held
-void AudioFlinger::EffectChain::setEffectSuspended_l(
+// must be called with AudioFlinger::ThreadBase::mLock held
+void EffectChain::setEffectSuspended_l(
         const effect_uuid_t *type, bool suspend)
 {
     sp<SuspendedEffectDesc> desc;
@@ -2680,7 +2704,7 @@
         }
 
         if (desc->mRefCount++ == 0) {
-            sp<EffectModule> effect = getEffectIfEnabled(type);
+            sp<IAfEffectModule> effect = getEffectIfEnabled(type);
             if (effect != 0) {
                 desc->mEffect = effect;
                 effect->setSuspended(true);
@@ -2700,11 +2724,11 @@
         if (--desc->mRefCount == 0) {
             ALOGV("setEffectSuspended_l() remove entry for %08x", mSuspendedEffects.keyAt(index));
             if (desc->mEffect != 0) {
-                sp<EffectModule> effect = desc->mEffect.promote();
+                sp<IAfEffectModule> effect = desc->mEffect.promote();
                 if (effect != 0) {
                     effect->setSuspended(false);
                     effect->lock();
-                    EffectHandle *handle = effect->controlHandle_l();
+                    IAfEffectHandle *handle = effect->controlHandle_l();
                     if (handle != NULL && !handle->disconnected()) {
                         effect->setEnabled_l(handle->enabled());
                     }
@@ -2717,8 +2741,8 @@
     }
 }
 
-// must be called with ThreadBase::mLock held
-void AudioFlinger::EffectChain::setEffectSuspendedAll_l(bool suspend)
+// must be called with AudioFlinger::ThreadBase::mLock held
+void EffectChain::setEffectSuspendedAll_l(bool suspend)
 {
     sp<SuspendedEffectDesc> desc;
 
@@ -2732,7 +2756,7 @@
             ALOGV("setEffectSuspendedAll_l() add entry for 0");
         }
         if (desc->mRefCount++ == 0) {
-            Vector< sp<EffectModule> > effects;
+            Vector< sp<IAfEffectModule> > effects;
             getSuspendEligibleEffects(effects);
             for (size_t i = 0; i < effects.size(); i++) {
                 setEffectSuspended_l(&effects[i]->desc().type, true);
@@ -2774,7 +2798,7 @@
 #endif //OPENSL_ES_H_
 
 /* static */
-bool AudioFlinger::EffectChain::isEffectEligibleForBtNrecSuspend(const effect_uuid_t *type)
+bool EffectChain::isEffectEligibleForBtNrecSuspend(const effect_uuid_t *type)
 {
     // Only NS and AEC are suspended when BtNRec is off
     if ((memcmp(type, FX_IID_AEC, sizeof(effect_uuid_t)) == 0) ||
@@ -2784,7 +2808,7 @@
     return false;
 }
 
-bool AudioFlinger::EffectChain::isEffectEligibleForSuspend(const effect_descriptor_t& desc)
+bool EffectChain::isEffectEligibleForSuspend(const effect_descriptor_t& desc)
 {
     // auxiliary effects and visualizer are never suspended on output mix
     if ((mSessionId == AUDIO_SESSION_OUTPUT_MIX) &&
@@ -2797,8 +2821,8 @@
     return true;
 }
 
-void AudioFlinger::EffectChain::getSuspendEligibleEffects(
-        Vector< sp<AudioFlinger::EffectModule> > &effects)
+void EffectChain::getSuspendEligibleEffects(
+        Vector< sp<IAfEffectModule> > &effects)
 {
     effects.clear();
     for (size_t i = 0; i < mEffects.size(); i++) {
@@ -2808,14 +2832,13 @@
     }
 }
 
-sp<AudioFlinger::EffectModule> AudioFlinger::EffectChain::getEffectIfEnabled(
-                                                            const effect_uuid_t *type)
+sp<IAfEffectModule> EffectChain::getEffectIfEnabled(const effect_uuid_t *type)
 {
-    sp<EffectModule> effect = getEffectFromType_l(type);
+    sp<IAfEffectModule> effect = getEffectFromType_l(type);
     return effect != 0 && effect->isEnabled() ? effect : 0;
 }
 
-void AudioFlinger::EffectChain::checkSuspendOnEffectEnabled(const sp<EffectModule>& effect,
+void EffectChain::checkSuspendOnEffectEnabled(const sp<IAfEffectModule>& effect,
                                                             bool enabled)
 {
     ssize_t index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow);
@@ -2857,13 +2880,13 @@
     }
 }
 
-bool AudioFlinger::EffectChain::isNonOffloadableEnabled()
+bool EffectChain::isNonOffloadableEnabled() const
 {
     Mutex::Autolock _l(mLock);
     return isNonOffloadableEnabled_l();
 }
 
-bool AudioFlinger::EffectChain::isNonOffloadableEnabled_l()
+bool EffectChain::isNonOffloadableEnabled_l() const
 {
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
@@ -2874,13 +2897,13 @@
     return false;
 }
 
-void AudioFlinger::EffectChain::setThread(const sp<ThreadBase>& thread)
+void EffectChain::setThread(const sp<AudioFlinger::ThreadBase>& thread)
 {
     Mutex::Autolock _l(mLock);
     mEffectCallback->setThread(thread);
 }
 
-void AudioFlinger::EffectChain::checkOutputFlagCompatibility(audio_output_flags_t *flags) const
+void EffectChain::checkOutputFlagCompatibility(audio_output_flags_t *flags) const
 {
     if ((*flags & AUDIO_OUTPUT_FLAG_RAW) != 0 && !isRawCompatible()) {
         *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_RAW);
@@ -2893,7 +2916,7 @@
     }
 }
 
-void AudioFlinger::EffectChain::checkInputFlagCompatibility(audio_input_flags_t *flags) const
+void EffectChain::checkInputFlagCompatibility(audio_input_flags_t *flags) const
 {
     if ((*flags & AUDIO_INPUT_FLAG_RAW) != 0 && !isRawCompatible()) {
         *flags = (audio_input_flags_t)(*flags & ~AUDIO_INPUT_FLAG_RAW);
@@ -2903,7 +2926,7 @@
     }
 }
 
-bool AudioFlinger::EffectChain::isRawCompatible() const
+bool EffectChain::isRawCompatible() const
 {
     Mutex::Autolock _l(mLock);
     for (const auto &effect : mEffects) {
@@ -2915,7 +2938,7 @@
     return true;
 }
 
-bool AudioFlinger::EffectChain::isFastCompatible() const
+bool EffectChain::isFastCompatible() const
 {
     Mutex::Autolock _l(mLock);
     for (const auto &effect : mEffects) {
@@ -2928,7 +2951,7 @@
     return true;
 }
 
-bool AudioFlinger::EffectChain::isBitPerfectCompatible() const {
+bool EffectChain::isBitPerfectCompatible() const {
     Mutex::Autolock _l(mLock);
     for (const auto &effect : mEffects) {
         if (effect->isProcessImplemented()
@@ -2941,7 +2964,7 @@
 }
 
 // isCompatibleWithThread_l() must be called with thread->mLock held
-bool AudioFlinger::EffectChain::isCompatibleWithThread_l(const sp<ThreadBase>& thread) const
+bool EffectChain::isCompatibleWithThread_l(const sp<AudioFlinger::ThreadBase>& thread) const
 {
     Mutex::Autolock _l(mLock);
     for (size_t i = 0; i < mEffects.size(); i++) {
@@ -2953,7 +2976,7 @@
 }
 
 // EffectCallbackInterface implementation
-status_t AudioFlinger::EffectChain::EffectCallback::createEffectHal(
+status_t EffectChain::EffectCallback::createEffectHal(
         const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
         sp<EffectHalInterface> *effect) {
     status_t status = NO_INIT;
@@ -2965,21 +2988,21 @@
     return status;
 }
 
-bool AudioFlinger::EffectChain::EffectCallback::updateOrphanEffectChains(
-        const sp<AudioFlinger::EffectBase>& effect) {
+bool EffectChain::EffectCallback::updateOrphanEffectChains(
+        const sp<IAfEffectBase>& effect) {
     // in EffectChain context, an EffectBase is always from an EffectModule so static cast is safe
     return mAudioFlinger.updateOrphanEffectChains(effect->asEffectModule());
 }
 
-status_t AudioFlinger::EffectChain::EffectCallback::allocateHalBuffer(
+status_t EffectChain::EffectCallback::allocateHalBuffer(
         size_t size, sp<EffectBufferHalInterface>* buffer) {
     return mAudioFlinger.mEffectsFactoryHal->allocateBuffer(size, buffer);
 }
 
-status_t AudioFlinger::EffectChain::EffectCallback::addEffectToHal(
+status_t EffectChain::EffectCallback::addEffectToHal(
         const sp<EffectHalInterface>& effect) {
     status_t result = NO_INIT;
-    sp<ThreadBase> t = thread().promote();
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return result;
     }
@@ -2992,10 +3015,10 @@
     return result;
 }
 
-status_t AudioFlinger::EffectChain::EffectCallback::removeEffectFromHal(
+status_t EffectChain::EffectCallback::removeEffectFromHal(
         const sp<EffectHalInterface>& effect) {
     status_t result = NO_INIT;
-    sp<ThreadBase> t = thread().promote();
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return result;
     }
@@ -3008,64 +3031,65 @@
     return result;
 }
 
-audio_io_handle_t AudioFlinger::EffectChain::EffectCallback::io() const {
-    sp<ThreadBase> t = thread().promote();
+audio_io_handle_t EffectChain::EffectCallback::io() const {
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return AUDIO_IO_HANDLE_NONE;
     }
     return t->id();
 }
 
-bool AudioFlinger::EffectChain::EffectCallback::isOutput() const {
-    sp<ThreadBase> t = thread().promote();
+bool EffectChain::EffectCallback::isOutput() const {
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return true;
     }
     return t->isOutput();
 }
 
-bool AudioFlinger::EffectChain::EffectCallback::isOffload() const {
-    return mThreadType == ThreadBase::OFFLOAD;
+bool EffectChain::EffectCallback::isOffload() const {
+    return mThreadType == AudioFlinger::ThreadBase::OFFLOAD;
 }
 
-bool AudioFlinger::EffectChain::EffectCallback::isOffloadOrDirect() const {
-    return mThreadType == ThreadBase::OFFLOAD || mThreadType == ThreadBase::DIRECT;
+bool EffectChain::EffectCallback::isOffloadOrDirect() const {
+    return mThreadType == AudioFlinger::ThreadBase::OFFLOAD
+            || mThreadType == AudioFlinger::ThreadBase::DIRECT;
 }
 
-bool AudioFlinger::EffectChain::EffectCallback::isOffloadOrMmap() const {
+bool EffectChain::EffectCallback::isOffloadOrMmap() const {
     switch (mThreadType) {
-    case ThreadBase::OFFLOAD:
-    case ThreadBase::MMAP_PLAYBACK:
-    case ThreadBase::MMAP_CAPTURE:
+    case AudioFlinger::ThreadBase::OFFLOAD:
+    case AudioFlinger::ThreadBase::MMAP_PLAYBACK:
+    case AudioFlinger::ThreadBase::MMAP_CAPTURE:
         return true;
     default:
         return false;
     }
 }
 
-bool AudioFlinger::EffectChain::EffectCallback::isSpatializer() const {
-    return mThreadType == ThreadBase::SPATIALIZER;
+bool EffectChain::EffectCallback::isSpatializer() const {
+    return mThreadType == AudioFlinger::ThreadBase::SPATIALIZER;
 }
 
-uint32_t AudioFlinger::EffectChain::EffectCallback::sampleRate() const {
-    sp<ThreadBase> t = thread().promote();
+uint32_t EffectChain::EffectCallback::sampleRate() const {
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return 0;
     }
     return t->sampleRate();
 }
 
-audio_channel_mask_t AudioFlinger::EffectChain::EffectCallback::inChannelMask(int id) const {
-    sp<ThreadBase> t = thread().promote();
+audio_channel_mask_t EffectChain::EffectCallback::inChannelMask(int id) const {
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return AUDIO_CHANNEL_NONE;
     }
-    sp<EffectChain> c = chain().promote();
+    sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
         return AUDIO_CHANNEL_NONE;
     }
 
-    if (mThreadType == ThreadBase::SPATIALIZER) {
+    if (mThreadType == AudioFlinger::ThreadBase::SPATIALIZER) {
         if (c->sessionId() == AUDIO_SESSION_OUTPUT_STAGE) {
             if (c->isFirstEffect(id)) {
                 return t->mixerChannelMask();
@@ -3073,7 +3097,8 @@
                 return t->channelMask();
             }
         } else if (!audio_is_global_session(c->sessionId())) {
-            if ((t->hasAudioSession_l(c->sessionId()) & ThreadBase::SPATIALIZED_SESSION) != 0) {
+            if ((t->hasAudioSession_l(c->sessionId())
+                    & AudioFlinger::ThreadBase::SPATIALIZED_SESSION) != 0) {
                 return t->mixerChannelMask();
             } else {
                 return t->channelMask();
@@ -3086,23 +3111,24 @@
     }
 }
 
-uint32_t AudioFlinger::EffectChain::EffectCallback::inChannelCount(int id) const {
+uint32_t EffectChain::EffectCallback::inChannelCount(int id) const {
     return audio_channel_count_from_out_mask(inChannelMask(id));
 }
 
-audio_channel_mask_t AudioFlinger::EffectChain::EffectCallback::outChannelMask() const {
-    sp<ThreadBase> t = thread().promote();
+audio_channel_mask_t EffectChain::EffectCallback::outChannelMask() const {
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return AUDIO_CHANNEL_NONE;
     }
-    sp<EffectChain> c = chain().promote();
+    sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
         return AUDIO_CHANNEL_NONE;
     }
 
-    if (mThreadType == ThreadBase::SPATIALIZER) {
+    if (mThreadType == AudioFlinger::ThreadBase::SPATIALIZER) {
         if (!audio_is_global_session(c->sessionId())) {
-            if ((t->hasAudioSession_l(c->sessionId()) & ThreadBase::SPATIALIZED_SESSION) != 0) {
+            if ((t->hasAudioSession_l(c->sessionId())
+                    & AudioFlinger::ThreadBase::SPATIALIZED_SESSION) != 0) {
                 return t->mixerChannelMask();
             } else {
                 return t->channelMask();
@@ -3115,30 +3141,30 @@
     }
 }
 
-uint32_t AudioFlinger::EffectChain::EffectCallback::outChannelCount() const {
+uint32_t EffectChain::EffectCallback::outChannelCount() const {
     return audio_channel_count_from_out_mask(outChannelMask());
 }
 
-audio_channel_mask_t AudioFlinger::EffectChain::EffectCallback::hapticChannelMask() const {
-    sp<ThreadBase> t = thread().promote();
+audio_channel_mask_t EffectChain::EffectCallback::hapticChannelMask() const {
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return AUDIO_CHANNEL_NONE;
     }
     return t->hapticChannelMask();
 }
 
-size_t AudioFlinger::EffectChain::EffectCallback::frameCount() const {
-    sp<ThreadBase> t = thread().promote();
+size_t EffectChain::EffectCallback::frameCount() const {
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return 0;
     }
     return t->frameCount();
 }
 
-uint32_t AudioFlinger::EffectChain::EffectCallback::latency() const
+uint32_t EffectChain::EffectCallback::latency() const
 NO_THREAD_SAFETY_ANALYSIS  // latency_l() access
 {
-    sp<ThreadBase> t = thread().promote();
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return 0;
     }
@@ -3146,25 +3172,25 @@
     return t->latency_l();
 }
 
-void AudioFlinger::EffectChain::EffectCallback::setVolumeForOutput(float left, float right) const
+void EffectChain::EffectCallback::setVolumeForOutput(float left, float right) const
 NO_THREAD_SAFETY_ANALYSIS  // setVolumeForOutput_l() access
 {
-    sp<ThreadBase> t = thread().promote();
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
     }
     t->setVolumeForOutput_l(left, right);
 }
 
-void AudioFlinger::EffectChain::EffectCallback::checkSuspendOnEffectEnabled(
-        const sp<EffectBase>& effect, bool enabled, bool threadLocked) {
-    sp<ThreadBase> t = thread().promote();
+void EffectChain::EffectCallback::checkSuspendOnEffectEnabled(
+        const sp<IAfEffectBase>& effect, bool enabled, bool threadLocked) {
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
     }
     t->checkSuspendOnEffectEnabled(enabled, effect->sessionId(), threadLocked);
 
-    sp<EffectChain> c = chain().promote();
+    sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
         return;
     }
@@ -3172,8 +3198,8 @@
     c->checkSuspendOnEffectEnabled(effect->asEffectModule(), enabled);
 }
 
-void AudioFlinger::EffectChain::EffectCallback::onEffectEnable(const sp<EffectBase>& effect) {
-    sp<ThreadBase> t = thread().promote();
+void EffectChain::EffectCallback::onEffectEnable(const sp<IAfEffectBase>& effect) {
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
     }
@@ -3181,19 +3207,19 @@
     t->onEffectEnable(effect->asEffectModule());
 }
 
-void AudioFlinger::EffectChain::EffectCallback::onEffectDisable(const sp<EffectBase>& effect) {
+void EffectChain::EffectCallback::onEffectDisable(const sp<IAfEffectBase>& effect) {
     checkSuspendOnEffectEnabled(effect, false, false /*threadLocked*/);
 
-    sp<ThreadBase> t = thread().promote();
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
     }
     t->onEffectDisable();
 }
 
-bool AudioFlinger::EffectChain::EffectCallback::disconnectEffectHandle(EffectHandle *handle,
+bool EffectChain::EffectCallback::disconnectEffectHandle(IAfEffectHandle *handle,
                                                       bool unpinIfLast) {
-    sp<ThreadBase> t = thread().promote();
+    sp<AudioFlinger::ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return false;
     }
@@ -3201,8 +3227,8 @@
     return true;
 }
 
-void AudioFlinger::EffectChain::EffectCallback::resetVolume() {
-    sp<EffectChain> c = chain().promote();
+void EffectChain::EffectCallback::resetVolume() {
+    sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
         return;
     }
@@ -3210,16 +3236,16 @@
 
 }
 
-product_strategy_t AudioFlinger::EffectChain::EffectCallback::strategy() const {
-    sp<EffectChain> c = chain().promote();
+product_strategy_t EffectChain::EffectCallback::strategy() const {
+    sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
         return PRODUCT_STRATEGY_NONE;
     }
     return c->strategy();
 }
 
-int32_t AudioFlinger::EffectChain::EffectCallback::activeTrackCnt() const {
-    sp<EffectChain> c = chain().promote();
+int32_t EffectChain::EffectCallback::activeTrackCnt() const {
+    sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
         return 0;
     }
@@ -3228,9 +3254,20 @@
 
 
 #undef LOG_TAG
-#define LOG_TAG "AudioFlinger::DeviceEffectProxy"
+#define LOG_TAG "DeviceEffectProxy"
 
-status_t AudioFlinger::DeviceEffectProxy::setEnabled(bool enabled, bool fromHandle)
+/* static */
+sp<IAfDeviceEffectProxy> IAfDeviceEffectProxy::create(
+        const AudioDeviceTypeAddr& device,
+        const sp</* DeviceEffectManagerCallback */ RefBase>& callback,  // TODO(b/288339104) type
+        effect_descriptor_t *desc, int id, bool notifyFramesProcessed)
+{
+    return sp<DeviceEffectProxy>::make(device,
+            sp<AudioFlinger::DeviceEffectManagerCallback>::cast(callback),
+            desc, id, notifyFramesProcessed);
+}
+
+status_t DeviceEffectProxy::setEnabled(bool enabled, bool fromHandle)
 {
     status_t status = EffectBase::setEnabled(enabled, fromHandle);
     Mutex::Autolock _l(mProxyLock);
@@ -3238,9 +3275,9 @@
         for (auto& handle : mEffectHandles) {
             Status bs;
             if (enabled) {
-                bs = handle.second->enable(&status);
+                bs = handle.second->asIEffect()->enable(&status);
             } else {
-                bs = handle.second->disable(&status);
+                bs = handle.second->asIEffect()->disable(&status);
             }
             if (!bs.isOk()) {
               status = statusTFromBinderStatus(bs);
@@ -3251,8 +3288,8 @@
     return status;
 }
 
-status_t AudioFlinger::DeviceEffectProxy::init(
-        const std::map <audio_patch_handle_t, PatchPanel::Patch>& patches) {
+status_t DeviceEffectProxy::init(
+        const std::map <audio_patch_handle_t, AudioFlinger::PatchPanel::Patch>& patches) {
 // For all audio patches
 // If src or sink device match
 // If the effect is HW accelerated
@@ -3274,10 +3311,10 @@
     return status;
 }
 
-status_t AudioFlinger::DeviceEffectProxy::onCreatePatch(
+status_t DeviceEffectProxy::onCreatePatch(
         audio_patch_handle_t patchHandle, const AudioFlinger::PatchPanel::Patch& patch) {
     status_t status = NAME_NOT_FOUND;
-    sp<EffectHandle> handle;
+    sp<IAfEffectHandle> handle;
     // only consider source[0] as this is the only "true" source of a patch
     status = checkPort(patch, &patch.mAudioPatch.sources[0], &handle);
     ALOGV("%s source checkPort status %d", __func__, status);
@@ -3295,8 +3332,8 @@
     return status;
 }
 
-status_t AudioFlinger::DeviceEffectProxy::checkPort(const PatchPanel::Patch& patch,
-        const struct audio_port_config *port, sp <EffectHandle> *handle) {
+status_t DeviceEffectProxy::checkPort(const AudioFlinger::PatchPanel::Patch& patch,
+        const struct audio_port_config *port, sp<IAfEffectHandle> *handle) {
 
     ALOGV("%s type %d device type %d address %s device ID %d patch.isSoftware() %d",
             __func__, port->type, port->ext.device.type,
@@ -3341,7 +3378,7 @@
             mDevicePort.id = AUDIO_PORT_HANDLE_NONE;
         }
     } else if (patch.isSoftware() || patch.thread().promote() != nullptr) {
-        sp <ThreadBase> thread;
+        sp <AudioFlinger::ThreadBase> thread;
         if (audio_port_config_has_input_direction(port)) {
             if (patch.isSoftware()) {
                 thread = patch.mRecord.thread();
@@ -3368,9 +3405,9 @@
     if (status == NO_ERROR || status == ALREADY_EXISTS) {
         Status bs;
         if (isEnabled()) {
-            bs = (*handle)->enable(&status);
+            bs = (*handle)->asIEffect()->enable(&status);
         } else {
-            bs = (*handle)->disable(&status);
+            bs = (*handle)->asIEffect()->disable(&status);
         }
         if (!bs.isOk()) {
             status = statusTFromBinderStatus(bs);
@@ -3379,8 +3416,8 @@
     return status;
 }
 
-void AudioFlinger::DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
-    sp<EffectHandle> effect;
+void DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
+    sp<IAfEffectHandle> effect;
     {
         Mutex::Autolock _l(mProxyLock);
         if (mEffectHandles.find(patchHandle) != mEffectHandles.end()) {
@@ -3391,7 +3428,7 @@
 }
 
 
-size_t AudioFlinger::DeviceEffectProxy::removeEffect(const sp<EffectModule>& effect)
+size_t DeviceEffectProxy::removeEffect(const sp<IAfEffectModule>& effect)
 {
     Mutex::Autolock _l(mProxyLock);
     if (effect == mHalEffect) {
@@ -3402,7 +3439,7 @@
     return mHalEffect == nullptr ? 0 : 1;
 }
 
-status_t AudioFlinger::DeviceEffectProxy::addEffectToHal(
+status_t DeviceEffectProxy::addEffectToHal(
         const sp<EffectHalInterface>& effect) {
     if (mHalEffect == nullptr) {
         return NO_INIT;
@@ -3410,7 +3447,7 @@
     return mManagerCallback->addEffectToHal(&mDevicePort, effect);
 }
 
-status_t AudioFlinger::DeviceEffectProxy::removeEffectFromHal(
+status_t DeviceEffectProxy::removeEffectFromHal(
         const sp<EffectHalInterface>& effect) {
     if (mHalEffect == nullptr) {
         return NO_INIT;
@@ -3418,14 +3455,14 @@
     return mManagerCallback->removeEffectFromHal(&mDevicePort, effect);
 }
 
-bool AudioFlinger::DeviceEffectProxy::isOutput() const {
+bool DeviceEffectProxy::isOutput() const {
     if (mDevicePort.id != AUDIO_PORT_HANDLE_NONE) {
         return mDevicePort.role == AUDIO_PORT_ROLE_SINK;
     }
     return true;
 }
 
-uint32_t AudioFlinger::DeviceEffectProxy::sampleRate() const {
+uint32_t DeviceEffectProxy::sampleRate() const {
     if (mDevicePort.id != AUDIO_PORT_HANDLE_NONE &&
             (mDevicePort.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) != 0) {
         return mDevicePort.sample_rate;
@@ -3433,7 +3470,7 @@
     return DEFAULT_OUTPUT_SAMPLE_RATE;
 }
 
-audio_channel_mask_t AudioFlinger::DeviceEffectProxy::channelMask() const {
+audio_channel_mask_t DeviceEffectProxy::channelMask() const {
     if (mDevicePort.id != AUDIO_PORT_HANDLE_NONE &&
             (mDevicePort.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) != 0) {
         return mDevicePort.channel_mask;
@@ -3441,20 +3478,20 @@
     return AUDIO_CHANNEL_OUT_STEREO;
 }
 
-uint32_t AudioFlinger::DeviceEffectProxy::channelCount() const {
+uint32_t DeviceEffectProxy::channelCount() const {
     if (isOutput()) {
         return audio_channel_count_from_out_mask(channelMask());
     }
     return audio_channel_count_from_in_mask(channelMask());
 }
 
-void AudioFlinger::DeviceEffectProxy::dump(int fd, int spaces)
+void DeviceEffectProxy::dump2(int fd, int spaces) const
 NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     const Vector<String16> args;
     EffectBase::dump(fd, args);
 
-    const bool locked = dumpTryLock(mProxyLock);
+    const bool locked = AudioFlinger::dumpTryLock(mProxyLock);
     if (!locked) {
         String8 result("DeviceEffectProxy may be deadlocked\n");
         write(fd, result.string(), result.size());
@@ -3477,33 +3514,33 @@
         outStr.appendFormat("%*sEffect for patch handle %d:\n", spaces + 2, "", iter.first);
         write(fd, outStr.string(), outStr.size());
         outStr.clear();
-        sp<EffectBase> effect = iter.second->effect().promote();
+        sp<IAfEffectBase> effect = iter.second->effect().promote();
         if (effect != nullptr) {
             effect->dump(fd, args);
         }
     }
 
     if (locked) {
-        mLock.unlock();
+        mProxyLock.unlock();
     }
 }
 
 #undef LOG_TAG
-#define LOG_TAG "AudioFlinger::DeviceEffectProxy::ProxyCallback"
+#define LOG_TAG "DeviceEffectProxy::ProxyCallback"
 
-int AudioFlinger::DeviceEffectProxy::ProxyCallback::newEffectId() {
+int DeviceEffectProxy::ProxyCallback::newEffectId() {
     return mManagerCallback->newEffectId();
 }
 
 
-bool AudioFlinger::DeviceEffectProxy::ProxyCallback::disconnectEffectHandle(
-        EffectHandle *handle, bool unpinIfLast) {
-    sp<EffectBase> effectBase = handle->effect().promote();
+bool DeviceEffectProxy::ProxyCallback::disconnectEffectHandle(
+        IAfEffectHandle *handle, bool unpinIfLast) {
+    sp<IAfEffectBase> effectBase = handle->effect().promote();
     if (effectBase == nullptr) {
         return false;
     }
 
-    sp<EffectModule> effect = effectBase->asEffectModule();
+    sp<IAfEffectModule> effect = effectBase->asEffectModule();
     if (effect == nullptr) {
         return false;
     }
@@ -3522,13 +3559,13 @@
     return true;
 }
 
-status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::createEffectHal(
+status_t DeviceEffectProxy::ProxyCallback::createEffectHal(
         const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
         sp<EffectHalInterface> *effect) {
     return mManagerCallback->createEffectHal(pEffectUuid, sessionId, deviceId, effect);
 }
 
-status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::addEffectToHal(
+status_t DeviceEffectProxy::ProxyCallback::addEffectToHal(
         const sp<EffectHalInterface>& effect) {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
@@ -3537,7 +3574,7 @@
     return proxy->addEffectToHal(effect);
 }
 
-status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::removeEffectFromHal(
+status_t DeviceEffectProxy::ProxyCallback::removeEffectFromHal(
         const sp<EffectHalInterface>& effect) {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
@@ -3546,7 +3583,7 @@
     return proxy->removeEffectFromHal(effect);
 }
 
-bool AudioFlinger::DeviceEffectProxy::ProxyCallback::isOutput() const {
+bool DeviceEffectProxy::ProxyCallback::isOutput() const {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
         return true;
@@ -3554,7 +3591,7 @@
     return proxy->isOutput();
 }
 
-uint32_t AudioFlinger::DeviceEffectProxy::ProxyCallback::sampleRate() const {
+uint32_t DeviceEffectProxy::ProxyCallback::sampleRate() const {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
         return DEFAULT_OUTPUT_SAMPLE_RATE;
@@ -3562,7 +3599,7 @@
     return proxy->sampleRate();
 }
 
-audio_channel_mask_t AudioFlinger::DeviceEffectProxy::ProxyCallback::inChannelMask(
+audio_channel_mask_t DeviceEffectProxy::ProxyCallback::inChannelMask(
         int id __unused) const {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
@@ -3571,7 +3608,7 @@
     return proxy->channelMask();
 }
 
-uint32_t AudioFlinger::DeviceEffectProxy::ProxyCallback::inChannelCount(int id __unused) const {
+uint32_t DeviceEffectProxy::ProxyCallback::inChannelCount(int id __unused) const {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
         return 2;
@@ -3579,7 +3616,7 @@
     return proxy->channelCount();
 }
 
-audio_channel_mask_t AudioFlinger::DeviceEffectProxy::ProxyCallback::outChannelMask() const {
+audio_channel_mask_t DeviceEffectProxy::ProxyCallback::outChannelMask() const {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
         return AUDIO_CHANNEL_OUT_STEREO;
@@ -3587,7 +3624,7 @@
     return proxy->channelMask();
 }
 
-uint32_t AudioFlinger::DeviceEffectProxy::ProxyCallback::outChannelCount() const {
+uint32_t DeviceEffectProxy::ProxyCallback::outChannelCount() const {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
         return 2;
@@ -3595,18 +3632,18 @@
     return proxy->channelCount();
 }
 
-void AudioFlinger::DeviceEffectProxy::ProxyCallback::onEffectEnable(
-        const sp<EffectBase>& effectBase) {
-    sp<EffectModule> effect = effectBase->asEffectModule();
+void DeviceEffectProxy::ProxyCallback::onEffectEnable(
+        const sp<IAfEffectBase>& effectBase) {
+    sp<IAfEffectModule> effect = effectBase->asEffectModule();
     if (effect == nullptr) {
         return;
     }
     effect->start();
 }
 
-void AudioFlinger::DeviceEffectProxy::ProxyCallback::onEffectDisable(
-        const sp<EffectBase>& effectBase) {
-    sp<EffectModule> effect = effectBase->asEffectModule();
+void DeviceEffectProxy::ProxyCallback::onEffectDisable(
+        const sp<IAfEffectBase>& effectBase) {
+    sp<IAfEffectModule> effect = effectBase->asEffectModule();
     if (effect == nullptr) {
         return;
     }
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 57acc67..2d8775b 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -15,62 +15,10 @@
 ** limitations under the License.
 */
 
-#ifndef INCLUDING_FROM_AUDIOFLINGER_H
-    #error This header file should only be included from AudioFlinger.h
-#endif
+namespace android {
 
 //--- Audio Effect Management
 
-// Interface implemented by the EffectModule parent or owner (e.g an EffectChain) to abstract
-// interactions between the EffectModule and the reset of the audio framework.
-class EffectCallbackInterface : public RefBase {
-public:
-            ~EffectCallbackInterface() override = default;
-
-    // Trivial methods usually implemented with help from ThreadBase
-    virtual audio_io_handle_t io() const = 0;
-    virtual bool isOutput() const = 0;
-    virtual bool isOffload() const = 0;
-    virtual bool isOffloadOrDirect() const = 0;
-    virtual bool isOffloadOrMmap() const = 0;
-    virtual bool isSpatializer() const = 0;
-    virtual uint32_t sampleRate() const = 0;
-    virtual audio_channel_mask_t inChannelMask(int id) const = 0;
-    virtual uint32_t inChannelCount(int id) const = 0;
-    virtual audio_channel_mask_t outChannelMask() const = 0;
-    virtual uint32_t outChannelCount() const = 0;
-    virtual audio_channel_mask_t hapticChannelMask() const = 0;
-    virtual size_t frameCount() const = 0;
-
-    // Non trivial methods usually implemented with help from ThreadBase:
-    //   pay attention to mutex locking order
-    virtual uint32_t latency() const { return 0; }
-    virtual status_t addEffectToHal(const sp<EffectHalInterface>& effect) = 0;
-    virtual status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) = 0;
-    virtual void setVolumeForOutput(float left, float right) const = 0;
-    virtual bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) = 0;
-    virtual void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect,
-                                             bool enabled,
-                                             bool threadLocked) = 0;
-    virtual void onEffectEnable(const sp<EffectBase>& effect) = 0;
-    virtual void onEffectDisable(const sp<EffectBase>& effect) = 0;
-
-    // Methods usually implemented with help from AudioFlinger: pay attention to mutex locking order
-    virtual status_t createEffectHal(const effect_uuid_t *pEffectUuid,
-                    int32_t sessionId, int32_t deviceId, sp<EffectHalInterface> *effect) = 0;
-    virtual status_t allocateHalBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) = 0;
-    virtual bool updateOrphanEffectChains(const sp<EffectBase>& effect) = 0;
-
-    // Methods usually implemented with help from EffectChain: pay attention to mutex locking order
-    virtual product_strategy_t strategy() const = 0;
-    virtual int32_t activeTrackCnt() const = 0;
-    virtual void resetVolume() = 0;
-
-    virtual wp<EffectChain> chain() const = 0;
-
-    virtual bool isAudioPolicyReady() const = 0;
-};
-
 // EffectBase(EffectModule) and EffectChain classes both have their own mutex to protect
 // state changes or resource modifications. Always respect the following order
 // if multiple mutexes must be acquired to avoid cross deadlock:
@@ -90,7 +38,7 @@
 // The EffectBase class contains common properties, state and behavior for an EffectModule or
 // other derived classes managing an audio effect instance within the effect framework.
 // It also contains the class mutex (see comment on locking order above).
-class EffectBase : public RefBase {
+class EffectBase : public virtual IAfEffectBase {
 public:
     EffectBase(const sp<EffectCallbackInterface>& callback,
                effect_descriptor_t *desc,
@@ -98,76 +46,69 @@
                audio_session_t sessionId,
                bool pinned);
 
-    ~EffectBase() override = default;
-
-    enum effect_state {
-        IDLE,
-        RESTART,
-        STARTING,
-        ACTIVE,
-        STOPPING,
-        STOPPED,
-        DESTROYED
-    };
-
-    int id() const { return mId; }
-    effect_state state() const {
+    int id() const final { return mId; }
+    effect_state state() const final {
         return mState;
     }
-    audio_session_t sessionId() const {
+    audio_session_t sessionId() const final {
         return mSessionId;
     }
-    const effect_descriptor_t& desc() const { return mDescriptor; }
-    bool             isOffloadable() const
+    const effect_descriptor_t& desc() const final { return mDescriptor; }
+    bool isOffloadable() const final
                         { return (mDescriptor.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) != 0; }
-    bool             isImplementationSoftware() const
+    bool isImplementationSoftware() const final
                         { return (mDescriptor.flags & EFFECT_FLAG_HW_ACC_MASK) == 0; }
-    bool             isProcessImplemented() const
+    bool isProcessImplemented() const final
                         { return (mDescriptor.flags & EFFECT_FLAG_NO_PROCESS) == 0; }
-    bool             isVolumeControl() const
+    bool isVolumeControl() const
                         { return (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK)
                             == EFFECT_FLAG_VOLUME_CTRL; }
-    bool             isVolumeMonitor() const
+    bool isVolumeMonitor() const final
                         { return (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK)
                             == EFFECT_FLAG_VOLUME_MONITOR; }
 
-    virtual status_t setEnabled(bool enabled, bool fromHandle);
-    status_t    setEnabled_l(bool enabled);
-    bool isEnabled() const;
+    status_t setEnabled(bool enabled, bool fromHandle) override;
+    status_t setEnabled_l(bool enabled) final;
+    bool isEnabled() const final;
+    void setSuspended(bool suspended) final;
+    bool suspended() const final;
 
-    void             setSuspended(bool suspended);
-    bool             suspended() const;
-
-    virtual status_t command(int32_t __unused,
+    status_t command(int32_t __unused,
                              const std::vector<uint8_t>& __unused,
                              int32_t __unused,
-                             std::vector<uint8_t>* __unused) { return NO_ERROR; };
+                             std::vector<uint8_t>* __unused) override {
+        return NO_ERROR;
+    }
 
     // mCallback is atomic so this can be lock-free.
-    void setCallback(const sp<EffectCallbackInterface>& callback) { mCallback = callback; }
-    sp<EffectCallbackInterface> getCallback() const { return mCallback.load(); }
+    void setCallback(const sp<EffectCallbackInterface>& callback) final {
+        mCallback = callback;
+    }
+    sp<EffectCallbackInterface> getCallback() const final {
+        return mCallback.load();
+    }
 
-    status_t addHandle(EffectHandle *handle);
-    ssize_t disconnectHandle(EffectHandle *handle, bool unpinIfLast);
-    ssize_t removeHandle(EffectHandle *handle);
-    ssize_t removeHandle_l(EffectHandle *handle);
-    EffectHandle* controlHandle_l();
-    bool purgeHandles();
+    status_t addHandle(IAfEffectHandle *handle) final;
+    ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) final;
+    ssize_t removeHandle(IAfEffectHandle *handle) final;
+    ssize_t removeHandle_l(IAfEffectHandle *handle) final;
+    IAfEffectHandle* controlHandle_l() final;
+    bool purgeHandles() final;
 
-    void             checkSuspendOnEffectEnabled(bool enabled, bool threadLocked);
+    void             checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) final;
 
-    bool             isPinned() const { return mPinned; }
-    void             unPin() { mPinned = false; }
+    bool             isPinned() const final { return mPinned; }
+    void             unPin() final { mPinned = false; }
 
-    void             lock() ACQUIRE(mLock) { mLock.lock(); }
-    void             unlock() RELEASE(mLock) { mLock.unlock(); }
+    void             lock() ACQUIRE(mLock) final { mLock.lock(); }
+    void             unlock() RELEASE(mLock) final { mLock.unlock(); }
 
-    status_t         updatePolicyState();
+    status_t         updatePolicyState() final;
 
-    virtual          sp<EffectModule> asEffectModule() { return nullptr; }
-    virtual          sp<DeviceEffectProxy> asDeviceEffectProxy() { return nullptr; }
+    sp<IAfEffectModule> asEffectModule() override { return nullptr; }
+    sp<IAfDeviceEffectProxy> asDeviceEffectProxy() override { return nullptr; }
 
-    void             dump(int fd, const Vector<String16>& args);
+    void             dump(int fd, const Vector<String16>& args) const override;
 
 protected:
     bool             isInternal_l() const {
@@ -179,13 +120,11 @@
                          return true;
                      }
 
-private:
-    friend class AudioFlinger;      // for mHandles
     bool             mPinned = false;
 
     DISALLOW_COPY_AND_ASSIGN(EffectBase);
 
-mutable Mutex                 mLock;      // mutex for process, commands and handles list protection
+    mutable Mutex mLock;      // mutex for process, commands and handles list protection
     mediautils::atomic_sp<EffectCallbackInterface> mCallback; // parent effect chain
     const int                 mId;        // this instance unique ID
     const audio_session_t     mSessionId; // audio session ID
@@ -194,7 +133,7 @@
     // effect is suspended: temporarily disabled by framework
     bool                      mSuspended = false;
 
-    Vector<EffectHandle *>    mHandles;   // list of client handles
+    Vector<IAfEffectHandle *> mHandles;  // list of client handles
                 // First handle in mHandles has highest priority and controls the effect module
 
     // Audio policy effect state management
@@ -217,7 +156,7 @@
 // ramping when effects are activated/deactivated.
 // When controlling an auxiliary effect, the EffectModule also provides an input buffer used by
 // the attached track(s) to accumulate their auxiliary channel.
-class EffectModule : public EffectBase {
+class EffectModule : public IAfEffectModule, public EffectBase {
 public:
     EffectModule(const sp<EffectCallbackInterface>& callabck,
                     effect_descriptor_t *desc,
@@ -225,72 +164,65 @@
                     audio_session_t sessionId,
                     bool pinned,
                     audio_port_handle_t deviceId);
-    virtual ~EffectModule();
+    ~EffectModule() override;
 
-    void process();
-    bool updateState();
+    void process() final;
+    bool updateState() final;
     status_t command(int32_t cmdCode,
                      const std::vector<uint8_t>& cmdData,
                      int32_t maxReplySize,
-                     std::vector<uint8_t>* reply) override;
+                     std::vector<uint8_t>* reply) final;
 
-    void reset_l();
-    status_t configure();
-    status_t init();
-
-    uint32_t status() {
+    void reset_l() final;
+    status_t configure() final;
+    status_t init() final;
+    uint32_t status() const final {
         return mStatus;
     }
-
-    bool isProcessEnabled() const;
-    bool isOffloadedOrDirect() const;
-    bool isVolumeControlEnabled() const;
-
-    void        setInBuffer(const sp<EffectBufferHalInterface>& buffer);
-    int16_t     *inBuffer() const {
+    bool isProcessEnabled() const final;
+    bool isOffloadedOrDirect() const final;
+    bool isVolumeControlEnabled() const final;
+    void setInBuffer(const sp<EffectBufferHalInterface>& buffer) final;
+    int16_t *inBuffer() const final {
         return mInBuffer != 0 ? reinterpret_cast<int16_t*>(mInBuffer->ptr()) : NULL;
     }
-    void        setOutBuffer(const sp<EffectBufferHalInterface>& buffer);
-    int16_t     *outBuffer() const {
+    void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) final;
+    int16_t *outBuffer() const final {
         return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
     }
-
     // Updates the access mode if it is out of date.  May issue a new effect configure.
-    void        updateAccessMode() {
+    void updateAccessMode() final {
                     if (requiredEffectBufferAccessMode() != mConfig.outputCfg.accessMode) {
                         configure();
                     }
                 }
+    status_t setDevices(const AudioDeviceTypeAddrVector &devices) final;
+    status_t setInputDevice(const AudioDeviceTypeAddr &device) final;
+    status_t setVolume(uint32_t *left, uint32_t *right, bool controller) final;
+    status_t setMode(audio_mode_t mode) final;
+    status_t setAudioSource(audio_source_t source) final;
+    status_t start() final;
+    status_t stop() final;
 
-    status_t         setDevices(const AudioDeviceTypeAddrVector &devices);
-    status_t         setInputDevice(const AudioDeviceTypeAddr &device);
-    status_t         setVolume(uint32_t *left, uint32_t *right, bool controller);
-    status_t         setMode(audio_mode_t mode);
-    status_t         setAudioSource(audio_source_t source);
-    status_t         start();
-    status_t         stop();
+    status_t setOffloaded(bool offloaded, audio_io_handle_t io) final;
+    bool isOffloaded() const final;
+    void addEffectToHal_l() final;
+    void release_l() final;
 
-    status_t         setOffloaded(bool offloaded, audio_io_handle_t io);
-    bool             isOffloaded() const;
-    void             addEffectToHal_l();
-    void             release_l();
+    sp<IAfEffectModule> asEffectModule() final { return this; }
 
-    sp<EffectModule> asEffectModule() override { return this; }
+    bool isHapticGenerator() const final;
 
-    static bool      isHapticGenerator(const effect_uuid_t* type);
-    bool             isHapticGenerator() const;
+    status_t setHapticIntensity(int id, os::HapticScale intensity) final;
+    status_t setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo) final;
 
-    status_t         setHapticIntensity(int id, os::HapticScale intensity);
-    status_t         setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo);
-
-    status_t         getConfigs(audio_config_base_t* inputCfg,
+    status_t getConfigs(audio_config_base_t* inputCfg,
                                 audio_config_base_t* outputCfg,
-                                bool* isOutput) const;
+                                bool* isOutput) const final;
 
-    void             dump(int fd, const Vector<String16>& args);
+    void dump(int fd, const Vector<String16>& args) const final;
 
 private:
-    friend class AudioFlinger;      // for mHandles
 
     // Maximum time allocated to effect engines to complete the turn off sequence
     static const uint32_t MAX_DISABLE_TIME_MS = 10000;
@@ -354,32 +286,37 @@
 // There is one EffectHandle object for each application controlling (or using)
 // an effect module.
 // The EffectHandle is obtained by calling AudioFlinger::createEffect().
-class EffectHandle: public android::media::BnEffect {
+class EffectHandle: public IAfEffectHandle, public android::media::BnEffect {
 public:
 
-    EffectHandle(const sp<EffectBase>& effect,
+    EffectHandle(const sp<IAfEffectBase>& effect,
             const sp<AudioFlinger::Client>& client,
             const sp<media::IEffectClient>& effectClient,
             int32_t priority, bool notifyFramesProcessed);
-    virtual ~EffectHandle();
+    ~EffectHandle() override;
     status_t onTransact(
-            uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
-    virtual status_t initCheck();
+            uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) final;
+    status_t initCheck() const final;
 
     // IEffect
-    android::binder::Status enable(int32_t* _aidl_return) override;
-    android::binder::Status disable(int32_t* _aidl_return) override;
+    android::binder::Status enable(int32_t* _aidl_return) final;
+    android::binder::Status disable(int32_t* _aidl_return) final;
     android::binder::Status command(int32_t cmdCode,
                                     const std::vector<uint8_t>& cmdData,
                                     int32_t maxResponseSize,
                                     std::vector<uint8_t>* response,
-                                    int32_t* _aidl_return) override;
-    android::binder::Status disconnect() override;
-    android::binder::Status getCblk(media::SharedFileRegion* _aidl_return) override;
+                                    int32_t* _aidl_return) final;
+    android::binder::Status disconnect() final;
+    android::binder::Status getCblk(media::SharedFileRegion* _aidl_return) final;
     android::binder::Status getConfig(media::EffectConfig* _config,
-                                      int32_t* _aidl_return) override;
+                                      int32_t* _aidl_return) final;
 
-    sp<Client> client() const { return mClient; }
+    // TODO(b/288339104) type
+    sp<RefBase /* AudioFlinger::Client */> client() const final { return mClient; }
+
+    sp<android::media::IEffect> asIEffect() final {
+        return sp<android::media::IEffect>::fromExisting(this);
+    }
 
 private:
     void disconnect(bool unpinIfLast);
@@ -388,38 +325,39 @@
     // - hasControl: true if control is given, false if removed
     // - signal: true if the client app should be signaled of the change, false otherwise
     // - enabled: state of the effect when control is passed
-    void setControl(bool hasControl, bool signal, bool enabled);
+    void setControl(bool hasControl, bool signal, bool enabled) final;
     void commandExecuted(uint32_t cmdCode,
                          const std::vector<uint8_t>& cmdData,
-                         const std::vector<uint8_t>& replyData);
-    void setEnabled(bool enabled);
-    bool enabled() const { return mEnabled; }
+                         const std::vector<uint8_t>& replyData) final;
+    bool enabled() const final { return mEnabled; }
+    void setEnabled(bool enabled) final;
+    void framesProcessed(int32_t frames) const final;
 
-    void framesProcessed(int32_t frames) const;
-
+public:
     // Getters
-    wp<EffectBase> effect() const { return mEffect; }
-    int id() const {
-        sp<EffectBase> effect = mEffect.promote();
+    wp<IAfEffectBase> effect() const final { return mEffect; }
+    int id() const final {
+        sp<IAfEffectBase> effect = mEffect.promote();
         if (effect == 0) {
             return 0;
         }
         return effect->id();
     }
-    int priority() const { return mPriority; }
-    bool hasControl() const { return mHasControl; }
-    bool disconnected() const { return mDisconnected; }
+private:
+    int priority() const final { return mPriority; }
+    bool hasControl() const final { return mHasControl; }
+    bool disconnected() const final { return mDisconnected; }
 
-    void dumpToBuffer(char* buffer, size_t size);
+    void dumpToBuffer(char* buffer, size_t size) const final;
+
 
 private:
-    friend class AudioFlinger;          // for mEffect, mHasControl, mEnabled
     DISALLOW_COPY_AND_ASSIGN(EffectHandle);
 
     Mutex mLock;                             // protects IEffect method calls
-    const wp<EffectBase> mEffect;            // pointer to controlled EffectModule
+    const wp<IAfEffectBase> mEffect;               // pointer to controlled EffectModule
     const sp<media::IEffectClient> mEffectClient;  // callback interface for client notifications
-    /*const*/ sp<Client> mClient;            // client for shared memory allocation, see
+    /*const*/ sp<AudioFlinger::Client> mClient;    // client for shared memory allocation, see
                                              //   disconnect()
     sp<IMemory> mCblkMemory;                 // shared memory for control block
     effect_param_cblk_t* mCblk;              // control block for deferred parameter setting via
@@ -443,123 +381,139 @@
 // order corresponding in the effect process order. When attached to a track (session ID !=
 // AUDIO_SESSION_OUTPUT_MIX),
 // it also provides its own input buffer used by the track as an accumulation buffer.
-class EffectChain : public RefBase {
+class EffectChain : public IAfEffectChain {
 public:
-    EffectChain(const wp<ThreadBase>& wThread, audio_session_t sessionId);
-    virtual ~EffectChain();
+    EffectChain(const wp<AudioFlinger::ThreadBase>& wThread, audio_session_t sessionId);
+    ~EffectChain() override;
 
-    // special key used for an entry in mSuspendedEffects keyed vector
-    // corresponding to a suspend all request.
-    static const int        kKeyForSuspendAll = 0;
+    void process_l() final;
 
-    // minimum duration during which we force calling effect process when last track on
-    // a session is stopped or removed to allow effect tail to be rendered
-    static const int        kProcessTailDurationMs = 1000;
-
-    void process_l();
-
-    void lock() ACQUIRE(mLock) {
+    void lock() ACQUIRE(mLock) final {
         mLock.lock();
     }
-    void unlock() RELEASE(mLock) {
+    void unlock() RELEASE(mLock) final {
         mLock.unlock();
     }
-
-    status_t createEffect_l(sp<EffectModule>& effect,
+    status_t createEffect_l(sp<IAfEffectModule>& effect,
                             effect_descriptor_t *desc,
                             int id,
                             audio_session_t sessionId,
-                            bool pinned);
-    status_t addEffect_l(const sp<EffectModule>& handle);
-    status_t addEffect_ll(const sp<EffectModule>& handle);
-    size_t removeEffect_l(const sp<EffectModule>& handle, bool release = false);
+                            bool pinned) final;
+    status_t addEffect_l(const sp<IAfEffectModule>& handle) final;
+    status_t addEffect_ll(const sp<IAfEffectModule>& handle) final;
+    size_t removeEffect_l(const sp<IAfEffectModule>& handle, bool release = false) final;
 
-    audio_session_t sessionId() const { return mSessionId; }
-    void setSessionId(audio_session_t sessionId) { mSessionId = sessionId; }
+    audio_session_t sessionId() const final { return mSessionId; }
+    void setSessionId(audio_session_t sessionId) final { mSessionId = sessionId; }
 
-    sp<EffectModule> getEffectFromDesc_l(effect_descriptor_t *descriptor);
-    sp<EffectModule> getEffectFromId_l(int id);
-    sp<EffectModule> getEffectFromType_l(const effect_uuid_t *type);
-    std::vector<int> getEffectIds();
+    sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t *descriptor) const final;
+    sp<IAfEffectModule> getEffectFromId_l(int id) const final;
+    sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t *type) const final;
+    std::vector<int> getEffectIds() const final;
     // FIXME use float to improve the dynamic range
-    bool setVolume_l(uint32_t *left, uint32_t *right, bool force = false);
-    void resetVolume_l();
-    void setDevices_l(const AudioDeviceTypeAddrVector &devices);
-    void setInputDevice_l(const AudioDeviceTypeAddr &device);
-    void setMode_l(audio_mode_t mode);
-    void setAudioSource_l(audio_source_t source);
 
-    void setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
+    bool setVolume_l(uint32_t *left, uint32_t *right, bool force = false) final;
+    void resetVolume_l() final;
+    void setDevices_l(const AudioDeviceTypeAddrVector &devices) final;
+    void setInputDevice_l(const AudioDeviceTypeAddr &device) final;
+    void setMode_l(audio_mode_t mode) final;
+    void setAudioSource_l(audio_source_t source) final;
+
+    void setInBuffer(const sp<EffectBufferHalInterface>& buffer) final {
         mInBuffer = buffer;
     }
-    float *inBuffer() const {
+    float *inBuffer() const final {
         return mInBuffer != 0 ? reinterpret_cast<float*>(mInBuffer->ptr()) : NULL;
     }
-    void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
+    void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) final {
         mOutBuffer = buffer;
     }
-    float *outBuffer() const {
+    float *outBuffer() const final {
         return mOutBuffer != 0 ? reinterpret_cast<float*>(mOutBuffer->ptr()) : NULL;
     }
+    void incTrackCnt() final { android_atomic_inc(&mTrackCnt); }
+    void decTrackCnt() final { android_atomic_dec(&mTrackCnt); }
+    int32_t trackCnt() const final { return android_atomic_acquire_load(&mTrackCnt); }
 
-    void incTrackCnt() { android_atomic_inc(&mTrackCnt); }
-    void decTrackCnt() { android_atomic_dec(&mTrackCnt); }
-    int32_t trackCnt() const { return android_atomic_acquire_load(&mTrackCnt); }
-
-    void incActiveTrackCnt() { android_atomic_inc(&mActiveTrackCnt);
+    void incActiveTrackCnt() final { android_atomic_inc(&mActiveTrackCnt);
                                mTailBufferCount = mMaxTailBuffers; }
-    void decActiveTrackCnt() { android_atomic_dec(&mActiveTrackCnt); }
-    int32_t activeTrackCnt() const { return android_atomic_acquire_load(&mActiveTrackCnt); }
+    void decActiveTrackCnt() final { android_atomic_dec(&mActiveTrackCnt); }
+    int32_t activeTrackCnt() const final {
+        return android_atomic_acquire_load(&mActiveTrackCnt);
+    }
 
-    product_strategy_t strategy() const { return mStrategy; }
-    void setStrategy(product_strategy_t strategy)
+    product_strategy_t strategy() const final { return mStrategy; }
+    void setStrategy(product_strategy_t strategy) final
             { mStrategy = strategy; }
 
     // suspend or restore effects of the specified type. The number of suspend requests is counted
     // and restore occurs once all suspend requests are cancelled.
     void setEffectSuspended_l(const effect_uuid_t *type,
-                              bool suspend);
+                              bool suspend) final;
     // suspend all eligible effects
-    void setEffectSuspendedAll_l(bool suspend);
+    void setEffectSuspendedAll_l(bool suspend) final;
     // check if effects should be suspended or restored when a given effect is enabled or disabled
-    void checkSuspendOnEffectEnabled(const sp<EffectModule>& effect, bool enabled);
+    void checkSuspendOnEffectEnabled(
+            const sp<IAfEffectModule>& effect, bool enabled) final;
 
-    void clearInputBuffer();
+    void clearInputBuffer() final;
 
     // At least one non offloadable effect in the chain is enabled
-    bool isNonOffloadableEnabled();
-    bool isNonOffloadableEnabled_l();
+    bool isNonOffloadableEnabled() const final;
+    bool isNonOffloadableEnabled_l() const final;
 
-    void syncHalEffectsState();
+    void syncHalEffectsState() final;
 
     // flags is an ORed set of audio_output_flags_t which is updated on return.
-    void checkOutputFlagCompatibility(audio_output_flags_t *flags) const;
+    void checkOutputFlagCompatibility(audio_output_flags_t *flags) const final;
 
     // flags is an ORed set of audio_input_flags_t which is updated on return.
-    void checkInputFlagCompatibility(audio_input_flags_t *flags) const;
+    void checkInputFlagCompatibility(audio_input_flags_t *flags) const final;
 
     // Is this EffectChain compatible with the RAW audio flag.
-    bool isRawCompatible() const;
+    bool isRawCompatible() const final;
 
     // Is this EffectChain compatible with the FAST audio flag.
-    bool isFastCompatible() const;
+    bool isFastCompatible() const final;
 
     // Is this EffectChain compatible with the bit-perfect audio flag.
-    bool isBitPerfectCompatible() const;
+    bool isBitPerfectCompatible() const final;
 
     // isCompatibleWithThread_l() must be called with thread->mLock held
-    bool isCompatibleWithThread_l(const sp<ThreadBase>& thread) const;
+    // TODO(b/288339104) type
+    bool isCompatibleWithThread_l(const sp<Thread>& thread) const final {
+        return isCompatibleWithThread_l(sp<AudioFlinger::ThreadBase>::cast(thread));
+    }
 
-    bool containsHapticGeneratingEffect_l();
+    bool isCompatibleWithThread_l(const sp<AudioFlinger::ThreadBase>& thread) const;
 
-    void setHapticIntensity_l(int id, os::HapticScale intensity);
+    bool containsHapticGeneratingEffect_l() final;
 
-    sp<EffectCallbackInterface> effectCallback() const { return mEffectCallback; }
-    wp<ThreadBase> thread() const { return mEffectCallback->thread(); }
+    void setHapticIntensity_l(int id, os::HapticScale intensity) final;
 
-    bool isFirstEffect(int id) const { return !mEffects.isEmpty() && id == mEffects[0]->id(); }
+    sp<EffectCallbackInterface> effectCallback() const final { return mEffectCallback; }
 
-    void dump(int fd, const Vector<String16>& args);
+    // TODO(b/288339104) type
+    wp<Thread> thread() const final { return mEffectCallback->thread(); }
+
+    bool isFirstEffect(int id) const final {
+        return !mEffects.isEmpty() && id == mEffects[0]->id();
+    }
+
+    void dump(int fd, const Vector<String16>& args) const final;
+
+    size_t numberOfEffects() const final { return mEffects.size(); }
+
+    sp<IAfEffectModule> getEffectModule(size_t index) const final {
+        return mEffects[index];
+    }
+
+    // TODO(b/288339104) type
+    void setThread(const sp<Thread>& thread) final {
+        setThread(sp<AudioFlinger::ThreadBase>::cast(thread));
+    }
+
+    void setThread(const sp<AudioFlinger::ThreadBase>& thread);
 
 private:
 
@@ -574,22 +528,22 @@
         // Note: ctors taking a weak pointer to their owner must not promote it
         // during construction (but may keep a reference for later promotion).
         EffectCallback(const wp<EffectChain>& owner,
-                       const wp<ThreadBase>& thread)
+                       const wp<AudioFlinger::ThreadBase>& thread)
             : mChain(owner)
             , mThread(thread)
-            , mAudioFlinger(*gAudioFlinger) {
-            sp<ThreadBase> base = thread.promote();
+            , mAudioFlinger(*AudioFlinger::gAudioFlinger) {
+            sp<AudioFlinger::ThreadBase> base = thread.promote();
             if (base != nullptr) {
                 mThreadType = base->type();
             } else {
-                mThreadType = ThreadBase::MIXER;  // assure a consistent value.
+                mThreadType = AudioFlinger::ThreadBase::MIXER;  // assure a consistent value.
             }
         }
 
         status_t createEffectHal(const effect_uuid_t *pEffectUuid,
                int32_t sessionId, int32_t deviceId, sp<EffectHalInterface> *effect) override;
         status_t allocateHalBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) override;
-        bool updateOrphanEffectChains(const sp<EffectBase>& effect) override;
+        bool updateOrphanEffectChains(const sp<IAfEffectBase>& effect) override;
 
         audio_io_handle_t io() const override;
         bool isOutput() const override;
@@ -609,39 +563,38 @@
 
         status_t addEffectToHal(const sp<EffectHalInterface>& effect) override;
         status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) override;
-        bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
+        bool disconnectEffectHandle(IAfEffectHandle *handle, bool unpinIfLast) override;
         void setVolumeForOutput(float left, float right) const override;
 
         // check if effects should be suspended/restored when a given effect is enabled/disabled
-        void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect,
+        void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect,
                               bool enabled, bool threadLocked) override;
         void resetVolume() override;
         product_strategy_t strategy() const override;
         int32_t activeTrackCnt() const override;
-        void onEffectEnable(const sp<EffectBase>& effect) override;
-        void onEffectDisable(const sp<EffectBase>& effect) override;
+        void onEffectEnable(const sp<IAfEffectBase>& effect) override;
+        void onEffectDisable(const sp<IAfEffectBase>& effect) override;
 
-        wp<EffectChain> chain() const override { return mChain; }
+        wp<IAfEffectChain> chain() const final { return mChain; }
 
-        bool isAudioPolicyReady() const override {
+        bool isAudioPolicyReady() const final {
             return mAudioFlinger.isAudioPolicyReady();
         }
 
-        wp<ThreadBase> thread() const { return mThread.load(); }
+        wp<AudioFlinger::ThreadBase> thread() const { return mThread.load(); }
 
-        void setThread(const sp<ThreadBase>& thread) {
+        void setThread(const sp<AudioFlinger::ThreadBase>& thread) {
             mThread = thread;
             mThreadType = thread->type();
         }
 
     private:
-        const wp<EffectChain> mChain;
-        mediautils::atomic_wp<ThreadBase> mThread;
+        const wp<IAfEffectChain> mChain;
+        mediautils::atomic_wp<AudioFlinger::ThreadBase> mThread;
         AudioFlinger &mAudioFlinger;  // implementation detail: outer instance always exists.
-        ThreadBase::type_t mThreadType;
+        AudioFlinger::ThreadBase::type_t mThreadType;
     };
 
-    friend class AudioFlinger;  // for mThread, mEffects
     DISALLOW_COPY_AND_ASSIGN(EffectChain);
 
     class SuspendedEffectDesc : public RefBase {
@@ -650,15 +603,15 @@
 
         int mRefCount;   // > 0 when suspended
         effect_uuid_t mType;
-        wp<EffectModule> mEffect;
+        wp<IAfEffectModule> mEffect;
     };
 
     // get a list of effect modules to suspend when an effect of the type
     // passed is enabled.
-    void                       getSuspendEligibleEffects(Vector< sp<EffectModule> > &effects);
+    void  getSuspendEligibleEffects(Vector<sp<IAfEffectModule>> &effects);
 
     // get an effect module if it is currently enabled
-    sp<EffectModule> getEffectIfEnabled(const effect_uuid_t *type);
+    sp<IAfEffectModule> getEffectIfEnabled(const effect_uuid_t *type);
     // true if the effect whose descriptor is passed can be suspended
     // OEMs can modify the rules implemented in this method to exclude specific effect
     // types or implementations from the suspend/restore mechanism.
@@ -668,8 +621,6 @@
 
     void clearInputBuffer_l();
 
-    void setThread(const sp<ThreadBase>& thread);
-
     // true if any effect module within the chain has volume control
     bool hasVolumeControlEnabled_l() const;
 
@@ -678,7 +629,7 @@
     ssize_t getInsertIndex(const effect_descriptor_t& desc);
 
     mutable  Mutex mLock;        // mutex protecting effect list
-             Vector< sp<EffectModule> > mEffects; // list of effect modules
+             Vector<sp<IAfEffectModule>> mEffects; // list of effect modules
              audio_session_t mSessionId; // audio session ID
              sp<EffectBufferHalInterface> mInBuffer;  // chain input buffer
              sp<EffectBufferHalInterface> mOutBuffer; // chain output buffer
@@ -704,35 +655,50 @@
              const sp<EffectCallback> mEffectCallback;
 };
 
-class DeviceEffectProxy : public EffectBase {
+class DeviceEffectProxy : public IAfDeviceEffectProxy, public EffectBase {
 public:
-        DeviceEffectProxy (const AudioDeviceTypeAddr& device,
-                const sp<DeviceEffectManagerCallback>& callback,
+    DeviceEffectProxy(const AudioDeviceTypeAddr& device,
+                const sp<AudioFlinger::DeviceEffectManagerCallback>& callback,
                 effect_descriptor_t *desc, int id, bool notifyFramesProcessed)
             : EffectBase(callback, desc, id, AUDIO_SESSION_DEVICE, false),
                 mDevice(device), mManagerCallback(callback),
                 mMyCallback(new ProxyCallback(wp<DeviceEffectProxy>(this), callback)),
                 mNotifyFramesProcessed(notifyFramesProcessed) {}
 
-    status_t setEnabled(bool enabled, bool fromHandle) override;
-    sp<DeviceEffectProxy> asDeviceEffectProxy() override { return this; }
+    status_t setEnabled(bool enabled, bool fromHandle) final;
+    sp<IAfDeviceEffectProxy> asDeviceEffectProxy() final { return this; }
 
-    status_t init(const std::map<audio_patch_handle_t, PatchPanel::Patch>& patches);
-    status_t onCreatePatch(audio_patch_handle_t patchHandle, const PatchPanel::Patch& patch);
-    void onReleasePatch(audio_patch_handle_t patchHandle);
+    // TODO(b/288339104) type
+    status_t init(const /* std::map<audio_patch_handle_t,
+            PatchPanel::Patch>& */ void * patches) final {
+        return init(*reinterpret_cast<const std::map<
+                audio_patch_handle_t, AudioFlinger::PatchPanel::Patch> *>(patches));
+    }
+    // TODO(b/288339104) type
+    status_t onCreatePatch(audio_patch_handle_t patchHandle,
+            /* const PatchPanel::Patch& */ const void * patch) final {
+        return onCreatePatch(patchHandle,
+                *reinterpret_cast<const AudioFlinger::PatchPanel::Patch *>(patch));
+    }
 
-    size_t removeEffect(const sp<EffectModule>& effect);
+    status_t init(const std::map<audio_patch_handle_t, AudioFlinger::PatchPanel::Patch>& patches);
+    status_t onCreatePatch(
+            audio_patch_handle_t patchHandle, const AudioFlinger::PatchPanel::Patch& patch);
 
-    status_t addEffectToHal(const sp<EffectHalInterface>& effect);
-    status_t removeEffectFromHal(const sp<EffectHalInterface>& effect);
+    void onReleasePatch(audio_patch_handle_t patchHandle) final;
 
-    const AudioDeviceTypeAddr& device() { return mDevice; };
-    bool isOutput() const;
-    uint32_t sampleRate() const;
-    audio_channel_mask_t channelMask() const;
-    uint32_t channelCount() const;
+    size_t removeEffect(const sp<IAfEffectModule>& effect) final;
 
-    void dump(int fd, int spaces);
+    status_t addEffectToHal(const sp<EffectHalInterface>& effect) final;
+    status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) final;
+
+    const AudioDeviceTypeAddr& device() const final { return mDevice; };
+    bool isOutput() const final;
+    uint32_t sampleRate() const final;
+    audio_channel_mask_t channelMask() const final;
+    uint32_t channelCount() const final;
+
+    void dump2(int fd, int spaces) const final;
 
 private:
 
@@ -741,14 +707,14 @@
         // Note: ctors taking a weak pointer to their owner must not promote it
         // during construction (but may keep a reference for later promotion).
         ProxyCallback(const wp<DeviceEffectProxy>& owner,
-                const sp<DeviceEffectManagerCallback>& callback)
+                const sp<AudioFlinger::DeviceEffectManagerCallback>& callback)
             : mProxy(owner), mManagerCallback(callback) {}
 
         status_t createEffectHal(const effect_uuid_t *pEffectUuid,
                int32_t sessionId, int32_t deviceId, sp<EffectHalInterface> *effect) override;
         status_t allocateHalBuffer(size_t size __unused,
                 sp<EffectBufferHalInterface>* buffer __unused) override { return NO_ERROR; }
-        bool updateOrphanEffectChains(const sp<EffectBase>& effect __unused) override {
+        bool updateOrphanEffectChains(const sp<IAfEffectBase>& effect __unused) override {
                     return false;
         }
 
@@ -771,18 +737,18 @@
         status_t addEffectToHal(const sp<EffectHalInterface>& effect) override;
         status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) override;
 
-        bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
+        bool disconnectEffectHandle(IAfEffectHandle *handle, bool unpinIfLast) override;
         void setVolumeForOutput(float left __unused, float right __unused) const override {}
 
-        void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect __unused,
+        void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect __unused,
                               bool enabled __unused, bool threadLocked __unused) override {}
         void resetVolume() override {}
         product_strategy_t strategy() const override  { return static_cast<product_strategy_t>(0); }
         int32_t activeTrackCnt() const override { return 0; }
-        void onEffectEnable(const sp<EffectBase>& effect __unused) override;
-        void onEffectDisable(const sp<EffectBase>& effect __unused) override;
+        void onEffectEnable(const sp<IAfEffectBase>& effect __unused) override;
+        void onEffectDisable(const sp<IAfEffectBase>& effect __unused) override;
 
-        wp<EffectChain> chain() const override { return nullptr; }
+        wp<IAfEffectChain> chain() const override { return nullptr; }
 
         bool isAudioPolicyReady() const override {
             return mManagerCallback->isAudioPolicyReady();
@@ -792,19 +758,21 @@
 
     private:
         const wp<DeviceEffectProxy> mProxy;
-        const sp<DeviceEffectManagerCallback> mManagerCallback;
+        const sp<AudioFlinger::DeviceEffectManagerCallback> mManagerCallback;
     };
 
-    status_t checkPort(const PatchPanel::Patch& patch, const struct audio_port_config *port,
-            sp<EffectHandle> *handle);
+    status_t checkPort(const AudioFlinger::PatchPanel::Patch& patch,
+            const struct audio_port_config *port, sp<IAfEffectHandle> *handle);
 
     const AudioDeviceTypeAddr mDevice;
-    const sp<DeviceEffectManagerCallback> mManagerCallback;
+    const sp<AudioFlinger::DeviceEffectManagerCallback> mManagerCallback;
     const sp<ProxyCallback> mMyCallback;
 
-    Mutex mProxyLock;
-    std::map<audio_patch_handle_t, sp<EffectHandle>> mEffectHandles; // protected by mProxyLock
-    sp<EffectModule> mHalEffect; // protected by mProxyLock
+    mutable Mutex mProxyLock;
+    std::map<audio_patch_handle_t, sp<IAfEffectHandle>> mEffectHandles; // protected by mProxyLock
+    sp<IAfEffectModule> mHalEffect; // protected by mProxyLock
     struct audio_port_config mDevicePort = { .id = AUDIO_PORT_HANDLE_NONE };
     const bool mNotifyFramesProcessed;
 };
+
+} // namespace android
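
The init()/onCreatePatch() overrides above use a temporary type-erasure trick: the new IAfDeviceEffectProxy interface cannot yet name AudioFlinger::PatchPanel::Patch (tracked by TODO(b/288339104)), so the interface takes a const void* and the implementation reinterpret_casts it back to the concrete map type before forwarding to a strongly typed overload. A minimal, self-contained sketch of that pattern, with hypothetical names (Patch, IProxy, Proxy) standing in for the real types:

    #include <cstdint>
    #include <map>

    struct Patch { int halHandle = 0; };          // stand-in for PatchPanel::Patch
    using patch_handle_t = int32_t;               // stand-in for audio_patch_handle_t

    class IProxy {                                // interface layer: cannot name Patch yet
    public:
        virtual ~IProxy() = default;
        // Really a const std::map<patch_handle_t, Patch>&, erased to void* for now.
        virtual int init(const void* patches) = 0;
    };

    class Proxy : public IProxy {                 // implementation layer: knows the real type
    public:
        int init(const void* patches) final {
            return init(*reinterpret_cast<const std::map<patch_handle_t, Patch>*>(patches));
        }
        int init(const std::map<patch_handle_t, Patch>& patches) {
            return static_cast<int>(patches.size());   // placeholder for the real logic
        }
    };
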
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
new file mode 100644
index 0000000..7c3be0f
--- /dev/null
+++ b/services/audioflinger/IAfEffect.h
@@ -0,0 +1,368 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+namespace android {
+
+class IAfDeviceEffectProxy;
+class IAfEffectBase;
+class IAfEffectChain;
+class IAfEffectHandle;
+class IAfEffectModule;
+
+// Interface implemented by the EffectModule parent or owner (e.g., an EffectChain) to abstract
+// interactions between the EffectModule and the rest of the audio framework.
+class EffectCallbackInterface : public RefBase {
+public:
+    // Trivial methods usually implemented with help from ThreadBase
+    virtual audio_io_handle_t io() const = 0;
+    virtual bool isOutput() const = 0;
+    virtual bool isOffload() const = 0;
+    virtual bool isOffloadOrDirect() const = 0;
+    virtual bool isOffloadOrMmap() const = 0;
+    virtual bool isSpatializer() const = 0;
+    virtual uint32_t sampleRate() const = 0;
+    virtual audio_channel_mask_t inChannelMask(int id) const = 0;
+    virtual uint32_t inChannelCount(int id) const = 0;
+    virtual audio_channel_mask_t outChannelMask() const = 0;
+    virtual uint32_t outChannelCount() const = 0;
+    virtual audio_channel_mask_t hapticChannelMask() const = 0;
+    virtual size_t frameCount() const = 0;
+
+    // Non trivial methods usually implemented with help from ThreadBase:
+    // pay attention to mutex locking order
+    virtual uint32_t latency() const { return 0; }
+    virtual status_t addEffectToHal(const sp<EffectHalInterface>& effect) = 0;
+    virtual status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) = 0;
+    virtual void setVolumeForOutput(float left, float right) const = 0;
+    virtual bool disconnectEffectHandle(IAfEffectHandle *handle, bool unpinIfLast) = 0;
+    virtual void checkSuspendOnEffectEnabled(
+            const sp<IAfEffectBase>& effect, bool enabled, bool threadLocked) = 0;
+    virtual void onEffectEnable(const sp<IAfEffectBase>& effect) = 0;
+    virtual void onEffectDisable(const sp<IAfEffectBase>& effect) = 0;
+
+    // Methods usually implemented with help from AudioFlinger: pay attention to mutex locking order
+    virtual status_t createEffectHal(const effect_uuid_t *pEffectUuid,
+            int32_t sessionId, int32_t deviceId, sp<EffectHalInterface> *effect) = 0;
+    virtual status_t allocateHalBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) = 0;
+    virtual bool updateOrphanEffectChains(const sp<IAfEffectBase>& effect) = 0;
+
+    // Methods usually implemented with help from EffectChain: pay attention to mutex locking order
+    virtual product_strategy_t strategy() const = 0;
+    virtual int32_t activeTrackCnt() const = 0;
+    virtual void resetVolume() = 0;
+    virtual wp<IAfEffectChain> chain() const = 0;
+    virtual bool isAudioPolicyReady() const = 0;
+};
+
+class IAfEffectBase : public virtual RefBase {
+    friend class EffectChain;
+    friend class EffectHandle;
+
+public:
+    enum effect_state {
+        IDLE,
+        RESTART,
+        STARTING,
+        ACTIVE,
+        STOPPING,
+        STOPPED,
+        DESTROYED
+    };
+    virtual int id() const = 0;
+    virtual effect_state state() const = 0;
+    virtual audio_session_t sessionId() const = 0;
+    virtual const effect_descriptor_t& desc() const = 0;
+    virtual bool isOffloadable() const = 0;
+    virtual bool isImplementationSoftware() const = 0;
+    virtual bool isProcessImplemented() const = 0;
+    virtual bool isVolumeControl() const = 0;
+    virtual bool isVolumeMonitor() const = 0;
+    virtual bool isEnabled() const = 0;
+    virtual bool isPinned() const = 0;
+    virtual void unPin() = 0;
+    virtual status_t updatePolicyState() = 0;
+    virtual bool purgeHandles() = 0;
+    virtual void checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) = 0;
+
+    // mCallback is atomic so this can be lock-free.
+    virtual void setCallback(const sp<EffectCallbackInterface>& callback) = 0;
+    virtual sp<EffectCallbackInterface> getCallback() const = 0;
+
+    virtual status_t addHandle(IAfEffectHandle *handle) = 0;
+    virtual ssize_t removeHandle(IAfEffectHandle *handle) = 0;
+
+    virtual sp<IAfEffectModule> asEffectModule() = 0;
+    virtual sp<IAfDeviceEffectProxy> asDeviceEffectProxy() = 0;
+
+    virtual void dump(int fd, const Vector<String16>& args) const = 0;
+
+private:
+    virtual status_t setEnabled(bool enabled, bool fromHandle) = 0;
+    virtual status_t setEnabled_l(bool enabled) = 0;
+    virtual void setSuspended(bool suspended) = 0;
+    virtual bool suspended() const = 0;
+
+    virtual status_t command(int32_t cmdCode,
+            const std::vector<uint8_t>& cmdData,
+            int32_t maxReplySize,
+            std::vector<uint8_t>* reply) = 0;
+
+    virtual ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) = 0;
+    virtual ssize_t removeHandle_l(IAfEffectHandle *handle) = 0;
+    virtual IAfEffectHandle* controlHandle_l() = 0;
+
+    virtual void lock() = 0;
+    virtual void unlock() = 0;
+};
+
+class IAfEffectModule : public virtual IAfEffectBase {
+    friend class DeviceEffectProxy;
+    friend class EffectChain;
+
+public:
+    static sp<IAfEffectModule> create(
+            const sp<EffectCallbackInterface>& callback,
+            effect_descriptor_t *desc,
+            int id,
+            audio_session_t sessionId,
+            bool pinned,
+            audio_port_handle_t deviceId);
+
+    virtual int16_t *inBuffer() const = 0;
+    virtual status_t setDevices(const AudioDeviceTypeAddrVector &devices) = 0;
+    virtual status_t setInputDevice(const AudioDeviceTypeAddr &device) = 0;
+    virtual status_t setVolume(uint32_t *left, uint32_t *right, bool controller) = 0;
+    virtual status_t setOffloaded(bool offloaded, audio_io_handle_t io) = 0;
+    virtual bool isOffloaded() const = 0;
+
+    virtual status_t setAudioSource(audio_source_t source) = 0;
+    virtual status_t setMode(audio_mode_t mode) = 0;
+
+    virtual status_t start() = 0;
+    virtual status_t getConfigs(audio_config_base_t* inputCfg,
+            audio_config_base_t* outputCfg,
+            bool* isOutput) const = 0;
+
+    static bool isHapticGenerator(const effect_uuid_t* type);
+    virtual bool isHapticGenerator() const = 0;
+    virtual status_t setHapticIntensity(int id, os::HapticScale intensity) = 0;
+    virtual status_t setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo) = 0;
+
+private:
+    virtual void process() = 0;
+    virtual bool updateState() = 0;
+    virtual void reset_l() = 0;
+    virtual status_t configure() = 0;
+    virtual status_t init() = 0;
+    virtual uint32_t status() const = 0;
+    virtual bool isProcessEnabled() const = 0;
+    virtual bool isOffloadedOrDirect() const = 0;
+    virtual bool isVolumeControlEnabled() const = 0;
+
+    virtual void setInBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
+    virtual void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
+    virtual int16_t *outBuffer() const = 0;
+
+    // Updates the access mode if it is out of date.  May issue a new effect configure.
+    virtual void updateAccessMode() = 0;
+
+    virtual status_t stop() = 0;
+    virtual void addEffectToHal_l() = 0;
+    virtual void release_l() = 0;
+};
+
+class IAfEffectChain : public RefBase {
+    // Most of these methods are accessed from AudioFlinger::Thread
+public:
+    static sp<IAfEffectChain> create(
+            const wp<Thread /*ThreadBase*/>& wThread,  // TODO(b/288339104) type
+            audio_session_t sessionId);
+
+    // special key used for an entry in mSuspendedEffects keyed vector
+    // corresponding to a suspend all request.
+    static constexpr int kKeyForSuspendAll = 0;
+
+    // minimum duration during which we force calling effect process when last track on
+    // a session is stopped or removed to allow effect tail to be rendered
+    static constexpr int kProcessTailDurationMs = 1000;
+
+    virtual void process_l() = 0;
+
+    virtual void lock() = 0;
+    virtual void unlock() = 0;
+
+    virtual status_t createEffect_l(sp<IAfEffectModule>& effect,
+                            effect_descriptor_t *desc,
+                            int id,
+                            audio_session_t sessionId,
+                            bool pinned) = 0;
+
+    virtual status_t addEffect_l(const sp<IAfEffectModule>& handle) = 0;
+    virtual status_t addEffect_ll(const sp<IAfEffectModule>& handle) = 0;
+    virtual size_t removeEffect_l(const sp<IAfEffectModule>& handle, bool release = false) = 0;
+
+    virtual audio_session_t sessionId() const = 0;
+    virtual void setSessionId(audio_session_t sessionId) = 0;
+
+    virtual sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t *descriptor) const = 0;
+    virtual sp<IAfEffectModule> getEffectFromId_l(int id) const = 0;
+    virtual sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t *type) const = 0;
+    virtual std::vector<int> getEffectIds() const = 0;
+    virtual bool setVolume_l(uint32_t *left, uint32_t *right, bool force = false) = 0;
+    virtual void resetVolume_l() = 0;
+    virtual void setDevices_l(const AudioDeviceTypeAddrVector &devices) = 0;
+    virtual void setInputDevice_l(const AudioDeviceTypeAddr &device) = 0;
+    virtual void setMode_l(audio_mode_t mode) = 0;
+    virtual void setAudioSource_l(audio_source_t source) = 0;
+
+    virtual void setInBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
+    virtual float *inBuffer() const = 0;
+    virtual void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
+    virtual float *outBuffer() const = 0;
+
+    virtual void incTrackCnt() = 0;
+    virtual void decTrackCnt() = 0;
+    virtual int32_t trackCnt() const = 0;
+
+    virtual void incActiveTrackCnt() = 0;
+    virtual void decActiveTrackCnt() = 0;
+    virtual int32_t activeTrackCnt() const = 0;
+
+    virtual product_strategy_t strategy() const = 0;
+    virtual void setStrategy(product_strategy_t strategy) = 0;
+
+    // suspend or restore effects of the specified type. The number of suspend requests is counted
+    // and restore occurs once all suspend requests are cancelled.
+    virtual void setEffectSuspended_l(
+            const effect_uuid_t *type, bool suspend) = 0;
+    // suspend all eligible effects
+    virtual void setEffectSuspendedAll_l(bool suspend) = 0;
+    // check if effects should be suspended or restored when a given effect is enabled or disabled
+    virtual void checkSuspendOnEffectEnabled(const sp<IAfEffectModule>& effect, bool enabled) = 0;
+
+    virtual void clearInputBuffer() = 0;
+
+    // At least one non offloadable effect in the chain is enabled
+    virtual bool isNonOffloadableEnabled() const = 0;
+    virtual bool isNonOffloadableEnabled_l() const = 0;
+
+    virtual void syncHalEffectsState() = 0;
+
+    // flags is an ORed set of audio_output_flags_t which is updated on return.
+    virtual void checkOutputFlagCompatibility(audio_output_flags_t *flags) const = 0;
+
+    // flags is an ORed set of audio_input_flags_t which is updated on return.
+    virtual void checkInputFlagCompatibility(audio_input_flags_t *flags) const = 0;
+
+    // Is this EffectChain compatible with the RAW audio flag.
+    virtual bool isRawCompatible() const = 0;
+
+    // Is this EffectChain compatible with the FAST audio flag.
+    virtual bool isFastCompatible() const = 0;
+
+    // Is this EffectChain compatible with the bit-perfect audio flag.
+    virtual bool isBitPerfectCompatible() const = 0;
+
+    // isCompatibleWithThread_l() must be called with thread->mLock held
+    //  TODO(b/288339104) type
+    virtual bool isCompatibleWithThread_l(const sp<Thread>& thread) const = 0;
+
+    virtual bool containsHapticGeneratingEffect_l() = 0;
+
+    virtual void setHapticIntensity_l(int id, os::HapticScale intensity) = 0;
+
+    virtual sp<EffectCallbackInterface> effectCallback() const = 0;
+
+    virtual wp<Thread> thread() const = 0;  // TODO(b/288339104) type
+    virtual void setThread(const sp<Thread>& thread) = 0;  // TODO(b/288339104) type
+
+    virtual bool isFirstEffect(int id) const = 0;
+
+    virtual size_t numberOfEffects() const = 0;
+    virtual sp<IAfEffectModule> getEffectModule(size_t index) const = 0;
+
+    virtual void dump(int fd, const Vector<String16>& args) const = 0;
+};
+
+class IAfEffectHandle : public virtual RefBase {
+    friend class EffectBase;
+    friend class EffectChain;
+    friend class EffectModule;
+
+public:
+    static sp<IAfEffectHandle> create(
+            const sp<IAfEffectBase>& effect,
+            const sp<RefBase /*AudioFlinger::Client */>& client,  // TODO(b/288339104) type
+            const sp<media::IEffectClient>& effectClient,
+            int32_t priority, bool notifyFramesProcessed);
+
+    virtual status_t initCheck() const = 0;
+    virtual bool enabled() const = 0;
+    virtual int id() const = 0;
+    virtual wp<IAfEffectBase> effect() const = 0;
+    virtual sp<android::media::IEffect> asIEffect() = 0;
+    // TODO(b/288339104) type
+    virtual sp<RefBase /* AudioFlinger::Client */> client() const = 0;
+
+private:
+    virtual void setControl(bool hasControl, bool signal, bool enabled) = 0;
+    virtual bool hasControl() const = 0;
+    virtual void setEnabled(bool enabled) = 0;
+    virtual bool disconnected() const = 0;
+    virtual int priority() const = 0;
+
+    virtual void commandExecuted(uint32_t cmdCode,
+            const std::vector<uint8_t>& cmdData,
+            const std::vector<uint8_t>& replyData) = 0;
+    virtual void framesProcessed(int32_t frames) const = 0;
+
+    virtual void dumpToBuffer(char* buffer, size_t size) const = 0;
+};
+
+class IAfDeviceEffectProxy : public virtual IAfEffectBase {
+public:
+    // TODO(b/288339104) type
+    static sp<IAfDeviceEffectProxy> create(const AudioDeviceTypeAddr& device,
+                const sp</* DeviceEffectManagerCallback */ RefBase>& callback,
+                effect_descriptor_t *desc, int id, bool notifyFramesProcessed);
+
+    virtual status_t init(
+            const /* std::map<audio_patch_handle_t,
+            PatchPanel::Patch>& */ void * patches) = 0; // TODO(b/288339104) type
+    virtual const AudioDeviceTypeAddr& device() const = 0;
+
+    virtual status_t onCreatePatch(
+            audio_patch_handle_t patchHandle,
+            /* const PatchPanel::Patch& */ const void * patch) = 0;
+    virtual void onReleasePatch(audio_patch_handle_t patchHandle) = 0;
+
+    virtual void dump2(int fd, int spaces) const = 0; // TODO(b/288339104) naming?
+
+private:
+    // used by DeviceEffectProxy
+    virtual bool isOutput() const = 0;
+    virtual uint32_t sampleRate() const = 0;
+    virtual audio_channel_mask_t channelMask() const = 0;
+    virtual uint32_t channelCount() const = 0;
+
+    virtual size_t removeEffect(const sp<IAfEffectModule>& effect) = 0;
+    virtual status_t addEffectToHal(const sp<EffectHalInterface>& effect) = 0;
+    virtual status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) = 0;
+};
+
+} // namespace android
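
The new header keeps only pure-virtual interfaces plus static create() factories, the idea being that callers depend on the IAf* interfaces rather than the concrete EffectChain/EffectModule/EffectHandle classes. A minimal sketch of that factory-on-interface idiom, using hypothetical names and std::unique_ptr where the real code returns sp<> strong pointers:

    #include <memory>

    class IChain {                                   // hypothetical stand-in for IAfEffectChain
    public:
        virtual ~IChain() = default;
        virtual int sessionId() const = 0;
        // Declared on the interface, defined next to the concrete class below,
        // so callers never need the implementation header.
        static std::unique_ptr<IChain> create(int sessionId);
    };

    namespace {
    class ChainImpl : public IChain {                // would live in the .cpp file
    public:
        explicit ChainImpl(int sessionId) : mSessionId(sessionId) {}
        int sessionId() const final { return mSessionId; }
    private:
        const int mSessionId;
    };
    }  // namespace

    std::unique_ptr<IChain> IChain::create(int sessionId) {
        return std::make_unique<ChainImpl>(sessionId);
    }

    // Usage mirrors the Threads.cpp change from new EffectChain(this, sessionId)
    // to IAfEffectChain::create(this, sessionId):
    //     auto chain = IChain::create(42);
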
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 5555766..2c5e47c 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -19,7 +19,7 @@
     #error This header file should only be included from AudioFlinger.h
 #endif
 
-
+public: // TODO(b/288339104) extract out of AudioFlinger class
 // PatchPanel is concealed within AudioFlinger, their lifetimes are the same.
 class PatchPanel {
 public:
@@ -265,3 +265,5 @@
     };
     std::map<audio_module_handle_t, ModuleConnections> mInsertedModules;
 };
+
+private:
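
The public:/private: lines above only make sense because PatchPanel.h is pasted into the middle of the AudioFlinger class body (the #error guard enforces that): the header widens access so other code can name the nested class, then restores the enclosing private section. A minimal sketch of that include-inside-class pattern, with hypothetical names:

    class Owner {                   // plays the role of AudioFlinger
        int mHidden = 0;            // default private section

    // The lines below would normally arrive via #include "Nested.h",
    // exactly like PatchPanel.h inside AudioFlinger.h.
    public:                         // widen access for the nested class
        class Nested {              // plays the role of PatchPanel
        public:
            int value() const { return 7; }
        };
    private:                        // restore the enclosing private section
        int mAlsoHidden = 0;
    };

    // Usage: Owner::Nested nested; nested.value();
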
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 95883d9..0add182 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1061,7 +1061,7 @@
     write(fd, buffer, strlen(buffer));
 
     for (size_t i = 0; i < numEffectChains; ++i) {
-        sp<EffectChain> chain = mEffectChains[i];
+        sp<IAfEffectChain> chain = mEffectChains[i];
         if (chain != 0) {
             chain->dump(fd, args);
         }
@@ -1211,7 +1211,7 @@
 void AudioFlinger::ThreadBase::setEffectSuspended_l(
         const effect_uuid_t *type, bool suspend, audio_session_t sessionId)
 {
-    sp<EffectChain> chain = getEffectChain_l(sessionId);
+    sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
     if (chain != 0) {
         if (type != NULL) {
             chain->setEffectSuspended_l(type, suspend);
@@ -1223,7 +1223,7 @@
     updateSuspendedSessions_l(type, suspend, sessionId);
 }
 
-void AudioFlinger::ThreadBase::checkSuspendOnAddEffectChain_l(const sp<EffectChain>& chain)
+void AudioFlinger::ThreadBase::checkSuspendOnAddEffectChain_l(const sp<IAfEffectChain>& chain)
 {
     ssize_t index = mSuspendedSessions.indexOfKey(chain->sessionId());
     if (index < 0) {
@@ -1236,7 +1236,7 @@
     for (size_t i = 0; i < sessionEffects.size(); i++) {
         const sp<SuspendedSessionDesc>& desc = sessionEffects.valueAt(i);
         for (int j = 0; j < desc->mRefCount; j++) {
-            if (sessionEffects.keyAt(i) == EffectChain::kKeyForSuspendAll) {
+            if (sessionEffects.keyAt(i) == IAfEffectChain::kKeyForSuspendAll) {
                 chain->setEffectSuspendedAll_l(true);
             } else {
                 ALOGV("checkSuspendOnAddEffectChain_l() suspending effects %08x",
@@ -1269,7 +1269,7 @@
     }
 
 
-    int key = EffectChain::kKeyForSuspendAll;
+    int key = IAfEffectChain::kKeyForSuspendAll;
     if (type != NULL) {
         key = type->timeLow;
     }
@@ -1370,7 +1370,7 @@
         }
     }
 
-    if (EffectModule::isHapticGenerator(&desc->type)) {
+    if (IAfEffectModule::isHapticGenerator(&desc->type)) {
         ALOGE("%s(): HapticGenerator is not supported in RecordThread", __func__);
         return BAD_VALUE;
     }
@@ -1393,7 +1393,7 @@
         return NO_ERROR;
     }
 
-    if (EffectModule::isHapticGenerator(&desc->type) && mHapticChannelCount == 0) {
+    if (IAfEffectModule::isHapticGenerator(&desc->type) && mHapticChannelCount == 0) {
         ALOGW("%s: thread doesn't support haptic playback while the effect is HapticGenerator",
                 __func__);
         return BAD_VALUE;
@@ -1533,7 +1533,7 @@
 }
 
 // ThreadBase::createEffect_l() must be called with AudioFlinger::mLock held
-sp<AudioFlinger::EffectHandle> AudioFlinger::ThreadBase::createEffect_l(
+sp<IAfEffectHandle> AudioFlinger::ThreadBase::createEffect_l(
         const sp<AudioFlinger::Client>& client,
         const sp<IEffectClient>& effectClient,
         int32_t priority,
@@ -1545,10 +1545,10 @@
         bool probe,
         bool notifyFramesProcessed)
 {
-    sp<EffectModule> effect;
-    sp<EffectHandle> handle;
+    sp<IAfEffectModule> effect;
+    sp<IAfEffectHandle> handle;
     status_t lStatus;
-    sp<EffectChain> chain;
+    sp<IAfEffectChain> chain;
     bool chainCreated = false;
     bool effectCreated = false;
     audio_unique_id_t effectId = AUDIO_UNIQUE_ID_USE_UNSPECIFIED;
@@ -1574,7 +1574,7 @@
         if (chain == 0) {
             // create a new chain for this session
             ALOGV("createEffect_l() new effect chain for session %d", sessionId);
-            chain = new EffectChain(this, sessionId);
+            chain = IAfEffectChain::create(this, sessionId);
             addEffectChain_l(chain);
             chain->setStrategy(getStrategyForSession_l(sessionId));
             chainCreated = true;
@@ -1610,7 +1610,8 @@
             }
         }
         // create effect handle and connect it to effect module
-        handle = new EffectHandle(effect, client, effectClient, priority, notifyFramesProcessed);
+        handle = IAfEffectHandle::create(
+                effect, client, effectClient, priority, notifyFramesProcessed);
         lStatus = handle->initCheck();
         if (lStatus == OK) {
             lStatus = effect->addHandle(handle.get());
@@ -1637,14 +1638,14 @@
     return handle;
 }
 
-void AudioFlinger::ThreadBase::disconnectEffectHandle(EffectHandle *handle,
+void AudioFlinger::ThreadBase::disconnectEffectHandle(IAfEffectHandle *handle,
                                                       bool unpinIfLast)
 {
     bool remove = false;
-    sp<EffectModule> effect;
+    sp<IAfEffectModule> effect;
     {
         Mutex::Autolock _l(mLock);
-        sp<EffectBase> effectBase = handle->effect().promote();
+        sp<IAfEffectBase> effectBase = handle->effect().promote();
         if (effectBase == nullptr) {
             return;
         }
@@ -1667,7 +1668,7 @@
     }
 }
 
-void AudioFlinger::ThreadBase::onEffectEnable(const sp<EffectModule>& effect) {
+void AudioFlinger::ThreadBase::onEffectEnable(const sp<IAfEffectModule>& effect) {
     if (isOffloadOrMmap()) {
         Mutex::Autolock _l(mLock);
         broadcast_l();
@@ -1690,33 +1691,33 @@
     }
 }
 
-sp<AudioFlinger::EffectModule> AudioFlinger::ThreadBase::getEffect(audio_session_t sessionId,
+sp<IAfEffectModule> AudioFlinger::ThreadBase::getEffect(audio_session_t sessionId,
         int effectId)
 {
     Mutex::Autolock _l(mLock);
     return getEffect_l(sessionId, effectId);
 }
 
-sp<AudioFlinger::EffectModule> AudioFlinger::ThreadBase::getEffect_l(audio_session_t sessionId,
+sp<IAfEffectModule> AudioFlinger::ThreadBase::getEffect_l(audio_session_t sessionId,
         int effectId)
 {
-    sp<EffectChain> chain = getEffectChain_l(sessionId);
+    sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
     return chain != 0 ? chain->getEffectFromId_l(effectId) : 0;
 }
 
 std::vector<int> AudioFlinger::ThreadBase::getEffectIds_l(audio_session_t sessionId)
 {
-    sp<EffectChain> chain = getEffectChain_l(sessionId);
+    sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
     return chain != nullptr ? chain->getEffectIds() : std::vector<int>{};
 }
 
 // PlaybackThread::addEffect_l() must be called with AudioFlinger::mLock and
 // PlaybackThread::mLock held
-status_t AudioFlinger::ThreadBase::addEffect_l(const sp<EffectModule>& effect)
+status_t AudioFlinger::ThreadBase::addEffect_l(const sp<IAfEffectModule>& effect)
 {
     // check for existing effect chain with the requested audio session
     audio_session_t sessionId = effect->sessionId();
-    sp<EffectChain> chain = getEffectChain_l(sessionId);
+    sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
     bool chainCreated = false;
 
     ALOGD_IF((mType == OFFLOAD) && !effect->isOffloadable(),
@@ -1726,7 +1727,7 @@
     if (chain == 0) {
         // create a new chain for this session
         ALOGV("addEffect_l() new effect chain for session %d", sessionId);
-        chain = new EffectChain(this, sessionId);
+        chain = IAfEffectChain::create(this, sessionId);
         addEffectChain_l(chain);
         chain->setStrategy(getStrategyForSession_l(sessionId));
         chainCreated = true;
@@ -1757,7 +1758,7 @@
     return NO_ERROR;
 }
 
-void AudioFlinger::ThreadBase::removeEffect_l(const sp<EffectModule>& effect, bool release) {
+void AudioFlinger::ThreadBase::removeEffect_l(const sp<IAfEffectModule>& effect, bool release) {
 
     ALOGV("%s %p effect %p", __FUNCTION__, this, effect.get());
     effect_descriptor_t desc = effect->desc();
@@ -1765,7 +1766,7 @@
         detachAuxEffect_l(effect->id());
     }
 
-    sp<EffectChain> chain = effect->getCallback()->chain().promote();
+    sp<IAfEffectChain> chain = effect->getCallback()->chain().promote();
     if (chain != 0) {
         // remove effect chain if removing last effect
         if (chain->removeEffect_l(effect, release) == 0) {
@@ -1777,7 +1778,7 @@
 }
 
 void AudioFlinger::ThreadBase::lockEffectChains_l(
-        Vector< sp<AudioFlinger::EffectChain> >& effectChains)
+        Vector<sp<IAfEffectChain>>& effectChains)
 NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::lock()
 {
     effectChains = mEffectChains;
@@ -1787,7 +1788,7 @@
 }
 
 void AudioFlinger::ThreadBase::unlockEffectChains(
-        const Vector< sp<AudioFlinger::EffectChain> >& effectChains)
+        const Vector<sp<IAfEffectChain>>& effectChains)
 NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::unlock()
 {
     for (size_t i = 0; i < effectChains.size(); i++) {
@@ -1795,13 +1796,13 @@
     }
 }
 
-sp<AudioFlinger::EffectChain> AudioFlinger::ThreadBase::getEffectChain(audio_session_t sessionId)
+sp<IAfEffectChain> AudioFlinger::ThreadBase::getEffectChain(audio_session_t sessionId)
 {
     Mutex::Autolock _l(mLock);
     return getEffectChain_l(sessionId);
 }
 
-sp<AudioFlinger::EffectChain> AudioFlinger::ThreadBase::getEffectChain_l(audio_session_t sessionId)
+sp<IAfEffectChain> AudioFlinger::ThreadBase::getEffectChain_l(audio_session_t sessionId)
         const
 {
     size_t size = mEffectChains.size();
@@ -2350,7 +2351,7 @@
     }
 
     if (isBitPerfect) {
-        sp<EffectChain> chain = getEffectChain_l(sessionId);
+        sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
         if (chain.get() != nullptr) {
             // Bit-perfect is required according to the configuration and preferred mixer
             // attributes, but it is not in the output flag from the client's request. Explicitly
@@ -2407,7 +2408,7 @@
                     AUDIO_SESSION_OUTPUT_MIX,
                     sessionId,
                 }) {
-                sp<EffectChain> chain = getEffectChain_l(session);
+                sp<IAfEffectChain> chain = getEffectChain_l(session);
                 if (chain.get() != nullptr) {
                     audio_output_flags_t old = *flags;
                     chain->checkOutputFlagCompatibility(flags);
@@ -2657,7 +2658,7 @@
             }
         }
 
-        sp<EffectChain> chain = getEffectChain_l(sessionId);
+        sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
         if (chain != 0) {
             ALOGV("createTrack_l() setting main buffer %p", chain->inBuffer());
             track->setMainBuffer(chain->inBuffer());
@@ -2825,7 +2826,7 @@
                     track->sharedBuffer() != 0 ? Track::FS_FILLED : Track::FS_FILLING;
         }
 
-        sp<EffectChain> chain = getEffectChain_l(track->sessionId());
+        sp<IAfEffectChain> chain = getEffectChain_l(track->sessionId());
         if (mHapticChannelMask != AUDIO_CHANNEL_NONE
                 && ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
                         || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
@@ -2918,7 +2919,7 @@
         // redundant as track is about to be destroyed, for dumpsys only
         track->mFastIndex = -1;
     }
-    sp<EffectChain> chain = getEffectChain_l(track->sessionId());
+    sp<IAfEffectChain> chain = getEffectChain_l(track->sessionId());
     if (chain != 0) {
         chain->decTrackCnt();
     }
@@ -3206,7 +3207,7 @@
     // but in this case nothing is done below as no audio sessions have effect yet so it doesn't
     // matter.
     // create a copy of mEffectChains as calling moveEffectChain_l() can reorder some effect chains
-    Vector< sp<EffectChain> > effectChains = mEffectChains;
+    Vector<sp<IAfEffectChain>> effectChains = mEffectChains;
     for (size_t i = 0; i < effectChains.size(); i ++) {
         mAudioFlinger->moveEffectChain_l(effectChains[i]->sessionId(),
             this/* srcThread */, this/* dstThread */);
@@ -3617,7 +3618,7 @@
     return nullptr;
 }
 
-status_t AudioFlinger::PlaybackThread::addEffectChain_l(const sp<EffectChain>& chain)
+status_t AudioFlinger::PlaybackThread::addEffectChain_l(const sp<IAfEffectChain>& chain)
 {
     audio_session_t session = chain->sessionId();
     sp<EffectBufferHalInterface> halInBuffer, halOutBuffer;
@@ -3753,7 +3754,7 @@
     return NO_ERROR;
 }
 
-size_t AudioFlinger::PlaybackThread::removeEffectChain_l(const sp<EffectChain>& chain)
+size_t AudioFlinger::PlaybackThread::removeEffectChain_l(const sp<IAfEffectChain>& chain)
 {
     audio_session_t session = chain->sessionId();
 
@@ -3801,7 +3802,7 @@
         track->setAuxBuffer(0, NULL);
     } else {
         // Auxiliary effects are always in audio session AUDIO_SESSION_OUTPUT_MIX
-        sp<EffectModule> effect = getEffect_l(AUDIO_SESSION_OUTPUT_MIX, EffectId);
+        sp<IAfEffectModule> effect = getEffect_l(AUDIO_SESSION_OUTPUT_MIX, EffectId);
         if (effect != 0) {
             if ((effect->desc().flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
                 track->setAuxBuffer(EffectId, (int32_t *)effect->inBuffer());
@@ -3882,7 +3883,7 @@
 
         cpuStats.sample(myName);
 
-        Vector< sp<EffectChain> > effectChains;
+        Vector<sp<IAfEffectChain>> effectChains;
         audio_session_t activeHapticSessionId = AUDIO_SESSION_NONE;
         bool isHapticSessionSpatialized = false;
         std::vector<sp<Track>> activeTracks;
@@ -4048,7 +4049,7 @@
             // TODO: Write haptic data directly to sink buffer when mixing.
             if (mHapticChannelCount > 0) {
                 for (const auto& track : mActiveTracks) {
-                    sp<EffectChain> effectChain = getEffectChain_l(track->sessionId());
+                    sp<IAfEffectChain> effectChain = getEffectChain_l(track->sessionId());
                     if (effectChain != nullptr
                             && effectChain->containsHapticGeneratingEffect_l()) {
                         activeHapticSessionId = track->sessionId();
@@ -4622,7 +4623,7 @@
     for (const auto& track : tracksToRemove) {
         mActiveTracks.remove(track);
         ALOGV("%s(%d): removing track on session %d", __func__, track->id(), track->sessionId());
-        sp<EffectChain> chain = getEffectChain_l(track->sessionId());
+        sp<IAfEffectChain> chain = getEffectChain_l(track->sessionId());
         if (chain != 0) {
             ALOGV("%s(%d): stopping track on chain %p for session Id: %d",
                     __func__, track->id(), chain.get(), track->sessionId());
@@ -5343,7 +5344,7 @@
         masterVolume = 0;
     }
     // Delegate master volume control to effect in output mix effect chain if needed
-    sp<EffectChain> chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
+    sp<IAfEffectChain> chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
     if (chain != 0) {
         uint32_t v = (uint32_t)(masterVolume * (1 << 24));
         chain->setVolume_l(&v, &v);
@@ -7714,10 +7715,10 @@
 {
     bool hasVirtualizer = false;
     bool hasDownMixer = false;
-    sp<EffectHandle> finalDownMixer;
+    sp<IAfEffectHandle> finalDownMixer;
     {
         Mutex::Autolock _l(mLock);
-        sp<EffectChain> chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_STAGE);
+        sp<IAfEffectChain> chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_STAGE);
         if (chain != 0) {
             hasVirtualizer = chain->getEffectFromType_l(FX_IID_SPATIALIZER) != nullptr;
             hasDownMixer = chain->getEffectFromType_l(EFFECT_UIID_DOWNMIX) != nullptr;
@@ -7730,7 +7731,7 @@
     if (hasVirtualizer) {
         if (finalDownMixer != nullptr) {
             int32_t ret;
-            finalDownMixer->disable(&ret);
+            finalDownMixer->asIEffect()->disable(&ret);
         }
         finalDownMixer.clear();
     } else if (!hasDownMixer) {
@@ -7752,7 +7753,7 @@
             finalDownMixer.clear();
         } else {
             int32_t ret;
-            finalDownMixer->enable(&ret);
+            finalDownMixer->asIEffect()->enable(&ret);
         }
     }
 
@@ -7984,7 +7985,7 @@
 
     // loop while there is work to do
     for (int64_t loopCount = 0;; ++loopCount) {  // loopCount used for statistics tracking
-        Vector< sp<EffectChain> > effectChains;
+        Vector<sp<IAfEffectChain>> effectChains;
 
         // activeTracks accumulates a copy of a subset of mActiveTracks
         Vector< sp<RecordTrack> > activeTracks;
@@ -8671,7 +8672,7 @@
           // check compatibility with audio effects.
           Mutex::Autolock _l(mLock);
           // Do not accept FAST flag if the session has software effects
-          sp<EffectChain> chain = getEffectChain_l(sessionId);
+          sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
           if (chain != 0) {
               audio_input_flags_t old = *flags;
               chain->checkInputFlagCompatibility(flags);
@@ -9496,7 +9497,7 @@
     return mInput->stream;
 }
 
-status_t AudioFlinger::RecordThread::addEffectChain_l(const sp<EffectChain>& chain)
+status_t AudioFlinger::RecordThread::addEffectChain_l(const sp<IAfEffectChain>& chain)
 {
     ALOGV("addEffectChain_l() %p on thread %p", chain.get(), this);
     chain->setThread(this);
@@ -9514,7 +9515,7 @@
     return NO_ERROR;
 }
 
-size_t AudioFlinger::RecordThread::removeEffectChain_l(const sp<EffectChain>& chain)
+size_t AudioFlinger::RecordThread::removeEffectChain_l(const sp<IAfEffectChain>& chain)
 {
     ALOGV("removeEffectChain_l() %p from thread %p", chain.get(), this);
 
@@ -10042,7 +10043,7 @@
     }
 
     mActiveTracks.add(track);
-    sp<EffectChain> chain = getEffectChain_l(mSessionId);
+    sp<IAfEffectChain> chain = getEffectChain_l(mSessionId);
     if (chain != 0) {
         chain->setStrategy(getStrategyForStream(streamType()));
         chain->incTrackCnt();
@@ -10102,7 +10103,7 @@
     }
     mLock.lock();
 
-    sp<EffectChain> chain = getEffectChain_l(track->sessionId());
+    sp<IAfEffectChain> chain = getEffectChain_l(track->sessionId());
     if (chain != 0) {
         chain->decActiveTrackCnt();
         chain->decTrackCnt();
@@ -10186,7 +10187,7 @@
 
     while (!exitPending())
     {
-        Vector< sp<EffectChain> > effectChains;
+        Vector<sp<IAfEffectChain>> effectChains;
 
         { // under Thread lock
         Mutex::Autolock _l(mLock);
@@ -10429,7 +10430,7 @@
     }
 }
 
-status_t AudioFlinger::MmapThread::addEffectChain_l(const sp<EffectChain>& chain)
+status_t AudioFlinger::MmapThread::addEffectChain_l(const sp<IAfEffectChain>& chain)
 {
     audio_session_t session = chain->sessionId();
 
@@ -10453,7 +10454,7 @@
     return NO_ERROR;
 }
 
-size_t AudioFlinger::MmapThread::removeEffectChain_l(const sp<EffectChain>& chain)
+size_t AudioFlinger::MmapThread::removeEffectChain_l(const sp<IAfEffectChain>& chain)
 {
     audio_session_t session = chain->sessionId();
 
@@ -10524,7 +10525,7 @@
         return BAD_VALUE;
     }
 
-    if (EffectModule::isHapticGenerator(&desc->type)) {
+    if (IAfEffectModule::isHapticGenerator(&desc->type)) {
         ALOGE("%s(): HapticGenerator is not supported for MmapThread", __func__);
         return BAD_VALUE;
     }
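
Most of the Threads.cpp churn is mechanical (EffectChain becomes IAfEffectChain, plus the new create() and asIEffect() indirections), but the lockEffectChains_l()/unlockEffectChains() pair above encodes a real protocol: every chain mutex is taken while the thread mutex is still held, the thread mutex is then released for mixing, and the chains are unlocked afterwards. A simplified sketch of that hand-off, assuming plain std::mutex instead of Android's Mutex and sp<>:

    #include <mutex>
    #include <vector>

    struct Chain { std::mutex lock; };

    void mixWithEffects(std::mutex& threadLock, std::vector<Chain*>& chains) {
        std::unique_lock<std::mutex> threadGuard(threadLock);
        std::vector<Chain*> locked = chains;         // snapshot, like the mEffectChains copy
        for (Chain* c : locked) c->lock.lock();      // lockEffectChains_l()
        threadGuard.unlock();                        // chains cannot be re-shaped while locked
        // ... process the mixer and effects here ...
        for (Chain* c : locked) c->lock.unlock();    // unlockEffectChains()
    }
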
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index b2700db..8620c15 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -19,6 +19,7 @@
     #error This header file should only be included from AudioFlinger.h
 #endif
 
+public: // TODO(b/288339104) extract out of AudioFlinger class
 class ThreadBase : public Thread {
 public:
 
@@ -409,7 +410,7 @@
 
     virtual     sp<StreamHalInterface> stream() const = 0;
 
-                sp<EffectHandle> createEffect_l(
+                sp<IAfEffectHandle> createEffect_l(
                                     const sp<AudioFlinger::Client>& client,
                                     const sp<media::IEffectClient>& effectClient,
                                     int32_t priority,
@@ -436,37 +437,37 @@
                 };
 
                 // get effect chain corresponding to session Id.
-                sp<EffectChain> getEffectChain(audio_session_t sessionId);
+                sp<IAfEffectChain> getEffectChain(audio_session_t sessionId);
                 // same as getEffectChain() but must be called with ThreadBase mutex locked
-                sp<EffectChain> getEffectChain_l(audio_session_t sessionId) const;
+                sp<IAfEffectChain> getEffectChain_l(audio_session_t sessionId) const;
                 std::vector<int> getEffectIds_l(audio_session_t sessionId);
                 // add an effect chain to the chain list (mEffectChains)
-    virtual     status_t addEffectChain_l(const sp<EffectChain>& chain) = 0;
+    virtual     status_t addEffectChain_l(const sp<IAfEffectChain>& chain) = 0;
                 // remove an effect chain from the chain list (mEffectChains)
-    virtual     size_t removeEffectChain_l(const sp<EffectChain>& chain) = 0;
+    virtual     size_t removeEffectChain_l(const sp<IAfEffectChain>& chain) = 0;
                 // lock all effect chains Mutexes. Must be called before releasing the
                 // ThreadBase mutex before processing the mixer and effects. This guarantees the
                 // integrity of the chains during the process.
                 // Also sets the parameter 'effectChains' to current value of mEffectChains.
-                void lockEffectChains_l(Vector< sp<EffectChain> >& effectChains);
+                void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains);
                 // unlock effect chains after process
-                void unlockEffectChains(const Vector< sp<EffectChain> >& effectChains);
+                void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains);
                 // get a copy of mEffectChains vector
-                Vector< sp<EffectChain> > getEffectChains_l() const { return mEffectChains; };
+                Vector<sp<IAfEffectChain>> getEffectChains_l() const { return mEffectChains; };
                 // set audio mode to all effect chains
                 void setMode(audio_mode_t mode);
                 // get effect module with corresponding ID on specified audio session
-                sp<AudioFlinger::EffectModule> getEffect(audio_session_t sessionId, int effectId);
-                sp<AudioFlinger::EffectModule> getEffect_l(audio_session_t sessionId, int effectId);
+                sp<IAfEffectModule> getEffect(audio_session_t sessionId, int effectId);
+                sp<IAfEffectModule> getEffect_l(audio_session_t sessionId, int effectId);
                 // add an effect module. Also creates the effect chain if none exists for
                 // the effect's audio session. Only called in the context of moving an effect
                 // from one thread to another
-                status_t addEffect_l(const sp< EffectModule>& effect);
+                status_t addEffect_l(const sp<IAfEffectModule>& effect);
                 // remove an effect module. Also removes the effect chain if this was the last
                 // effect
-                void removeEffect_l(const sp< EffectModule>& effect, bool release = false);
+                void removeEffect_l(const sp<IAfEffectModule>& effect, bool release = false);
                 // disconnect an effect handle from module and destroy module if last handle
-                void disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast);
+                void disconnectEffectHandle(IAfEffectHandle *handle, bool unpinIfLast);
                 // detach all tracks connected to an auxiliary effect
     virtual     void detachAuxEffect_l(int effectId __unused) {}
                 // returns a combination of:
@@ -552,7 +553,7 @@
 
     mutable     Mutex                   mLock;
 
-                void onEffectEnable(const sp<EffectModule>& effect);
+                void onEffectEnable(const sp<IAfEffectModule>& effect);
                 void onEffectDisable();
 
                 // invalidateTracksForAudioSession_l must be called with holding mLock.
@@ -607,7 +608,7 @@
                                                       bool suspend,
                                                       audio_session_t sessionId);
                 // check if some effects must be suspended when an effect chain is added
-                void checkSuspendOnAddEffectChain_l(const sp<EffectChain>& chain);
+                void checkSuspendOnAddEffectChain_l(const sp<IAfEffectChain>& chain);
 
                 // sends the metadata of the active tracks to the HAL
                 struct MetadataUpdate {
@@ -627,8 +628,10 @@
                                     ExtendedTimestamp *timestamp __unused) const {
                                 return INVALID_OPERATION;
                             }
-
+public:
+// TODO(b/288339104) organize with publics
                 product_strategy_t getStrategyForStream(audio_stream_type_t stream) const;
+protected:
 
     virtual     void        onHalLatencyModesChanged_l() {}
 
@@ -680,7 +683,7 @@
                 audio_source_t          mAudioSource;
 
                 const audio_io_handle_t mId;
-                Vector< sp<EffectChain> > mEffectChains;
+                Vector<sp<IAfEffectChain>> mEffectChains;
 
                 static const int        kThreadNameLength = 16; // prctl(PR_SET_NAME) limit
                 char                    mThreadName[kThreadNameLength]; // guaranteed NUL-terminated
@@ -844,6 +847,8 @@
                 void dumpEffectChains_l(int fd, const Vector<String16>& args);
 };
 
+private:
+
 class VolumeInterface {
  public:
 
@@ -1030,8 +1035,8 @@
                 status_t attachAuxEffect_l(const sp<AudioFlinger::PlaybackThread::Track>& track,
                         int EffectId);
 
-                virtual status_t addEffectChain_l(const sp<EffectChain>& chain);
-                virtual size_t removeEffectChain_l(const sp<EffectChain>& chain);
+                virtual status_t addEffectChain_l(const sp<IAfEffectChain>& chain);
+                virtual size_t removeEffectChain_l(const sp<IAfEffectChain>& chain);
                         uint32_t hasAudioSession_l(audio_session_t sessionId) const override {
                             return ThreadBase::hasAudioSession_l(sessionId, mTracks);
                         }
@@ -1857,7 +1862,7 @@
             // Do not request a specific mode by default
             audio_latency_mode_t mRequestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
 
-            sp<EffectHandle> mFinalDownMixer;
+            sp<IAfEffectHandle> mFinalDownMixer;
 };
 
 // record thread
@@ -1984,8 +1989,8 @@
             void        readInputParameters_l();
     virtual uint32_t    getInputFramesLost();
 
-    virtual status_t addEffectChain_l(const sp<EffectChain>& chain);
-    virtual size_t removeEffectChain_l(const sp<EffectChain>& chain);
+    virtual status_t addEffectChain_l(const sp<IAfEffectChain>& chain);
+    virtual size_t removeEffectChain_l(const sp<IAfEffectChain>& chain);
             uint32_t hasAudioSession_l(audio_session_t sessionId) const override {
                          return ThreadBase::hasAudioSession_l(sessionId, mTracks);
                      }
@@ -2193,8 +2198,8 @@
     virtual     void        toAudioPortConfig(struct audio_port_config *config);
 
     virtual     sp<StreamHalInterface> stream() const { return mHalStream; }
-    virtual     status_t    addEffectChain_l(const sp<EffectChain>& chain);
-    virtual     size_t      removeEffectChain_l(const sp<EffectChain>& chain);
+    virtual     status_t    addEffectChain_l(const sp<IAfEffectChain>& chain);
+    virtual     size_t      removeEffectChain_l(const sp<IAfEffectChain>& chain);
     virtual     status_t    checkEffectCompatibility_l(const effect_descriptor_t *desc,
                                                                audio_session_t sessionId);
 
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 00c88bc..e23b150 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1064,7 +1064,7 @@
         if (isOffloaded()) {
             Mutex::Autolock _laf(thread->mAudioFlinger->mLock);
             Mutex::Autolock _lth(thread->mLock);
-            sp<EffectChain> ec = thread->getEffectChain_l(mSessionId);
+            sp<IAfEffectChain> ec = thread->getEffectChain_l(mSessionId);
             if (thread->mAudioFlinger->isNonOffloadableGlobalEffectEnabled_l() ||
                     (ec != 0 && ec->isNonOffloadableEnabled())) {
                 invalidate();
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index cd46279..0e971b0 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1325,10 +1325,15 @@
         AudioProfileVector profiles;
         status_t ret = getProfilesForDevices(outputDevices, profiles, *flags, false /*isInput*/);
         if (ret == NO_ERROR && !profiles.empty()) {
-            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
-                    : *profiles[0]->getChannels().begin();
-            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
-                    : *profiles[0]->getSampleRates().begin();
+            const auto channels = profiles[0]->getChannels();
+            if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
+                config->channel_mask = *channels.begin();
+            }
+            const auto sampleRates = profiles[0]->getSampleRates();
+            if (!sampleRates.empty() &&
+                    (sampleRates.find(config->sample_rate) == sampleRates.end())) {
+                config->sample_rate = *sampleRates.begin();
+            }
             config->format = profiles[0]->getFormat();
         }
         return INVALID_OPERATION;
@@ -2774,10 +2779,15 @@
         status_t ret = getProfilesForDevices(
                 DeviceVector(device), profiles, flags, true /*isInput*/);
         if (ret == NO_ERROR && !profiles.empty()) {
-            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
-                    : *profiles[0]->getChannels().begin();
-            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
-                    : *profiles[0]->getSampleRates().begin();
+            const auto channels = profiles[0]->getChannels();
+            if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
+                config->channel_mask = *channels.begin();
+            }
+            const auto sampleRates = profiles[0]->getSampleRates();
+            if (!sampleRates.empty() &&
+                    (sampleRates.find(config->sample_rate) == sampleRates.end())) {
+                config->sample_rate = *sampleRates.begin();
+            }
             config->format = profiles[0]->getFormat();
         }
         goto error;
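
Both hunks above change the suggested configuration on the error path from "always take the first value of the first matching profile" to "keep the caller's requested channel mask and sample rate whenever the profile already supports them, and only otherwise fall back to the profile's first value." A standalone sketch of that logic, with std::set standing in for the set types returned by getChannels()/getSampleRates():

    #include <cstdint>
    #include <set>

    struct Config { uint32_t channel_mask; uint32_t sample_rate; };

    void suggestConfig(Config* config,
                       const std::set<uint32_t>& channels,
                       const std::set<uint32_t>& sampleRates) {
        if (!channels.empty() && channels.find(config->channel_mask) == channels.end()) {
            config->channel_mask = *channels.begin();   // requested mask unsupported
        }
        if (!sampleRates.empty() &&
                sampleRates.find(config->sample_rate) == sampleRates.end()) {
            config->sample_rate = *sampleRates.begin(); // requested rate unsupported
        }
    }
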
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index f4fc8f1..3a530aa 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -7,26 +7,8 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_library_shared {
-    name: "libaudiopolicyservice",
-
-    defaults: [
-        "latest_android_media_audio_common_types_cpp_shared",
-    ],
-
-    srcs: [
-        "AudioPolicyClientImpl.cpp",
-        "AudioPolicyEffects.cpp",
-        "AudioPolicyInterfaceImpl.cpp",
-        "AudioPolicyService.cpp",
-        "CaptureStateNotifier.cpp",
-        "Spatializer.cpp",
-        "SpatializerPoseController.cpp",
-    ],
-
-    include_dirs: [
-        "frameworks/av/services/audioflinger"
-    ],
+cc_defaults {
+    name: "libaudiopolicyservice_dependencies",
 
     shared_libs: [
         "libactivitymanager_aidl",
@@ -41,7 +23,6 @@
         "libaudioutils",
         "libbinder",
         "libcutils",
-        "libeffectsconfig",
         "libhardware_legacy",
         "libheadtracking",
         "libheadtracking-binding",
@@ -67,6 +48,35 @@
     ],
 
     static_libs: [
+        "libeffectsconfig",
+        "libaudiopolicycomponents",
+    ]
+}
+
+cc_library {
+    name: "libaudiopolicyservice",
+
+    defaults: [
+        "libaudiopolicyservice_dependencies",
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
+    srcs: [
+        "AudioPolicyClientImpl.cpp",
+        "AudioPolicyEffects.cpp",
+        "AudioPolicyInterfaceImpl.cpp",
+        "AudioPolicyService.cpp",
+        "CaptureStateNotifier.cpp",
+        "Spatializer.cpp",
+        "SpatializerPoseController.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger"
+    ],
+
+
+    static_libs: [
         "framework-permission-aidl-cpp",
     ],
 
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index a45365a..84dcf26 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -35,9 +35,91 @@
     ],
 }
 
-cc_library_shared {
+cc_defaults {
+    name: "libcameraservice_deps",
+
+    shared_libs: [
+        "libactivitymanager_aidl",
+        "libbase",
+        "libdl",
+        "libui",
+        "liblog",
+        "libutilscallstack",
+        "libutils",
+        "libbinder",
+        "libbinder_ndk",
+        "libactivitymanager_aidl",
+        "libpermission",
+        "libcutils",
+        "libexif",
+        "libmedia",
+        "libmediautils",
+        "libcamera_client",
+        "libcamera_metadata",
+        "libfmq",
+        "libgui",
+        "libhardware",
+        "libhidlbase",
+        "libimage_io",
+        "libjpeg",
+        "libultrahdr",
+        "libmedia_codeclist",
+        "libmedia_omx",
+        "libmemunreachable",
+        "libprocessgroup",
+        "libprocinfo",
+        "libsensorprivacy",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libxml2",
+        "libyuv",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.3",
+        "android.hardware.camera.device@3.4",
+        "android.hardware.camera.device@3.5",
+        "android.hardware.camera.device@3.6",
+        "android.hardware.camera.device@3.7",
+        "android.hardware.common-V2-ndk",
+        "android.hardware.common.fmq-V1-ndk",
+        "android.hardware.graphics.common-V4-ndk",
+        "media_permission-aidl-cpp",
+    ],
+
+    static_libs: [
+        "android.frameworks.cameraservice.common@2.0",
+        "android.frameworks.cameraservice.service@2.0",
+        "android.frameworks.cameraservice.service@2.1",
+        "android.frameworks.cameraservice.service@2.2",
+        "android.frameworks.cameraservice.device@2.0",
+        "android.frameworks.cameraservice.device@2.1",
+        "android.frameworks.cameraservice.common-V1-ndk",
+        "android.frameworks.cameraservice.service-V1-ndk",
+        "android.frameworks.cameraservice.device-V1-ndk",
+        "android.hardware.camera.common-V1-ndk",
+        "android.hardware.camera.device-V2-ndk",
+        "android.hardware.camera.metadata-V2-ndk",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V2-ndk",
+        "libaidlcommonsupport",
+        "libdynamic_depth",
+        "libprocessinfoservice_aidl",
+        "libbinderthreadstateutils",
+        "media_permission-aidl-cpp",
+        "libcameraservice_device_independent",
+    ],
+}
+
+cc_library {
     name: "libcameraservice",
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
     // Camera service source
 
     srcs: [
@@ -105,6 +187,7 @@
         "utils/CameraThreadState.cpp",
         "utils/CameraTraces.cpp",
         "utils/AutoConditionLock.cpp",
+        "utils/SchedulingPolicyUtils.cpp",
         "utils/SessionConfigurationUtils.cpp",
         "utils/SessionConfigurationUtilsHidl.cpp",
         "utils/SessionStatsBuilder.cpp",
@@ -119,73 +202,6 @@
         "libmediametrics_headers",
     ],
 
-    shared_libs: [
-        "libactivitymanager_aidl",
-        "libbase",
-        "libdl",
-        "libexif",
-        "libui",
-        "liblog",
-        "libutilscallstack",
-        "libutils",
-        "libbinder",
-        "libbinder_ndk",
-        "libactivitymanager_aidl",
-        "libpermission",
-        "libcutils",
-        "libmedia",
-        "libmediautils",
-        "libcamera_client",
-        "libcamera_metadata",
-        "libdynamic_depth",
-        "libfmq",
-        "libgui",
-        "libhardware",
-        "libhidlbase",
-        "libimage_io",
-        "libjpeg",
-        "libultrahdr",
-        "libmedia_codeclist",
-        "libmedia_omx",
-        "libmemunreachable",
-        "libsensorprivacy",
-        "libstagefright",
-        "libstagefright_foundation",
-        "libxml2",
-        "libyuv",
-        "android.frameworks.cameraservice.common@2.0",
-        "android.frameworks.cameraservice.service@2.0",
-        "android.frameworks.cameraservice.service@2.1",
-        "android.frameworks.cameraservice.service@2.2",
-        "android.frameworks.cameraservice.device@2.0",
-        "android.frameworks.cameraservice.device@2.1",
-        "android.frameworks.cameraservice.common-V1-ndk",
-        "android.frameworks.cameraservice.service-V1-ndk",
-        "android.frameworks.cameraservice.device-V1-ndk",
-        "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
-        "android.hardware.camera.device@3.2",
-        "android.hardware.camera.device@3.3",
-        "android.hardware.camera.device@3.4",
-        "android.hardware.camera.device@3.5",
-        "android.hardware.camera.device@3.6",
-        "android.hardware.camera.device@3.7",
-        "android.hardware.camera.device-V2-ndk",
-        "media_permission-aidl-cpp",
-    ],
-
-    static_libs: [
-        "libaidlcommonsupport",
-        "libprocessinfoservice_aidl",
-        "libbinderthreadstateutils",
-        "media_permission-aidl-cpp",
-        "libcameraservice_device_independent",
-    ],
-
     export_shared_lib_headers: [
         "libbinder",
         "libactivitymanager_aidl",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 668a51a..ca894fe 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -87,6 +87,7 @@
     const char* kActivityServiceName = "activity";
     const char* kSensorPrivacyServiceName = "sensor_privacy";
     const char* kAppopsServiceName = "appops";
+    const char* kProcessInfoServiceName = "processinfo";
 }; // namespace anonymous
 
 namespace android {
@@ -145,6 +146,7 @@
 static constexpr int32_t kSystemNativeClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
 static constexpr int32_t kSystemNativeClientState =
         ActivityManager::PROCESS_STATE_PERSISTENT_UI;
+static const String16 kServiceName("cameraserver");
 
 const String8 CameraService::kOfflineDevice("offline-");
 const String16 CameraService::kWatchAllClientsFlag("all");
@@ -701,17 +703,100 @@
     broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
-static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
+static bool isAutomotiveDevice() {
+    // Checks the property ro.hardware.type and returns true if it is
+    // automotive.
+    char value[PROPERTY_VALUE_MAX] = {0};
+    property_get("ro.hardware.type", value, "");
+    return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+static bool isAutomotivePrivilegedClient(int32_t uid) {
+    // Returns false if this is not an automotive device type.
+    if (!isAutomotiveDevice())
+        return false;
+
+    // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
+    // privileged client uid used for safety critical use cases such as
+    // rear view and surround view.
+    return uid == AID_AUTOMOTIVE_EVS;
+}
+
+bool CameraService::isAutomotiveExteriorSystemCamera(const String8& cam_id) const {
+    // Returns false if this is not an automotive device type.
+    if (!isAutomotiveDevice())
+        return false;
+
+    // Returns false if no camera id is provided.
+    if (cam_id.isEmpty())
+        return false;
+
+    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
+    if (getSystemCameraKind(cam_id, &systemCameraKind) != OK) {
+        // This isn't a known camera ID, so it's not a system camera.
+        ALOGE("%s: Unknown camera id %s, ", __FUNCTION__, cam_id.c_str());
+        return false;
+    }
+
+    if (systemCameraKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) {
+        ALOGE("%s: camera id %s is not a system camera", __FUNCTION__, cam_id.c_str());
+        return false;
+    }
+
+    CameraMetadata cameraInfo;
+    status_t res = mCameraProviderManager->getCameraCharacteristics(
+            cam_id.string(), false, &cameraInfo, false);
+    if (res != OK) {
+        ALOGE("%s: Unable to get camera characteristics for camera id %s", __FUNCTION__,
+                cam_id.c_str());
+        return false;
+    }
+
+    camera_metadata_entry auto_location = cameraInfo.find(ANDROID_AUTOMOTIVE_LOCATION);
+    if (auto_location.count != 1)
+        return false;
+
+    uint8_t location = auto_location.data.u8[0];
+    if ((location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_FRONT) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_REAR) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_LEFT) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT)) {
+        return false;
+    }
+
+    return true;
+}
+
+bool CameraService::checkPermission(const String8& cam_id, const String16& permission,
+        const AttributionSourceState& attributionSource, const String16& message,
+        int32_t attributedOpCode) const {
+    if (isAutomotivePrivilegedClient(attributionSource.uid)) {
+        // If cam_id is empty, this check is not for accessing a specific camera, so grant
+        // the permission to the automotive privileged client based on uid alone.
+        if (cam_id.isEmpty())
+            return true;
+        // If this check is used to access a specific camera, then cam_id must be provided.
+        // In that case, pre-grant the permission only for the exterior system-only camera.
+        return isAutomotiveExteriorSystemCamera(cam_id);
+    }
+
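+    // For all other clients, fall back to the system permission checker.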
     permission::PermissionChecker permissionChecker;
+    return permissionChecker.checkPermissionForPreflight(permission, attributionSource,
+            message, attributedOpCode) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
+bool CameraService::hasPermissionsForSystemCamera(const String8& cam_id, int callingPid,
+        int callingUid) const {
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
-            sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-            sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForSystemCamera = checkPermission(cam_id,
+            sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE);
+    bool checkPermissionForCamera = checkPermission(cam_id,
+            sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE);
     return checkPermissionForSystemCamera && checkPermissionForCamera;
 }
 
@@ -719,7 +804,7 @@
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
     bool hasSystemCameraPermissions =
-            hasPermissionsForSystemCamera(CameraThreadState::getCallingPid(),
+            hasPermissionsForSystemCamera(String8(), CameraThreadState::getCallingPid(),
                     CameraThreadState::getCallingUid());
     switch (type) {
         case CAMERA_TYPE_BACKWARD_COMPATIBLE:
@@ -760,9 +845,8 @@
         return STATUS_ERROR(ERROR_DISCONNECTED,
                 "Camera subsystem is not available");
     }
-    bool hasSystemCameraPermissions =
-            hasPermissionsForSystemCamera(CameraThreadState::getCallingPid(),
-                    CameraThreadState::getCallingUid());
+    bool hasSystemCameraPermissions = hasPermissionsForSystemCamera(String8::format("%d", cameraId),
+            CameraThreadState::getCallingPid(), CameraThreadState::getCallingUid());
     int cameraIdBound = mNumberOfCamerasWithoutSystemCamera;
     if (hasSystemCameraPermissions) {
         cameraIdBound = mNumberOfCameras;
@@ -791,13 +875,11 @@
     const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
     auto callingPid = CameraThreadState::getCallingPid();
     auto callingUid = CameraThreadState::getCallingUid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
-                sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForSystemCamera = checkPermission(String8::format("%d", cameraIdInt),
+                sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE);
     if (checkPermissionForSystemCamera || getpid() == callingPid) {
         deviceIds = &mNormalDeviceIds;
     }
@@ -869,13 +951,11 @@
     // If it's not calling from cameraserver, check the permission only if
     // android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
     // it would've already been checked in shouldRejectSystemCameraConnection.
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-                sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(String8(cameraId), sCameraPermission,
+            attributionSource, String16(), AppOpsManager::OP_NONE);
     if ((callingPid != getpid()) &&
             (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
             !checkPermissionForCamera) {
@@ -1154,13 +1234,12 @@
 Status CameraService::initializeShimMetadata(int cameraId) {
     int uid = CameraThreadState::getCallingUid();
 
-    String16 internalPackageName("cameraserver");
     String8 id = String8::format("%d", cameraId);
     Status ret = Status::ok();
     sp<Client> tmp = nullptr;
     if (!(ret = connectHelper<ICameraClient,Client>(
             sp<ICameraClient>{nullptr}, id, cameraId,
-            internalPackageName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
+            kServiceName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
             /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
             /*forceSlowJpegMode*/false, /*out*/ tmp)
@@ -1314,7 +1393,6 @@
 Status CameraService::validateClientPermissionsLocked(const String8& cameraId,
         const String8& clientName8, int& clientUid, int& clientPid,
         /*out*/int& originalClientPid) const {
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
 
     int callingPid = CameraThreadState::getCallingPid();
@@ -1366,9 +1444,8 @@
     attributionSource.pid = clientPid;
     attributionSource.uid = clientUid;
     attributionSource.packageName = clientName8;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-            sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission, attributionSource,
+            String16(), AppOpsManager::OP_NONE);
     if (callingPid != getpid() &&
                 (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
@@ -1389,8 +1466,13 @@
                 callingUid, procState);
     }
 
-    // If sensor privacy is enabled then prevent access to the camera
-    if (mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
+    // Automotive privileged clients (AID_AUTOMOTIVE_EVS) using an exterior system camera for use
+    // cases such as rear view and surround view cannot be disabled and are exempt from the sensor
+    // privacy policy. In all other cases, if sensor privacy is enabled then prevent access to the
+    // camera.
+    if ((!isAutomotivePrivilegedClient(callingUid) ||
+            !isAutomotiveExteriorSystemCamera(cameraId)) &&
+            mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
         ALOGE("Access Denial: cannot use the camera when sensor privacy is enabled");
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" when sensor privacy "
@@ -1508,33 +1590,6 @@
             }
         }
 
-        // Get current active client PIDs
-        std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
-        ownerPids.push_back(clientPid);
-
-        std::vector<int> priorityScores(ownerPids.size());
-        std::vector<int> states(ownerPids.size());
-
-        // Get priority scores of all active PIDs
-        status_t err = ProcessInfoService::getProcessStatesScoresFromPids(
-                ownerPids.size(), &ownerPids[0], /*out*/&states[0],
-                /*out*/&priorityScores[0]);
-        if (err != OK) {
-            ALOGE("%s: Priority score query failed: %d",
-                  __FUNCTION__, err);
-            return err;
-        }
-
-        // Update all active clients' priorities
-        std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
-        for (size_t i = 0; i < ownerPids.size() - 1; i++) {
-            pidToPriorityMap.emplace(ownerPids[i],
-                    resource_policy::ClientPriority(priorityScores[i], states[i],
-                            /* isVendorClient won't get copied over*/ false,
-                            /* oomScoreOffset won't get copied over*/ 0));
-        }
-        mActiveClientManager.updatePriorities(pidToPriorityMap);
-
         // Get state for the given cameraId
         auto state = getCameraState(cameraId);
         if (state == nullptr) {
@@ -1544,16 +1599,57 @@
             return BAD_VALUE;
         }
 
-        int32_t actualScore = priorityScores[priorityScores.size() - 1];
-        int32_t actualState = states[states.size() - 1];
+        sp<IServiceManager> sm = defaultServiceManager();
+        sp<IBinder> binder = sm->checkService(String16(kProcessInfoServiceName));
+        if (!binder && isAutomotivePrivilegedClient(CameraThreadState::getCallingUid())) {
+            // If the processinfo service is not available and the client is an automotive
+            // privileged client used for safety-critical use cases such as rear view and
+            // surround view, which must be available before Android boot completes, use
+            // hardcoded values for the process state and priority score. Since this happens
+            // before the Android system services are up and the client is a native client,
+            // use NATIVE_ADJ as the priority score and PROCESS_STATE_BOUND_TOP as the state,
+            // so that such automotive apps stay visible on top.
+            clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
+                    sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
+                    state->getConflicting(), resource_policy::NATIVE_ADJ, clientPid,
+                    ActivityManager::PROCESS_STATE_BOUND_TOP, oomScoreOffset, systemNativeClient);
+        } else {
+            // Get current active client PIDs
+            std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
+            ownerPids.push_back(clientPid);
 
-        // Make descriptor for incoming client. We store the oomScoreOffset
-        // since we might need it later on new handleEvictionsLocked and
-        // ProcessInfoService would not take that into account.
-        clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
-                sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
-                state->getConflicting(), actualScore, clientPid, actualState,
-                oomScoreOffset, systemNativeClient);
+            std::vector<int> priorityScores(ownerPids.size());
+            std::vector<int> states(ownerPids.size());
+
+            // Get priority scores of all active PIDs
+            status_t err = ProcessInfoService::getProcessStatesScoresFromPids(ownerPids.size(),
+                    &ownerPids[0], /*out*/&states[0], /*out*/&priorityScores[0]);
+            if (err != OK) {
+                ALOGE("%s: Priority score query failed: %d", __FUNCTION__, err);
+                return err;
+            }
+
+            // Update all active clients' priorities
+            std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
+            for (size_t i = 0; i < ownerPids.size() - 1; i++) {
+                pidToPriorityMap.emplace(ownerPids[i],
+                        resource_policy::ClientPriority(priorityScores[i], states[i],
+                        /* isVendorClient won't get copied over*/ false,
+                        /* oomScoreOffset won't get copied over*/ 0));
+            }
+            mActiveClientManager.updatePriorities(pidToPriorityMap);
+
+            int32_t actualScore = priorityScores[priorityScores.size() - 1];
+            int32_t actualState = states[states.size() - 1];
+
+            // Make descriptor for incoming client. We store the oomScoreOffset
+            // since we might need it later on new handleEvictionsLocked and
+            // ProcessInfoService would not take that into account.
+            clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
+                    sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
+                    state->getConflicting(), actualScore, clientPid, actualState,
+                    oomScoreOffset, systemNativeClient);
+        }
 
         resource_policy::ClientPriority clientPriority = clientDescriptor->getPriority();
 
@@ -1729,7 +1825,7 @@
     //      have android.permission.SYSTEM_CAMERA permissions.
     if (!isVendorListener && (systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA ||
             (systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
-            !hasPermissionsForSystemCamera(clientPid, clientUid)))) {
+            !hasPermissionsForSystemCamera(String8(), clientPid, clientUid)))) {
         return true;
     }
     return false;
@@ -1769,7 +1865,7 @@
     //     characteristics) even if clients don't have android.permission.CAMERA. We do not want the
     //     same behavior for system camera devices.
     if (!systemClient && systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
-            !hasPermissionsForSystemCamera(cPid, cUid)) {
+            !hasPermissionsForSystemCamera(cameraId, cPid, cUid)) {
         ALOGW("Rejecting access to system only camera %s, inadequete permissions",
                 cameraId.c_str());
         return true;
@@ -1817,7 +1913,10 @@
         clientUserId = multiuser_get_user_id(callingUid);
     }
 
-    if (mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
+    // Automotive privileged client AID_AUTOMOTIVE_EVS using exterior system camera for use cases
+    // such as rear view and surround view cannot be disabled.
+    if ((!isAutomotivePrivilegedClient(callingUid) || !isAutomotiveExteriorSystemCamera(id)) &&
+            mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
         String8 msg =
                 String8::format("Camera disabled by device policy");
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -1826,7 +1925,7 @@
 
     // enforce system camera permissions
     if (oomScoreOffset > 0 &&
-            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid()) &&
+            !hasPermissionsForSystemCamera(id, callingPid, CameraThreadState::getCallingUid()) &&
             !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
         String8 msg =
                 String8::format("Cannot change the priority of a client %s pid %d for "
@@ -1915,6 +2014,8 @@
 
     bool isNonSystemNdk = false;
     String16 clientPackageName;
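+    // packageUid is resolved up front because it is needed both for the package name lookup and
+    // for the automotive privileged client check further below.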
+    int packageUid = (clientUid == USE_CALLING_UID) ?
+            CameraThreadState::getCallingUid() : clientUid;
     if (clientPackageNameMaybe.size() <= 0) {
         // NDK calls don't come with package names, but we need one for various cases.
         // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
@@ -1922,8 +2023,6 @@
         // same permissions, so picking any associated package name is sufficient. For some
         // other cases, this may give inaccurate names for clients in logs.
         isNonSystemNdk = true;
-        int packageUid = (clientUid == USE_CALLING_UID) ?
-            CameraThreadState::getCallingUid() : clientUid;
         clientPackageName = getPackageNameFromUid(packageUid);
     } else {
         clientPackageName = clientPackageNameMaybe;
@@ -2119,32 +2218,38 @@
                     clientPackageName));
         }
 
-        // Set camera muting behavior
-        bool isCameraPrivacyEnabled =
-                mSensorPrivacyPolicy->isCameraPrivacyEnabled();
-        if (client->supportsCameraMute()) {
-            client->setCameraMute(
-                    mOverrideCameraMuteMode || isCameraPrivacyEnabled);
-        } else if (isCameraPrivacyEnabled) {
-            // no camera mute supported, but privacy is on! => disconnect
-            ALOGI("Camera mute not supported for package: %s, camera id: %s",
-                    String8(client->getPackageName()).string(), cameraId.string());
-            // Do not hold mServiceLock while disconnecting clients, but
-            // retain the condition blocking other clients from connecting
-            // in mServiceLockWrapper if held.
-            mServiceLock.unlock();
-            // Clear caller identity temporarily so client disconnect PID
-            // checks work correctly
-            int64_t token = CameraThreadState::clearCallingIdentity();
-            // Note AppOp to trigger the "Unblock" dialog
-            client->noteAppOp();
-            client->disconnect();
-            CameraThreadState::restoreCallingIdentity(token);
-            // Reacquire mServiceLock
-            mServiceLock.lock();
+        // Automotive privileged clients (AID_AUTOMOTIVE_EVS) using an exterior system camera for
+        // use cases such as rear view and surround view cannot be disabled and are exempt from
+        // the camera privacy policy.
+        if ((!isAutomotivePrivilegedClient(packageUid) ||
+                !isAutomotiveExteriorSystemCamera(cameraId))) {
+            // Set camera muting behavior.
+            bool isCameraPrivacyEnabled =
+                    mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+            if (client->supportsCameraMute()) {
+                client->setCameraMute(
+                        mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+            } else if (isCameraPrivacyEnabled) {
+                // no camera mute supported, but privacy is on! => disconnect
+                ALOGI("Camera mute not supported for package: %s, camera id: %s",
+                        String8(client->getPackageName()).string(), cameraId.string());
+                // Do not hold mServiceLock while disconnecting clients, but
+                // retain the condition blocking other clients from connecting
+                // in mServiceLockWrapper if held.
+                mServiceLock.unlock();
+                // Clear caller identity temporarily so client disconnect PID
+                // checks work correctly
+                int64_t token = CameraThreadState::clearCallingIdentity();
+                // Note AppOp to trigger the "Unblock" dialog
+                client->noteAppOp();
+                client->disconnect();
+                CameraThreadState::restoreCallingIdentity(token);
+                // Reacquire mServiceLock
+                mServiceLock.lock();
 
-            return STATUS_ERROR_FMT(ERROR_DISABLED,
-                    "Camera \"%s\" disabled due to camera mute", cameraId.string());
+                return STATUS_ERROR_FMT(ERROR_DISABLED,
+                        "Camera \"%s\" disabled due to camera mute", cameraId.string());
+            }
         }
 
         if (shimUpdateOnly) {
@@ -2747,13 +2852,11 @@
     // Check for camera permissions
     int callingPid = CameraThreadState::getCallingPid();
     int callingUid = CameraThreadState::getCallingUid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-                sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(String8(),
+                sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE);
     if ((callingPid != getpid()) && !checkPermissionForCamera) {
         ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
@@ -2801,13 +2904,13 @@
 
     auto clientUid = CameraThreadState::getCallingUid();
     auto clientPid = CameraThreadState::getCallingPid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.uid = clientUid;
     attributionSource.pid = clientPid;
-    bool openCloseCallbackAllowed = permissionChecker.checkPermissionForPreflight(
+
+    bool openCloseCallbackAllowed = checkPermission(String8(),
             sCameraOpenCloseListenerPermission, attributionSource, String16(),
-            AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+            AppOpsManager::OP_NONE);
 
     Mutex::Autolock lock(mServiceLock);
 
@@ -3926,7 +4029,7 @@
             | ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE
             | ActivityManager::UID_OBSERVER_PROC_OOM_ADJ,
             ActivityManager::PROCESS_STATE_UNKNOWN,
-            String16("cameraserver"), emptyUidArray, 0, mObserverToken);
+            kServiceName, emptyUidArray, 0, mObserverToken);
     if (res == OK) {
         mRegistered = true;
         ALOGV("UidPolicy: Registered with ActivityManager");
@@ -4069,7 +4172,7 @@
         monitoredUid.procAdj = resource_policy::UNKNOWN_ADJ;
         monitoredUid.refCount = 1;
         it = mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid)).first;
-        status_t res = mAm.addUidToObserver(mObserverToken, String16("cameraserver"), uid);
+        status_t res = mAm.addUidToObserver(mObserverToken, kServiceName, uid);
         if (res != OK) {
             ALOGE("UidPolicy: Failed to add uid to observer: 0x%08x", res);
         }
@@ -4090,7 +4193,7 @@
         it->second.refCount--;
         if (it->second.refCount == 0) {
             mMonitoredUids.erase(it);
-            status_t res = mAm.removeUidFromObserver(mObserverToken, String16("cameraserver"), uid);
+            status_t res = mAm.removeUidFromObserver(mObserverToken, kServiceName, uid);
             if (res != OK) {
                 ALOGE("UidPolicy: Failed to remove uid from observer: 0x%08x", res);
             }
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 3214d4c..38336ee 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
 #define ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
@@ -607,6 +608,13 @@
     int32_t updateAudioRestrictionLocked();
 
 private:
+    /**
+     * Returns true if the device is an automotive device and cameraId refers to a
+     * system-only camera whose AUTOMOTIVE_LOCATION characteristic is one of
+     * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT, AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
+     * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
+     */
+    bool isAutomotiveExteriorSystemCamera(const String8& cameraId) const;
 
     // TODO: b/263304156 update this to make use of a death callback for more
     // robust/fault tolerant logging
@@ -623,6 +631,22 @@
     }
 
     /**
+     * Pre-grants the permission if the attribution source uid belongs to an automotive
+     * privileged client. Otherwise, uses the system permission checker to check for the
+     * appropriate permission. If this function is called to access a specific camera, the
+     * cameraId must not be empty. The cameraId is used only for automotive privileged clients,
+     * so that the permission is pre-granted only for system cameras located outside of the
+     * vehicle body frame; cameras located inside the vehicle cabin still require user
+     * permission.
+     */
+    bool checkPermission(const String8& cameraId, const String16& permission,
+            const content::AttributionSourceState& attributionSource, const String16& message,
+            int32_t attributedOpCode) const;
+
+    bool hasPermissionsForSystemCamera(const String8& cameraId, int callingPid, int callingUid)
+            const;
+
+    /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
      */
@@ -885,7 +909,7 @@
     // Should a device status update be skipped for a particular camera device ? (this can happen
     // under various conditions. For example if a camera device is advertised as
     // system only or hidden secure camera, amongst possible others.
-    static bool shouldSkipStatusUpdates(SystemCameraKind systemCameraKind, bool isVendorListener,
+    bool shouldSkipStatusUpdates(SystemCameraKind systemCameraKind, bool isVendorListener,
             int clientPid, int clientUid);
 
     // Gets the kind of camera device (i.e public, hidden secure or system only)
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 61c3298..4487970 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -60,9 +60,9 @@
 #include "device3/Camera3InputStream.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3SharedOutputStream.h"
-#include "mediautils/SchedulingPolicyService.h"
 #include "utils/CameraThreadState.h"
 #include "utils/CameraTraces.h"
+#include "utils/SchedulingPolicyUtils.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
 
@@ -2625,8 +2625,8 @@
     if (disableFifo != 1) {
         // Boost priority of request thread to SCHED_FIFO.
         pid_t requestThreadTid = mRequestThread->getTid();
-        res = requestPriority(getpid(), requestThreadTid,
-                kRequestThreadPriority, /*isForApp*/ false, /*asynchronous*/ false);
+        res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), requestThreadTid,
+                kRequestThreadPriority);
         if (res != OK) {
             ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
                     strerror(-res), res);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index f742a6d..a2a9d04 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -260,7 +260,7 @@
 
     auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
     if (mapper != states.rotateAndCropMappers.end()) {
-        const auto& remappedKeys = iter->second.getRemappedKeys();
+        const auto& remappedKeys = mapper->second.getRemappedKeys();
         keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
     }
 
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 5e2a3fb..b035edd 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -31,11 +31,14 @@
         "libmedia_headers",
     ],
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
+
     shared_libs: [
         "libbase",
         "libbinder",
         "libcutils",
-        "libcameraservice",
         "libhidlbase",
         "liblog",
         "libcamera_client",
@@ -45,11 +48,6 @@
         "libjpeg",
         "libexif",
         "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
@@ -58,6 +56,12 @@
     ],
 
     static_libs: [
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V2-ndk",
+        "libcameraservice",
         "libgmock",
     ],
 
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
new file mode 100644
index 0000000..92a1030
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SchedulingPolicyUtils.h"
+
+#include <errno.h>
+#include <pthread.h>
+#include <sched.h>
+
+#include "CameraThreadState.h"
+#include <private/android_filesystem_config.h>
+#include <processgroup/processgroup.h>
+#include <processgroup/sched_policy.h>
+#include <procinfo/process.h>
+#include <utils/Log.h>
+
+namespace android {
+namespace camera3 {
+namespace SchedulingPolicyUtils {
+
+int requestPriorityDirect(int pid, int tid, int prio) {
+    android::procinfo::ProcessInfo processInfo;
+    static const int kMinPrio = 1;
+    static const int kMaxPrio = 3;
+
+    if (!android::procinfo::GetProcessInfo(tid, &processInfo)) {
+       ALOGE("%s: Error getting process info", __FUNCTION__);
+       return -EPERM;
+    }
+
+    if (prio < kMinPrio || prio > kMaxPrio || processInfo.pid != pid) {
+        ALOGE("%s: Invalid parameter prio=%d pid=%d procinfo.pid=%d", __FUNCTION__, prio, pid,
+                processInfo.pid);
+        return -EPERM;
+    }
+
+    // Set the thread group to the audio system thread group, consistent with the
+    // implementation in SchedulingPolicyService.java when isApp is false in the
+    // requestPriority method.
+    if (!SetTaskProfiles(tid, {get_sched_policy_profile_name(SP_AUDIO_SYS)},
+            /*use_fd_cache*/ true)) {
+        ALOGE("%s:Error in  SetTaskProfiles", __FUNCTION__);
+        return -EPERM;
+    }
+
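+    // SCHED_RESET_ON_FORK prevents child processes created via fork() from inheriting the
+    // elevated SCHED_FIFO policy.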
+    struct sched_param param;
+    param.sched_priority = prio;
+    return sched_setscheduler(tid, SCHED_FIFO | SCHED_RESET_ON_FORK, &param);
+}
+
+} // namespace SchedulingPolicyUtils
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h
new file mode 100644
index 0000000..f71fddf
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
+#define ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
+
+namespace android {
+namespace camera3 {
+namespace SchedulingPolicyUtils {
+
+/**
+ * Request elevated priority for thread tid, whose thread group leader must be pid.
+ * Instead of using scheduling policy service, this method uses direct system calls.
+ * The priority parameter is currently restricted to the range 1 to 3, matching the
+ * scheduling policy service implementation.
+ */
+int requestPriorityDirect(int pid, int tid, int prio);
+
+} // namespace SchedulingPolicyUtils
+} // namespace camera3
+} // namespace android
+
+#endif // ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index d98974f..f38a085 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -27,21 +27,18 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_fuzz {
-    name: "mediaresourcemanager_fuzzer",
-    srcs: [
-        "mediaresourcemanager_fuzzer.cpp",
+cc_defaults {
+    name: "mediaresourcemanager_fuzzer_defaults",
+    defaults: [
+        "service_fuzzer_defaults",
     ],
     static_libs: [
         "liblog",
         "libresourcemanagerservice",
     ],
     shared_libs: [
-        "libbinder",
-        "libbinder_ndk",
         "libmedia",
         "libmediautils",
-        "libutils",
         "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
@@ -54,3 +51,39 @@
         componentid: 155276,
     },
 }
+
+cc_fuzz {
+    name: "mediaresourcemanager_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    srcs: [
+        "mediaresourcemanager_fuzzer.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "resourcemanager_service_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    srcs: [
+        "resourcemanager_service_fuzzer.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "resourceobserver_service_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    static_libs: [
+        "resourceobserver_aidl_interface-V1-ndk",
+    ],
+    srcs: [
+        "resourceobserver_service_fuzzer.cpp",
+    ],
+    fuzz_config: {
+        triage_assignee: "waghpawan@google.com",
+    },
+}
diff --git a/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
new file mode 100644
index 0000000..ca10d20
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_interface_utils.h>
+
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "ResourceManagerService.h"
+
+using android::fuzzService;
+using android::ResourceManagerService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = SharedRefBase::make<ResourceManagerService>();
+    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
new file mode 100644
index 0000000..e69368d
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_interface_utils.h>
+
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "ResourceObserverService.h"
+
+using android::fuzzService;
+using android::ResourceObserverService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = SharedRefBase::make<ResourceObserverService>();
+    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index 549fa59..502d773 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -75,7 +75,9 @@
 
 aaudio_result_t AAudioThread::stop() {
     if (!mHasThread) {
-        ALOGE("stop() but no thread running");
+        // The thread may have been created but not yet started.
+        // Log a warning to draw attention without treating this as an error.
+        ALOGW("stop() but no thread running");
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index c5080a4..3521979 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -78,12 +78,38 @@
 
 ]
 
+cc_defaults {
+    name: "libaaudioservice_dependencies",
 
-cc_library {
+    shared_libs: [
+        "libaaudio_internal",
+        "libaudioclient",
+        "libaudioutils",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
+        "libbase",
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+        "aaudio-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "packagemanager_aidl-cpp",
+    ],
+
+    static_libs: [
+        "libaudioflinger",
+    ],
+}
+
+cc_library_static {
 
     name: "libaaudioservice",
 
     defaults: [
+        "libaaudioservice_dependencies",
         "latest_android_media_audio_common_types_cpp_shared",
     ],
 
@@ -116,25 +142,6 @@
         "-Werror",
     ],
 
-    shared_libs: [
-        "libaaudio_internal",
-        "libaudioclient",
-        "libaudioflinger",
-        "libaudioutils",
-        "libmedia_helper",
-        "libmediametrics",
-        "libmediautils",
-        "libbase",
-        "libbinder",
-        "libcutils",
-        "liblog",
-        "libutils",
-        "aaudio-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
-        "packagemanager_aidl-cpp",
-    ],
-
     export_shared_lib_headers: [
         "libaaudio_internal",
         "framework-permission-aidl-cpp",
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index f047065..f5c2e6c 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -403,13 +403,6 @@
 
     request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
 
-    request.getConfiguration().setHardwareSampleRate(fdp.ConsumeIntegral<int32_t>());
-    request.getConfiguration().setHardwareSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
-    request.getConfiguration().setHardwareFormat((audio_format_t)(
-        fdp.ConsumeBool()
-            ? fdp.ConsumeIntegral<int32_t>()
-            : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
-
     auto streamHandleInfo = mClient->openStream(request, configurationOutput);
     if (streamHandleInfo.getHandle() < 0) {
         // invalid request, stream not opened.
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index ea5139d..e29d520 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -86,6 +86,7 @@
         "android.hardware.tv.tuner@1.1",
         "android.hardware.tv.tuner-V2-ndk",
         "libbase",
+        "libcutils",
         "libbinder",
         "libfmq",
         "libhidlbase",
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
index 90f1731..acfaf8a 100644
--- a/services/tuner/main_tunerservice.cpp
+++ b/services/tuner/main_tunerservice.cpp
@@ -17,6 +17,7 @@
 #include <android-base/logging.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
+#include <cutils/properties.h>
 #include <utils/Log.h>
 #include <hidl/HidlTransportSupport.h>
 
@@ -31,6 +32,11 @@
 int main() {
     ALOGD("Tuner service starting");
 
+    if (!property_get_bool("tuner.server.enable", false)) {
+        ALOGD("tuner is not enabled, terminating");
+        return 0;
+    }
+
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm = defaultServiceManager();
     hardware::configureRpcThreadpool(16, true);